diff --git a/.github/mergify.yml b/.github/mergify.yml index 48387c0b0c6..bb52ef89d10 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -69,3 +69,68 @@ pull_request_rules: backport: branches: - release/protocol/v4.x + - name: backport to release/indexer/v5.x branch + conditions: + - base=main + - label=backport/indexer/v5.x + actions: + backport: + branches: + - release/indexer/v5.x + - name: backport to release/protocol/v5.x branch + conditions: + - base=main + - label=backport/protocol/v5.x + actions: + backport: + branches: + - release/protocol/v5.x + - name: backport to release/protocol/v5.1.x branch + conditions: + - base=main + - label=backport/protocol/v5.1.x + actions: + backport: + branches: + - release/protocol/v5.1.x + - name: backport to release/protocol/v5.2.x branch + conditions: + - base=main + - label=backport/protocol/v5.2.x + actions: + backport: + branches: + - release/protocol/v5.2.x + - name: backport to release/indexer/v6.x branch + conditions: + - base=main + - label=backport/indexer/v6.x + actions: + backport: + branches: + - release/indexer/v6.x + - name: backport to release/protocol/v6.x branch + conditions: + - base=main + - label=backport/protocol/v6.x + actions: + backport: + branches: + - release/protocol/v6.x + - name: backport to release/indexer/v7.x branch + conditions: + - base=main + - label=backport/indexer/v7.x + actions: + backport: + branches: + - release/indexer/v7.x + - name: backport to release/protocol/v7.x branch + conditions: + - base=main + - label=backport/protocol/v7.x + actions: + backport: + branches: + - release/protocol/v7.x + diff --git a/.github/workflows/indexer-api-documentation-check.yml b/.github/workflows/indexer-api-documentation-check.yml index f48aa9122c2..976ee95002b 100644 --- a/.github/workflows/indexer-api-documentation-check.yml +++ b/.github/workflows/indexer-api-documentation-check.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 
'release/indexer/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x paths: - 'indexer/**' diff --git a/.github/workflows/indexer-build-and-push-dev-staging.yml b/.github/workflows/indexer-build-and-push-dev-staging.yml index 50172568725..5a98b72552e 100644 --- a/.github/workflows/indexer-build-and-push-dev-staging.yml +++ b/.github/workflows/indexer-build-and-push-dev-staging.yml @@ -4,8 +4,8 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/[a-z]+/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x - - 'release/[a-z]+/v[0-9]+.x' # e.g. release/indexer/v1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x # TODO(DEC-837): Customize github build and push to ECR by service with paths jobs: diff --git a/.github/workflows/indexer-build-and-push-mainnet.yml b/.github/workflows/indexer-build-and-push-mainnet.yml index 9d338322a40..78d54d2f68d 100644 --- a/.github/workflows/indexer-build-and-push-mainnet.yml +++ b/.github/workflows/indexer-build-and-push-mainnet.yml @@ -4,8 +4,8 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/[a-z]+/0.[0-9]+.x' # e.g. release/indexer/v0.1.x - - 'release/[a-z]+/v[0-9]+.x' # e.g. release/indexer/v1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.x' # e.g. 
release/indexer/v1.x # TODO(DEC-837): Customize github build and push to ECR by service with paths jobs: diff --git a/.github/workflows/indexer-build-and-push-testnet.yml b/.github/workflows/indexer-build-and-push-testnet.yml index 7d645adbbd2..3cee1bf285e 100644 --- a/.github/workflows/indexer-build-and-push-testnet.yml +++ b/.github/workflows/indexer-build-and-push-testnet.yml @@ -4,8 +4,8 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/[a-z]+/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x - - 'release/[a-z]+/v[0-9]+.x' # e.g. release/indexer/v1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x # TODO(DEC-837): Customize github build and push to ECR by service with paths jobs: diff --git a/.github/workflows/indexer-build-docker-image-check.yml b/.github/workflows/indexer-build-docker-image-check.yml index 45b912b57d4..75fd0c4d36e 100644 --- a/.github/workflows/indexer-build-docker-image-check.yml +++ b/.github/workflows/indexer-build-docker-image-check.yml @@ -7,7 +7,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/indexer/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x paths: - 'indexer/**' @@ -77,3 +77,33 @@ jobs: DOCKER_BUILDKIT=1 docker build \ --platform amd64 \ -f Dockerfile.bazooka.remote . 
+ + check-build-auxo: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./indexer + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Install pnpm + run: + npm install -g pnpm@6.34.0 + + - name: Build and install + run: | + pnpm install --loglevel warn --frozen-lockfile + pnpm run build:prod:all + + - name: Build docker image for auxo + id: build-image + run: | + DOCKER_BUILDKIT=1 docker build \ + --platform amd64 \ + -f Dockerfile.auxo.remote . diff --git a/.github/workflows/indexer-build-test-coverage.yml b/.github/workflows/indexer-build-test-coverage.yml index c445ead9321..c158f3e063e 100644 --- a/.github/workflows/indexer-build-test-coverage.yml +++ b/.github/workflows/indexer-build-test-coverage.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/indexer/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x paths: - 'indexer/**' diff --git a/.github/workflows/indexer-lint.yml b/.github/workflows/indexer-lint.yml index 4aab1a407d1..978ae3fa4cf 100644 --- a/.github/workflows/indexer-lint.yml +++ b/.github/workflows/indexer-lint.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/indexer/v0.[0-9]+.x' # e.g. release/indexer/v0.1.x + - 'release/indexer/v[0-9]+.[0-9]+.x' # e.g. release/indexer/v0.1.x - 'release/indexer/v[0-9]+.x' # e.g. release/indexer/v1.x paths: - 'indexer/**' diff --git a/.github/workflows/proto.yml b/.github/workflows/proto.yml index d84e84f29a1..ec84ae15d5a 100644 --- a/.github/workflows/proto.yml +++ b/.github/workflows/proto.yml @@ -18,7 +18,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. 
release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x paths: # Keep in sync with above diff --git a/.github/workflows/protocol-benchmark.yml b/.github/workflows/protocol-benchmark.yml new file mode 100644 index 00000000000..61e4098e4c6 --- /dev/null +++ b/.github/workflows/protocol-benchmark.yml @@ -0,0 +1,49 @@ +name: Protocol Benchmark +on: # yamllint disable-line rule:truthy + pull_request: + paths: + - 'protocol/**' + push: + branches: + - main + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x + paths: + - 'protocol/**' + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + benchmark: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./protocol + steps: + - name: Check out repository code + uses: actions/checkout@v3 + - name: Setup Golang + uses: actions/setup-go@v3 + with: + go-version: 1.22 + - name: Prune Docker system to free up space + run: | + docker system prune -a --volumes -f + - name: Run Benchmarks + run: make benchmark | tee ./benchmark_output.txt + - name: Download previous benchmark data + uses: actions/cache@v4 + with: + path: ./cache + key: ${{ runner.os }}-benchmark + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + tool: 'go' + output-file-path: ./protocol/benchmark_output.txt + external-data-json-path: ./cache/benchmark-data.json + fail-on-alert: true + alert-threshold: '150%' + save-data-file: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} diff --git a/.github/workflows/protocol-build-and-push-mainnet.yml b/.github/workflows/protocol-build-and-push-mainnet.yml new file mode 100644 index 00000000000..3999f9fda43 --- /dev/null +++ b/.github/workflows/protocol-build-and-push-mainnet.yml @@ -0,0 +1,48 @@ +name: Protocol Build & Push Image to AWS ECR + +on: # yamllint disable-line 
rule:truthy + pull_request: + branches: + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x + push: + branches: + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x + +jobs: + build-and-push-mainnet: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./protocol + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: '0' # without this, ignite fails. + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_VALIDATOR_MAINNET }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_VALIDATOR_MAINNET }} + aws-region: ap-northeast-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Build, Tag, and Push the Image to Amazon ECR + id: build-image + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: mainnet-full-node + run: | + make localnet-build-amd64 + commit_hash=$(git rev-parse --short=7 HEAD) + docker build \ + --platform amd64 \ + -t $ECR_REGISTRY/$ECR_REPOSITORY:$commit_hash \ + -f testing/mainnet/Dockerfile . + docker push $ECR_REGISTRY/$ECR_REPOSITORY --all-tags diff --git a/.github/workflows/protocol-build-and-push-snapshot.yml b/.github/workflows/protocol-build-and-push-snapshot.yml index 51ffb241932..e5ff2c8b05b 100644 --- a/.github/workflows/protocol-build-and-push-snapshot.yml +++ b/.github/workflows/protocol-build-and-push-snapshot.yml @@ -4,8 +4,8 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/[a-z]+/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x - - 'release/[a-z]+/v[0-9]+.x' # e.g. release/protocol/v1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. 
release/protocol/v1.x jobs: build-and-push-snapshot-dev: diff --git a/.github/workflows/protocol-build-and-push-testnet.yml b/.github/workflows/protocol-build-and-push-testnet.yml new file mode 100644 index 00000000000..29e79317ee4 --- /dev/null +++ b/.github/workflows/protocol-build-and-push-testnet.yml @@ -0,0 +1,49 @@ +name: Protocol Build & Push Image to AWS ECR + +on: # yamllint disable-line rule:truthy + pull_request: + branches: + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x + push: + branches: + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x + +jobs: + build-and-push-testnet: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./protocol + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: '0' # without this, ignite fails. + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_VALIDATOR_TESTNET }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_VALIDATOR_TESTNET }} + aws-region: us-east-2 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Build, Tag, and Push the Image to Amazon ECR + id: build-image + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: testnet-validator + AWS_REGION: us-east-2 + run: | + make localnet-build-amd64 + commit_hash=$(git rev-parse --short=7 HEAD) + docker build \ + --platform amd64 \ + -t $ECR_REGISTRY/$ECR_REPOSITORY:$commit_hash \ + -f testing/testnet/Dockerfile . 
+ docker push $ECR_REGISTRY/$ECR_REPOSITORY --all-tags diff --git a/.github/workflows/protocol-build-and-push.yml b/.github/workflows/protocol-build-and-push.yml index b2463692f73..ade53413692 100644 --- a/.github/workflows/protocol-build-and-push.yml +++ b/.github/workflows/protocol-build-and-push.yml @@ -4,8 +4,8 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/[a-z]+/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x - - 'release/[a-z]+/v[0-9]+.x' # e.g. release/protocol/v1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x jobs: build-and-push-dev: @@ -222,4 +222,4 @@ jobs: --platform amd64 \ -t $ECR_REGISTRY/$ECR_REPOSITORY:$commit_hash \ -f testing/testnet-staging/Dockerfile . - docker push $ECR_REGISTRY/$ECR_REPOSITORY --all-tags + docker push $ECR_REGISTRY/$ECR_REPOSITORY --all-tags \ No newline at end of file diff --git a/.github/workflows/protocol-container-tests.yml b/.github/workflows/protocol-container-tests.yml index 4dcc68a4c58..0c3a099f0ca 100644 --- a/.github/workflows/protocol-container-tests.yml +++ b/.github/workflows/protocol-container-tests.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. 
release/protocol/v1.x paths: - 'protocol/**' @@ -27,7 +27,7 @@ jobs: - name: Setup Golang uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Build images run: DOCKER_BUILDKIT=1 make test-container-build - name: Run container tests diff --git a/.github/workflows/protocol-exchange-tests.yml b/.github/workflows/protocol-exchange-tests.yml index 52c4f9de03c..2eb2a47ca41 100644 --- a/.github/workflows/protocol-exchange-tests.yml +++ b/.github/workflows/protocol-exchange-tests.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x paths: - 'protocol/daemons/**' @@ -30,7 +30,7 @@ jobs: name: Setup Golang uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version diff --git a/.github/workflows/protocol-lint.yml b/.github/workflows/protocol-lint.yml index 88c7d8ce5cd..f99908ca02e 100644 --- a/.github/workflows/protocol-lint.yml +++ b/.github/workflows/protocol-lint.yml @@ -8,7 +8,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - "release/protocol/v0.[0-9]+.x" # e.g. release/protocol/v0.1.x + - "release/protocol/v[0-9]+.[0-9]+.x" # e.g. release/protocol/v0.1.x - "release/protocol/v[0-9]+.x" # e.g. release/protocol/v1.x paths: - "protocol/**" @@ -45,6 +45,6 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Run golangci-lint run: make lint diff --git a/.github/workflows/protocol-pregenesis.yml b/.github/workflows/protocol-pregenesis.yml index 44bbcd14305..691cdd19143 100644 --- a/.github/workflows/protocol-pregenesis.yml +++ b/.github/workflows/protocol-pregenesis.yml @@ -7,7 +7,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. 
release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. release/protocol/v1.x paths: - protocol/** diff --git a/.github/workflows/protocol-release.yml b/.github/workflows/protocol-release.yml index d6c297b6488..66cb9527cae 100644 --- a/.github/workflows/protocol-release.yml +++ b/.github/workflows/protocol-release.yml @@ -26,7 +26,7 @@ jobs: - name: install go uses: actions/setup-go@v3 with: - go-version: '1.21' + go-version: '1.22' check-latest: true - name: Create Directory run: mkdir ./build diff --git a/.github/workflows/protocol-sim.yml b/.github/workflows/protocol-sim.yml index 512c4164d82..82f83aeecc3 100644 --- a/.github/workflows/protocol-sim.yml +++ b/.github/workflows/protocol-sim.yml @@ -8,7 +8,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. 
release/protocol/v1.x paths: - 'protocol/**' @@ -28,7 +28,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - run: make build @@ -39,7 +39,7 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - name: Install runsim @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - uses: actions/cache@v2.1.3 @@ -109,7 +109,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - uses: actions/cache@v2.1.3 @@ -159,7 +159,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - uses: actions/cache@v2.1.3 @@ -209,7 +209,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Display go version run: go version - uses: actions/cache@v2.1.3 diff --git a/.github/workflows/protocol-test.yml b/.github/workflows/protocol-test.yml index d8313513243..c934d9bae5f 100644 --- a/.github/workflows/protocol-test.yml +++ b/.github/workflows/protocol-test.yml @@ -6,7 +6,7 @@ on: # yamllint disable-line rule:truthy push: branches: - main - - 'release/protocol/v0.[0-9]+.x' # e.g. release/protocol/v0.1.x + - 'release/protocol/v[0-9]+.[0-9]+.x' # e.g. release/protocol/v0.1.x - 'release/protocol/v[0-9]+.x' # e.g. 
release/protocol/v1.x paths: - 'protocol/**' @@ -30,7 +30,7 @@ jobs: name: Setup Golang uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Ensure `go.mod` is up to date run: go mod tidy && git diff --exit-code @@ -52,7 +52,7 @@ jobs: name: Setup Golang uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - name: Ensure `go.mod` is up to date run: go mod tidy && git diff --exit-code @@ -69,7 +69,7 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 - uses: actions/checkout@v3 - name: Install goveralls run: go install github.com/mattn/goveralls@latest @@ -104,7 +104,10 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: 1.21 + go-version: 1.22 + - name: Prune Docker system to free up space + run: | + docker system prune -a --volumes -f - name: start localnet run: | DOCKER_BUILDKIT=1 make localnet-startd diff --git a/.gitignore b/.gitignore index a88d2ec1a4b..05ae345c17d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,9 @@ v4-proto-py/v4_proto v4-proto-js/build v4-proto-js/node_modules v4-proto-js/src +v4-proto-rs/target +v4-proto-rs/Cargo.lock .idea +.vscode **/.DS_Store diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000000..98e5016a7e5 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @dydxprotocol/backend diff --git a/audits/Informal-Systems-Audit-Report-2023-Q4.pdf b/audits/Informal-Systems-Audit-Report-2023-Q4.pdf new file mode 100644 index 00000000000..284931fcb2e Binary files /dev/null and b/audits/Informal-Systems-Audit-Report-2023-Q4.pdf differ diff --git a/audits/Informal-Systems-Audit-Report-2024-Q1.pdf b/audits/Informal-Systems-Audit-Report-2024-Q1.pdf new file mode 100644 index 00000000000..89e74dbebd0 Binary files /dev/null and b/audits/Informal-Systems-Audit-Report-2024-Q1.pdf differ diff --git a/audits/Informal-Systems-Audit-Report-2024-Q2+.pdf b/audits/Informal-Systems-Audit-Report-2024-Q2+.pdf new file mode 100644 
index 00000000000..c738c1601de Binary files /dev/null and b/audits/Informal-Systems-Audit-Report-2024-Q2+.pdf differ diff --git a/audits/Informal-Systems-Audit-Report-2024-Q2.pdf b/audits/Informal-Systems-Audit-Report-2024-Q2.pdf new file mode 100644 index 00000000000..4bcc7aa9d55 Binary files /dev/null and b/audits/Informal-Systems-Audit-Report-2024-Q2.pdf differ diff --git a/indexer/Dockerfile.auxo.remote b/indexer/Dockerfile.auxo.remote index 1d43896fd60..28bc1d1ac0e 100644 --- a/indexer/Dockerfile.auxo.remote +++ b/indexer/Dockerfile.auxo.remote @@ -6,9 +6,16 @@ WORKDIR /usr/app # Copy pnpm lock and workspace and package.json from base directory COPY ./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ +# Copy patches +COPY ./patches ./patches + # Copy auxo and imported packages COPY ./packages/base/ ./packages/base/ +COPY ./packages/dev/ ./packages/dev/ +COPY ./packages/postgres/ ./packages/postgres/ COPY ./packages/v4-protos/ ./packages/v4-protos/ +COPY ./packages/kafka/ ./packages/kafka/ +COPY ./packages/redis/ ./packages/redis/ COPY ./services/auxo/ ./services/auxo/ COPY ./packages/v4-proto-parser/ ./packages/v4-proto-parser/ @@ -19,7 +26,7 @@ COPY tsconfig.json ./ RUN npm install -g pnpm@6 # Install npm modules using pnpm -RUN pnpm install --loglevel warn --frozen-lockfile +RUN pnpm install --loglevel warn --frozen-lockfile --unsafe-perm RUN pnpm run build:prod:all FROM public.ecr.aws/lambda/nodejs:16 diff --git a/indexer/Dockerfile.bazooka.remote b/indexer/Dockerfile.bazooka.remote index f5867721c8d..148733d44d7 100644 --- a/indexer/Dockerfile.bazooka.remote +++ b/indexer/Dockerfile.bazooka.remote @@ -4,16 +4,21 @@ FROM public.ecr.aws/lambda/nodejs:16 as builder WORKDIR /usr/app # Copy pnpm lock and workspace and package.json from base directory -COPY ./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ +COPY ./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ + +# Copy patches +COPY ./patches ./patches + # Copy bazooka and imported 
packages COPY ./packages/base/ ./packages/base/ +COPY ./packages/dev/ ./packages/dev/ COPY ./packages/postgres/ ./packages/postgres/ COPY ./packages/v4-protos/ ./packages/v4-protos/ COPY ./packages/kafka/ ./packages/kafka/ COPY ./packages/redis/ ./packages/redis/ COPY ./services/bazooka/ ./services/bazooka/ COPY ./packages/v4-proto-parser/ ./packages/v4-proto-parser/ +COPY ./packages/notifications/ ./packages/notifications/ # Copy tsconfig in order to build typescript into javascript COPY tsconfig.json ./ @@ -22,7 +27,7 @@ COPY tsconfig.json ./ RUN npm install -g pnpm@6 # Install npm modules using pnpm -RUN pnpm install --loglevel warn --frozen-lockfile +RUN pnpm install --loglevel warn --frozen-lockfile --unsafe-perm RUN pnpm run build:prod:all FROM public.ecr.aws/lambda/nodejs:16 diff --git a/indexer/Dockerfile.postgres-package.local b/indexer/Dockerfile.postgres-package.local index 1d4f6fe0fec..916b88467e8 100644 --- a/indexer/Dockerfile.postgres-package.local +++ b/indexer/Dockerfile.postgres-package.local @@ -11,6 +11,9 @@ WORKDIR /home/dydx/app # Copy pnpm lock and workspace and package.json from base directory COPY ./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ +# Copy patches +COPY ./patches ./patches + # Copy package.json from postgres and imported packages being run COPY ./packages/base/package.json ./packages/base/ COPY ./packages/postgres/package.json ./packages/postgres/ @@ -37,7 +40,7 @@ RUN chown dydx -R /home/dydx/app USER dydx # Install npm modules using pnpm -RUN pnpm i --loglevel warn --production --frozen-lockfile +RUN pnpm i --loglevel warn --production --frozen-lockfile --unsafe-perm WORKDIR /home/dydx/app/packages/postgres diff --git a/indexer/Dockerfile.service.local b/indexer/Dockerfile.service.local index 5237caba535..cfeee9eb3b7 100644 --- a/indexer/Dockerfile.service.local +++ b/indexer/Dockerfile.service.local @@ -13,6 +13,9 @@ WORKDIR /home/dydx/app # Copy pnpm lock and workspace and package.json from base directory COPY 
./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ +# Copy patches +COPY ./patches ./patches + # Copy package.json from all packages being run COPY ./packages/base/package.json ./packages/base/ COPY ./packages/kafka/package.json ./packages/kafka/ @@ -21,6 +24,7 @@ COPY ./packages/redis/package.json ./packages/redis/ COPY ./packages/v4-protos/package.json ./packages/v4-protos/ COPY ./packages/v4-proto-parser/package.json ./packages/v4-proto-parser/package.json COPY ./packages/compliance/package.json ./packages/compliance/ +COPY ./packages/notifications/package.json ./packages/notifications/ # Copy build files from all packages being run COPY ./packages/base/build ./packages/base/build/ @@ -30,6 +34,7 @@ COPY ./packages/redis/build ./packages/redis/build/ COPY ./packages/v4-protos/build ./packages/v4-protos/build/ COPY ./packages/v4-proto-parser/build ./packages/v4-proto-parser/build/ COPY ./packages/compliance/build ./packages/compliance/build/ +COPY ./packages/notifications/build ./packages/notifications/build/ # Copy package.json, build files, and environment files from service being run COPY ./services/${service}/package.json ./services/${service}/ @@ -42,7 +47,7 @@ COPY ./scripts/container-run.sh /home/dydx/app/services/$service/scripts/ RUN chown dydx -R /home/dydx/app USER dydx -RUN pnpm i --loglevel warn --production --frozen-lockfile +RUN pnpm i --loglevel warn --production --frozen-lockfile --unsafe-perm WORKDIR /home/dydx/app/services/$service diff --git a/indexer/Dockerfile.service.remote b/indexer/Dockerfile.service.remote index 582273b32b7..76e07965180 100644 --- a/indexer/Dockerfile.service.remote +++ b/indexer/Dockerfile.service.remote @@ -13,6 +13,9 @@ WORKDIR /home/dydx/app # Copy pnpm lock and workspace and package.json from base directory COPY ./pnpm-lock.yaml ./pnpm-workspace.yaml ./package.json ./ +# Copy patches +COPY ./patches ./patches + # Copy package.json from all packages being run COPY ./packages/base/package.json ./packages/base/ 
COPY ./packages/kafka/package.json ./packages/kafka/ @@ -21,6 +24,7 @@ COPY ./packages/redis/package.json ./packages/redis/ COPY ./packages/v4-protos/package.json ./packages/v4-protos/ COPY ./packages/v4-proto-parser/package.json ./packages/v4-proto-parser/package.json COPY ./packages/compliance/package.json ./packages/compliance/ +COPY ./packages/notifications/package.json ./packages/notifications/ # Copy build files from all packages being run COPY ./packages/base/build ./packages/base/build/ @@ -30,6 +34,7 @@ COPY ./packages/redis/build ./packages/redis/build/ COPY ./packages/v4-protos/build ./packages/v4-protos/build/ COPY ./packages/v4-proto-parser/build ./packages/v4-proto-parser/build/ COPY ./packages/compliance/build ./packages/compliance/build/ +COPY ./packages/notifications/build ./packages/notifications/build/ # Copy package.json, build files, and environment files from service being run COPY ./services/${service}/package.json ./services/${service}/ @@ -42,7 +47,7 @@ COPY ./scripts/container-run.sh /home/dydx/app/services/$service/scripts/ RUN chown dydx -R /home/dydx/app # Install npm modules using pnpm -RUN pnpm i --loglevel warn --production --frozen-lockfile +RUN pnpm i --loglevel warn --production --frozen-lockfile --unsafe-perm USER dydx diff --git a/indexer/README.md b/indexer/README.md index 119d272951e..7598e2c9d93 100644 --- a/indexer/README.md +++ b/indexer/README.md @@ -37,12 +37,23 @@ nvm alias default $(nvm version) # optional npm i -g pnpm@6 ``` -Now, you can install dependencies for Indexer. +### Installation + +Now, you can install dependencies for Indexer. This should also be run anytime packages are updated. ``` pnpm install ``` +### Build + +To build all services and packages, run: + +``` +pnpm run build:all +``` +This should be run whenever code is changed, and you need to deploy or run the updated code, including running unit tests, deploying locally, or deploying to AWS. 
+ ## Adding Packages Use `packages/example-package` as a template: @@ -78,7 +89,7 @@ Protos can be found in `proto/` [here](https://github.com/dydxprotocol/v4-chain/ ## Running unit tests -First build all the services and packages by running: +First, make sure all services and packages are built with the latest code by running: ``` pnpm run build:all @@ -96,6 +107,8 @@ pnpm run test:all If you change any logic, you'll have to re-build the services and packages before running unit tests. +### To run a single test file: +`cd services/{service_name} && pnpm build && pnpm test -- {test_name}` # Running Dockerfile locally TODO(DEC-671): Add e2e tests @@ -217,4 +230,5 @@ Other example subscription events: { "type": "subscribe", "channel": "v4_markets" } { "type": "subscribe", "channel": "v4_orderbook", "id": "BTC-USD" } { "type": "subscribe", "channel": "v4_subaccounts", "id": "address/0" } +{ "type": "subscribe", "channel": "v4_block_height" } ``` diff --git a/indexer/docker-compose-local-deployment.yml b/indexer/docker-compose-local-deployment.yml index 832c27fa85f..075ee7a4d5c 100644 --- a/indexer/docker-compose-local-deployment.yml +++ b/indexer/docker-compose-local-deployment.yml @@ -13,7 +13,8 @@ services: to-websockets-subaccounts:1:1,\ to-websockets-trades:1:1,\ to-websockets-markets:1:1,\ - to-websockets-candles:1:1" + to-websockets-candles:1:1,\ + to-websockets-block-height:1:1" KAFKA_LISTENERS: INTERNAL://:9092,EXTERNAL_SAME_HOST://:29092 KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT diff --git a/indexer/docker-compose.yml b/indexer/docker-compose.yml index 141505046ea..306469544bc 100644 --- a/indexer/docker-compose.yml +++ b/indexer/docker-compose.yml @@ -7,14 +7,15 @@ services: environment: KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_ADVERTISED_HOST_NAME: localhost - KAFKA_CREATE_TOPICS: + KAFKA_CREATE_TOPICS: "to-ender:1:1,\ 
to-vulcan:1:1,\ to-websockets-orderbooks:1:1,\ to-websockets-subaccounts:1:1,\ to-websockets-trades:1:1,\ to-websockets-markets:1:1,\ - to-websockets-candles:1:1" + to-websockets-candles:1:1,\ + to-websockets-block-height:1:1" postgres-test: build: context: . diff --git a/indexer/package.json b/indexer/package.json index ce29744048d..30cab042a32 100644 --- a/indexer/package.json +++ b/indexer/package.json @@ -13,8 +13,15 @@ "lint:all": "pnpm run --parallel lint", "lint:fix:all": "pnpm run --parallel lint:fix", "coverage:all": "pnpm recursive run --workspace-concurrency=1 coverage", - "test:all": "pnpm recursive run --workspace-concurrency=1 test" + "test:all": "pnpm recursive run --workspace-concurrency=1 test", + "postinstall": "patch-package" }, "author": "", - "license": "AGPL-3.0" + "license": "AGPL-3.0", + "dependencies": { + "@milahu/patch-package": "6.4.14" + }, + "devDependencies": { + "@types/ws": "8.5.10" + } } diff --git a/indexer/packages/base/__tests__/instance-id.test.ts b/indexer/packages/base/__tests__/instance-id.test.ts new file mode 100644 index 00000000000..58eef877fda --- /dev/null +++ b/indexer/packages/base/__tests__/instance-id.test.ts @@ -0,0 +1,87 @@ +import { setInstanceId, getInstanceId, resetForTests } from '../src/instance-id'; +import { axiosRequest } from '../src/axios'; +import { asMock } from '@dydxprotocol-indexer/dev'; +import logger from '../src/logger'; +import config from '../src/config'; + +jest.mock('../src/axios', () => ({ + ...(jest.requireActual('../src/axios') as object), + axiosRequest: jest.fn(), +})); + +describe('instance-id', () => { + describe('setInstanceId', () => { + const defaultTaskArn = 'defaultTaskArn'; + const defaultResponse = { + TaskARN: defaultTaskArn, + }; + const ecsUrl = config.ECS_CONTAINER_METADATA_URI_V4; + + beforeEach(() => { + config.ECS_CONTAINER_METADATA_URI_V4 = ecsUrl; + resetForTests(); + jest.resetAllMocks(); + jest.restoreAllMocks(); + 
asMock(axiosRequest).mockResolvedValue(defaultResponse); + }); + + afterAll(() => { + jest.clearAllMocks(); + jest.restoreAllMocks(); + }); + + it('should set instance id to task ARN in staging', async () => { + jest.spyOn(config, 'isStaging').mockReturnValueOnce(true); + config.ECS_CONTAINER_METADATA_URI_V4 = 'url'; + await setInstanceId(); + + expect(getInstanceId()).toEqual(defaultTaskArn); + }); + + it('should set instance id to task ARN in production', async () => { + jest.spyOn(config, 'isProduction').mockReturnValueOnce(true); + config.ECS_CONTAINER_METADATA_URI_V4 = 'url'; + await setInstanceId(); + + expect(getInstanceId()).toEqual(defaultTaskArn); + }); + + it('should not call metadata endpoint if not production or staging', async () => { + config.ECS_CONTAINER_METADATA_URI_V4 = 'url'; + await setInstanceId(); + + expect(getInstanceId()).not.toEqual(defaultTaskArn); + expect(asMock(axiosRequest)).not.toHaveBeenCalled(); + }); + + it('should not set instance id if already set', async () => { + jest.spyOn(config, 'isStaging').mockReturnValue(true); + config.ECS_CONTAINER_METADATA_URI_V4 = 'url'; + await setInstanceId(); + const instanceId = getInstanceId(); + await setInstanceId(); + + expect(getInstanceId()).toEqual(instanceId); + expect(axiosRequest).toHaveBeenCalledTimes(1); + }); + + it('should log error and set instance id to uuid if request errors', async () => { + jest.spyOn(config, 'isStaging').mockReturnValue(true); + config.ECS_CONTAINER_METADATA_URI_V4 = 'url'; + const loggerErrorSpy = jest.spyOn(logger, 'error'); + const emptyInstanceId = getInstanceId(); + asMock(axiosRequest).mockRejectedValueOnce(new Error()); + await setInstanceId(); + + expect(loggerErrorSpy).toHaveBeenCalledTimes(1); + expect(getInstanceId()).not.toEqual(emptyInstanceId); + }); + + it('should not call metadata endpoint if url is empty', async () => { + jest.spyOn(config, 'isStaging').mockReturnValue(true); + await setInstanceId(); + + 
expect(axiosRequest).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/indexer/packages/base/package.json b/indexer/packages/base/package.json index 316f38de61c..25219008dfc 100644 --- a/indexer/packages/base/package.json +++ b/indexer/packages/base/package.json @@ -2,7 +2,7 @@ "name": "@dydxprotocol-indexer/base", "version": "0.0.1", "description": "", - "main": "build/index.js", + "main": "build/src/index.js", "devDependencies": { "@dydxprotocol-indexer/dev": "workspace:^0.0.1", "@types/big.js": "^6.1.5", @@ -10,6 +10,7 @@ "@types/lodash": "^4.14.182", "@types/traverse": "^0.6.32", "express": "^4.18.1", + "jest": "^28.1.2", "typescript": "^4.7.4" }, "scripts": { @@ -18,7 +19,7 @@ "build": "rm -rf build/ && tsc", "build:prod": "pnpm run build", "build:watch": "pnpm run build -- --watch", - "test": "echo \"Error: no test specified\"" + "test": "NODE_ENV=test jest --runInBand --forceExit" }, "repository": { "type": "git", @@ -31,6 +32,7 @@ }, "homepage": "https://github.com/dydxprotocol/indexer#readme", "dependencies": { + "@aws-sdk/client-ec2": "^3.354.0", "axios": "^1.2.1", "big.js": "^6.2.1", "bignumber.js": "^9.0.2", @@ -38,6 +40,7 @@ "hot-shots": "^9.1.0", "lodash": "^4.17.21", "traverse": "^0.6.6", + "uuid": "^8.3.2", "winston": "^3.8.1", "winston-transport": "^4.5.0", "@bugsnag/core": "^7.18.0", diff --git a/indexer/packages/base/src/axios/errors.ts b/indexer/packages/base/src/axios/errors.ts index 4e1726793ae..7058aaf5fcf 100644 --- a/indexer/packages/base/src/axios/errors.ts +++ b/indexer/packages/base/src/axios/errors.ts @@ -1,8 +1,8 @@ import { WrappedError } from '../errors'; export interface AxiosOriginalError extends Error { - isAxiosError: true; - toJSON(): Error; + isAxiosError: true, + toJSON(): Error, } export interface AxiosErrorResponse { diff --git a/indexer/packages/base/src/az-id.ts b/indexer/packages/base/src/az-id.ts new file mode 100644 index 00000000000..d790c5f8ccb --- /dev/null +++ b/indexer/packages/base/src/az-id.ts @@ -0,0 +1,51 
@@ +import { DescribeAvailabilityZonesCommand, EC2Client } from '@aws-sdk/client-ec2'; + +import { axiosRequest } from './axios'; +import config from './config'; +import logger from './logger'; + +export async function getAvailabilityZoneId(): Promise { + if (config.ECS_CONTAINER_METADATA_URI_V4 !== '' && config.AWS_REGION !== '') { + const taskUrl = `${config.ECS_CONTAINER_METADATA_URI_V4}/task`; + try { + const response = await axiosRequest({ + method: 'GET', + url: taskUrl, + }) as { AvailabilityZone: string }; + const client = new EC2Client({ region: config.AWS_REGION }); + const command = new DescribeAvailabilityZonesCommand({ + ZoneNames: [response.AvailabilityZone], + }); + try { + const ec2Response = await client.send(command); + const zoneId = ec2Response.AvailabilityZones![0].ZoneId!; + logger.info({ + at: 'az-id#getAvailabilityZoneId', + message: `Got availability zone id ${zoneId}.`, + }); + return ec2Response.AvailabilityZones![0].ZoneId!; + } catch (error) { + logger.error({ + at: 'az-id#getAvailabilityZoneId', + message: 'Failed to fetch availabilty zone id from EC2. ', + error, + }); + return ''; + } + } catch (error) { + logger.error({ + at: 'az-id#getAvailabilityZoneId', + message: 'Failed to retrieve availability zone from metadata endpoint. No availabilty zone id found.', + error, + taskUrl, + }); + return ''; + } + } else { + logger.error({ + at: 'az-id#getAvailabilityZoneId', + message: 'No metadata URI or region. No availabilty zone id found.', + }); + return ''; + } +} diff --git a/indexer/packages/base/src/config-util.ts b/indexer/packages/base/src/config-util.ts index a4c5690b3d9..295689878ef 100644 --- a/indexer/packages/base/src/config-util.ts +++ b/indexer/packages/base/src/config-util.ts @@ -48,10 +48,10 @@ interface ParseOptions { // If `default` is present, then the environment variable will be optional and will default to the // value of `default` when unset. 
In particular, `default` may be null in which case the config // value will be null when the environment variable is not set. - default: T; + default: T, // Can be specified to ensure the default value is not used when running in a certain NODE_ENV. - requireInEnv?: NodeEnv[]; + requireInEnv?: NodeEnv[], } const NODE_ENV = process.env.NODE_ENV; @@ -229,10 +229,10 @@ export function parseSchema( ): { [K in keyof T]: T[K] extends ParseFn ? U : never; } & { - isDevelopment: () => boolean; - isStaging: () => boolean; - isProduction: () => boolean; - isTest: () => boolean; + isDevelopment: () => boolean, + isStaging: () => boolean, + isProduction: () => boolean, + isTest: () => boolean, } { const config = _.mapValues(schema, (parseFn: ParseFn, varName: string) => { const fullVarName = prefix ? `${prefix}_${varName}` : varName; diff --git a/indexer/packages/base/src/config.ts b/indexer/packages/base/src/config.ts index 65d7b2663c4..4cee7802e73 100644 --- a/indexer/packages/base/src/config.ts +++ b/indexer/packages/base/src/config.ts @@ -30,7 +30,7 @@ export const baseConfigSchema = { SEND_BUGSNAG_ERRORS: parseBoolean({ default: true, }), - SERVICE_NAME: parseString(), + SERVICE_NAME: parseString({ default: '' }), // Optional environment variables. 
NODE_ENV: parseString({ default: null }), @@ -38,6 +38,8 @@ export const baseConfigSchema = { STATSD_HOST: parseString({ default: 'localhost' }), STATSD_PORT: parseInteger({ default: 8125 }), LOG_LEVEL: parseString({ default: 'debug' }), + ECS_CONTAINER_METADATA_URI_V4: parseString({ default: '' }), + AWS_REGION: parseString({ default: '' }), }; export default parseSchema(baseConfigSchema); diff --git a/indexer/packages/base/src/index.ts b/indexer/packages/base/src/index.ts index 2bd3784a9c5..77bbdf34e84 100644 --- a/indexer/packages/base/src/index.ts +++ b/indexer/packages/base/src/index.ts @@ -13,6 +13,8 @@ export * from './constants'; export * from './bugsnag'; export * from './stats-util'; export * from './date-helpers'; +export * from './instance-id'; +export * from './az-id'; // Do this outside logger.ts to avoid a dependency cycle with logger transports that may trigger // additional logging. diff --git a/indexer/packages/base/src/instance-id.ts b/indexer/packages/base/src/instance-id.ts new file mode 100644 index 00000000000..5db6dfdba91 --- /dev/null +++ b/indexer/packages/base/src/instance-id.ts @@ -0,0 +1,51 @@ +import { v4 as uuidv4 } from 'uuid'; + +import { axiosRequest } from './axios'; +import config from './config'; +import logger from './logger'; + +let INSTANCE_ID: string = ''; + +export function getInstanceId(): string { + return INSTANCE_ID; +} + +export async function setInstanceId(): Promise { + if (INSTANCE_ID !== '') { + return; + } + if (config.ECS_CONTAINER_METADATA_URI_V4 !== '' && + ( + config.isProduction() || config.isStaging() + ) + ) { + // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-metadata-endpoint-v4.html + const taskUrl = `${config.ECS_CONTAINER_METADATA_URI_V4}/task`; + try { + const response = await axiosRequest({ + method: 'GET', + url: taskUrl, + }) as { TaskARN: string }; + INSTANCE_ID = response.TaskARN; + } catch (error) { + logger.error({ + at: 'instance-id#setInstanceId', + message: 'Failed to 
retrieve task arn from metadata endpoint. Falling back to uuid.', + error, + taskUrl, + }); + INSTANCE_ID = uuidv4(); + } + } else { + INSTANCE_ID = uuidv4(); + + } +} + +// Exported for tests +export function resetForTests(): void { + if (!config.isTest()) { + throw new Error(`resetForTests() cannot be called for env: ${config.NODE_ENV}`); + } + INSTANCE_ID = ''; +} diff --git a/indexer/packages/base/src/logger.ts b/indexer/packages/base/src/logger.ts index 5b321b154fa..b33cd621f4d 100644 --- a/indexer/packages/base/src/logger.ts +++ b/indexer/packages/base/src/logger.ts @@ -5,7 +5,6 @@ import winston from 'winston'; import config from './config'; -import { redact } from './sanitization'; import { InfoObject } from './types'; // Fix types. The methods available depend on the levels used. We're using syslog levels, so these @@ -14,21 +13,21 @@ type UnusedLevels = 'warn' | 'help' | 'data' | 'prompt' | 'http' | 'verbose' | ' // Enforce type constraints on the objects passed into Winston logging functions. interface LeveledLogMethod { - (infoObject: InfoObject): winston.Logger; + (infoObject: InfoObject): winston.Logger, } // Exclude the functions whose type we want to change from the base definition. This seems to be // enough (and the only way I've found) to trick TypeScript into accepting the modified LoggerExport // as a valid extension of the base winston.Logger type. 
type SyslogLevels = 'emerg' | 'alert' | 'crit' | 'error' | 'warning' | 'notice' | 'info' | 'debug'; export interface LoggerExport extends Omit { - emerg: LeveledLogMethod; - alert: LeveledLogMethod; - crit: LeveledLogMethod; - error: LeveledLogMethod; - warning: LeveledLogMethod; - notice: LeveledLogMethod; - info: LeveledLogMethod; - debug: LeveledLogMethod; + emerg: LeveledLogMethod, + alert: LeveledLogMethod, + crit: LeveledLogMethod, + error: LeveledLogMethod, + warning: LeveledLogMethod, + notice: LeveledLogMethod, + info: LeveledLogMethod, + debug: LeveledLogMethod, } const logger: LoggerExport = winston.createLogger({ @@ -38,8 +37,7 @@ const logger: LoggerExport = winston.createLogger({ winston.format((info) => { return { ...info, // info contains some symbols that are lost when the object is cloned. - ...redact(info), - error: info.error, // cloning with redact() may break the error object + error: info.error, }; })(), winston.format.json(), diff --git a/indexer/packages/base/src/sanitization.ts b/indexer/packages/base/src/sanitization.ts index ca23347747b..05ae61622dd 100644 --- a/indexer/packages/base/src/sanitization.ts +++ b/indexer/packages/base/src/sanitization.ts @@ -1,32 +1,7 @@ import traverse from 'traverse'; -// Common request headers which should be redacted. Normalized to all-lowercase. -export const DEFAULT_SECRET_KEYS = [ - 'Authorization', - 'X-Routing-Key', // Used by PagerDuty. -]; - -const DEFAULT_REDACTED_PLACEHOLDER = '[REDACTED]'; const JSON_CIRCULAR_PLACEHOLDER = '[CIRCULAR]'; -/** - * Creates a deep copy of an object with values redacted where the key matches `secretKeys`. 
- */ -export function redact( - obj: T, - secretKeys: string[] = DEFAULT_SECRET_KEYS, - placeholder: string = DEFAULT_REDACTED_PLACEHOLDER, -): T { - const normalizedSecretKeys = secretKeys.map((s) => s.toLowerCase()); - - // eslint-disable-next-line array-callback-return - return traverse(obj).map(function traverseFunction(this: traverse.TraverseContext, value: {}) { - if (normalizedSecretKeys.includes(this.key?.toLowerCase() as string) && value !== null) { - this.update(placeholder); - } - }); -} - /** * Creates a deep copy of an object with circular references removed or replaced. */ diff --git a/indexer/packages/base/src/types.ts b/indexer/packages/base/src/types.ts index 532b4a88a69..e15ad43df35 100644 --- a/indexer/packages/base/src/types.ts +++ b/indexer/packages/base/src/types.ts @@ -21,8 +21,8 @@ export enum BugsnagReleaseStage { } export interface PagerDutyInfo { - message: {}; - id?: string; + message: {}, + id?: string, } // Enforce type constraints on the objects passed into Winston logging functions. 
diff --git a/indexer/packages/base/tsconfig.json b/indexer/packages/base/tsconfig.json index 8914796c0bc..ece83b94c06 100644 --- a/indexer/packages/base/tsconfig.json +++ b/indexer/packages/base/tsconfig.json @@ -4,6 +4,7 @@ "outDir": "build" }, "include": [ - "src" + "src", + "__tests__" ] } \ No newline at end of file diff --git a/indexer/packages/compliance/__tests__/geoblocking/restrict-countries.test.ts b/indexer/packages/compliance/__tests__/geoblocking/restrict-countries.test.ts index e03326c58a5..12f6bdb2bd7 100644 --- a/indexer/packages/compliance/__tests__/geoblocking/restrict-countries.test.ts +++ b/indexer/packages/compliance/__tests__/geoblocking/restrict-countries.test.ts @@ -1,6 +1,7 @@ import { isRestrictedCountryHeaders, CountryHeaders, + isWhitelistedAddress, } from '../../src/geoblocking/restrict-countries'; import * as util from '../../src/geoblocking/util'; import config from '../../src/config'; @@ -47,3 +48,17 @@ describe('isRestrictedCountryHeaders', () => { expect(isRestrictedCountryHeaders({})).toEqual(true); }); }); + +describe('isWhitelistedAddress', () => { + it('returns true if address is whitelisted', () => { + config.WHITELISTED_ADDRESSES = '0x123,0x456'; + + expect(isWhitelistedAddress('0x123')).toEqual(true); + }); + + it('returns false if address is not whitelisted', () => { + config.WHITELISTED_ADDRESSES = '0x123,0x456'; + + expect(isWhitelistedAddress('0x789')).toEqual(false); + }); +}); diff --git a/indexer/packages/compliance/src/clients/elliptic-provider.ts b/indexer/packages/compliance/src/clients/elliptic-provider.ts index e5370ef24e4..90044700470 100644 --- a/indexer/packages/compliance/src/clients/elliptic-provider.ts +++ b/indexer/packages/compliance/src/clients/elliptic-provider.ts @@ -26,6 +26,8 @@ export const API_PATH: string = '/v2/wallet/synchronous'; export const API_URI: string = `https://aml-api.elliptic.co${API_PATH}`; export const RISK_SCORE_KEY: string = 'risk_score'; export const 
NO_RULES_TRIGGERED_RISK_SCORE: number = -1; +// We use different negative values of risk score to represent different elliptic response states +export const NOT_IN_BLOCKCHAIN_RISK_SCORE: number = -2; export class EllipticProviderClient extends ComplianceClient { private apiKey: string; @@ -73,9 +75,17 @@ export class EllipticProviderClient extends ComplianceClient { message: 'Malformed response from Elliptic', response, }); + stats.increment( + `${config.SERVICE_NAME}.get_elliptic_risk_score.status_code`, + { status: 'malformed' }, + ); throw new ComplianceClientError('Malformed response'); } + stats.increment( + `${config.SERVICE_NAME}.get_elliptic_risk_score.status_code`, + { status: '200' }, + ); if (riskScore === null) { return NO_RULES_TRIGGERED_RISK_SCORE; } @@ -86,14 +96,26 @@ export class EllipticProviderClient extends ComplianceClient { error?.response?.status === 404 && error?.response?.data?.name === 'NotInBlockchain' ) { - return NO_RULES_TRIGGERED_RISK_SCORE; + stats.increment( + `${config.SERVICE_NAME}.get_elliptic_risk_score.status_code`, + { status: '404' }, + ); + return NOT_IN_BLOCKCHAIN_RISK_SCORE; } if (error?.response?.status === 429) { + stats.increment( + `${config.SERVICE_NAME}.get_elliptic_risk_score.status_code`, + { status: '429' }, + ); throw new TooManyRequestsError('Too many requests'); } if (error?.response?.status === 500 && retries < config.ELLIPTIC_MAX_RETRIES) { + stats.increment( + `${config.SERVICE_NAME}.get_elliptic_risk_score.status_code`, + { status: '500' }, + ); return this.getRiskScore(address, retries + 1); } diff --git a/indexer/packages/compliance/src/config.ts b/indexer/packages/compliance/src/config.ts index 3902806256f..40fd6413390 100644 --- a/indexer/packages/compliance/src/config.ts +++ b/indexer/packages/compliance/src/config.ts @@ -22,6 +22,11 @@ export const complianceConfigSchema = { default: '', // comma de-limited }), + // Whitelisted list of dydx addresses + WHITELISTED_ADDRESSES: parseString({ + default: 
'', // comma de-limited + }), + // Required environment variables. ELLIPTIC_API_KEY: parseString({ default: 'default_elliptic_api_key' }), ELLIPTIC_API_SECRET: parseString({ default: '' }), diff --git a/indexer/packages/compliance/src/geoblocking/restrict-countries.ts b/indexer/packages/compliance/src/geoblocking/restrict-countries.ts index bd04c9f0d3f..e9717a54d0d 100644 --- a/indexer/packages/compliance/src/geoblocking/restrict-countries.ts +++ b/indexer/packages/compliance/src/geoblocking/restrict-countries.ts @@ -31,3 +31,7 @@ export function isRestrictedCountryHeaders(headers: CountryHeaders): boolean { return false; } + +export function isWhitelistedAddress(address: string): boolean { + return config.WHITELISTED_ADDRESSES.split(',').includes(address); +} diff --git a/indexer/packages/compliance/src/index.ts b/indexer/packages/compliance/src/index.ts index b3a587079a2..0fc7422a052 100644 --- a/indexer/packages/compliance/src/index.ts +++ b/indexer/packages/compliance/src/index.ts @@ -5,3 +5,4 @@ export * from './geoblocking/util'; export * from './types'; export * from './config'; export * from './constants'; +export * from './clients/elliptic-provider'; diff --git a/indexer/packages/dev/.eslintrc.js b/indexer/packages/dev/.eslintrc.js index 0af3a7005d5..ca059b01166 100644 --- a/indexer/packages/dev/.eslintrc.js +++ b/indexer/packages/dev/.eslintrc.js @@ -62,6 +62,7 @@ module.exports = { ], 'import/no-extraneous-dependencies': 'off', 'import/prefer-default-export': 'off', + 'key-spacing': ['error', { beforeColon: false, afterColon: true }], 'max-classes-per-file': 'off', 'no-await-in-loop': 'off', 'no-continue': 'off', @@ -139,6 +140,16 @@ module.exports = { 'always', { exceptAfterSingleLine: true }, ], + '@typescript-eslint/member-delimiter-style': ['error', { + multiline: { + delimiter: 'comma', + requireLast: true, + }, + singleline: { + delimiter: 'comma', + requireLast: false, + }, + }], '@typescript-eslint/naming-convention': ['error', { selector: 
'variableLike', diff --git a/indexer/packages/kafka/__tests__/batch-kafka-producer.test.ts b/indexer/packages/kafka/__tests__/batch-kafka-producer.test.ts index f62b28423b2..4da777c1346 100644 --- a/indexer/packages/kafka/__tests__/batch-kafka-producer.test.ts +++ b/indexer/packages/kafka/__tests__/batch-kafka-producer.test.ts @@ -1,16 +1,18 @@ import { KafkaTopics } from '../src'; import { BatchKafkaProducer, ProducerMessage } from '../src/batch-kafka-producer'; import { producer } from '../src/producer'; +import { IHeaders } from 'kafkajs'; import _ from 'lodash'; interface TestMessage { key?: string, value: string, + headers?: IHeaders, } function testMessage2ProducerMessage(data: TestMessage): ProducerMessage { const key: Buffer | undefined = data.key === undefined ? undefined : Buffer.from(data.key); - return { key, value: Buffer.from(data.value) }; + return { key, value: Buffer.from(data.value), headers: data.headers }; } function testMessage2ProducerMessages(data: TestMessage[]): ProducerMessage[] { @@ -35,9 +37,9 @@ describe('batch-kafka-producer', () => { [ 'will send key if key is not undefined', 5, - [{ key: '1', value: 'a' }, { key: '2', value: 'b' }, { key: '3', value: 'c' }], + [{ key: '1', value: 'a' }, { key: '2', value: 'b' }, { key: '3', value: 'c', headers: { timestamp: 'value' } }], [[{ key: '1', value: 'a' }, { key: '2', value: 'b' }]], - [{ key: '3', value: 'c' }], + [{ key: '3', value: 'c', headers: { timestamp: 'value' } }], ], [ 'will not send message until the batch size is reached', @@ -104,7 +106,9 @@ describe('batch-kafka-producer', () => { for (const msg of messages) { const key: Buffer | undefined = msg.key === undefined ? 
undefined : Buffer.from(msg.key); - batchProducer.addMessageAndMaybeFlush({ value: Buffer.from(msg.value), key }); + batchProducer.addMessageAndMaybeFlush( + { value: Buffer.from(msg.value), key, headers: msg.headers }, + ); } expect(producerSendMock.mock.calls).toHaveLength(expectedMessagesPerCall.length); diff --git a/indexer/packages/kafka/__tests__/consumer.test.ts b/indexer/packages/kafka/__tests__/consumer.test.ts index de801b2dfe4..e05d67d5e3c 100644 --- a/indexer/packages/kafka/__tests__/consumer.test.ts +++ b/indexer/packages/kafka/__tests__/consumer.test.ts @@ -10,10 +10,10 @@ import { TO_ENDER_TOPIC } from '../src'; describe.skip('consumer', () => { beforeAll(async () => { await Promise.all([ - consumer.connect(), + consumer!.connect(), producer.connect(), ]); - await consumer.subscribe({ topic: TO_ENDER_TOPIC }); + await consumer!.subscribe({ topic: TO_ENDER_TOPIC }); await startConsumer(); }); diff --git a/indexer/packages/kafka/package.json b/indexer/packages/kafka/package.json index 284f08173a1..1c463ace03a 100644 --- a/indexer/packages/kafka/package.json +++ b/indexer/packages/kafka/package.json @@ -32,8 +32,10 @@ "homepage": "https://github.com/dydxprotocol/indexer#readme", "dependencies": { "@dydxprotocol-indexer/base": "workspace:^0.0.1", + "@dydxprotocol-indexer/postgres": "workspace:^0.0.1", + "@dydxprotocol-indexer/v4-protos": "workspace:^0.0.1", "dotenv-flow": "^3.2.0", - "kafkajs": "^2.1.0", + "kafkajs": "^2.2.4", "lodash": "^4.17.21", "uuid": "^8.3.2" } diff --git a/indexer/packages/kafka/src/batch-kafka-producer.ts b/indexer/packages/kafka/src/batch-kafka-producer.ts index 6885e29ae99..5bc63f7a415 100644 --- a/indexer/packages/kafka/src/batch-kafka-producer.ts +++ b/indexer/packages/kafka/src/batch-kafka-producer.ts @@ -1,7 +1,8 @@ -import { logger } from '@dydxprotocol-indexer/base'; -import { Producer, RecordMetadata } from 'kafkajs'; +import { logger, stats } from '@dydxprotocol-indexer/base'; +import { IHeaders, Producer, 
RecordMetadata } from 'kafkajs'; import _ from 'lodash'; +import config from './config'; import { KafkaTopics } from './types'; /** @@ -10,6 +11,7 @@ import { KafkaTopics } from './types'; export type ProducerMessage = { key?: Buffer, value: Buffer, + headers?: IHeaders, }; /** @@ -52,7 +54,7 @@ export class BatchKafkaProducer { if (this.currentSize + msgBuffer.byteLength + keyByteLength > this.maxBatchSizeBytes) { this.sendBatch(); } - this.producerMessages.push({ key: message.key, value: msgBuffer }); + this.producerMessages.push({ key: message.key, value: msgBuffer, headers: message.headers }); this.currentSize += msgBuffer.byteLength; this.currentSize += keyByteLength; } @@ -64,6 +66,7 @@ export class BatchKafkaProducer { } private sendBatch(): void { + const startTime: number = Date.now(); if (!_.isEmpty(this.producerMessages)) { this.producerPromises.push( this.producer.send({ topic: this.topic, messages: this.producerMessages }), @@ -79,7 +82,10 @@ export class BatchKafkaProducer { 0, ), topic: this.topic, + sendTime: Date.now() - startTime, }); + stats.gauge(`${config.SERVICE_NAME}.kafka_batch_size`, this.currentSize); + stats.timing(`${config.SERVICE_NAME}.kafka_batch_send_time`, Date.now() - startTime); this.producerMessages = []; this.currentSize = 0; } diff --git a/indexer/packages/kafka/src/config.ts b/indexer/packages/kafka/src/config.ts index d98df029925..55eadb5a263 100644 --- a/indexer/packages/kafka/src/config.ts +++ b/indexer/packages/kafka/src/config.ts @@ -23,6 +23,12 @@ export const kafkaConfigSchema = { KAFKA_REBALANCE_TIMEOUT_MS: parseInteger({ default: 50_000 }), KAFKA_HEARTBEAT_INTERVAL_MS: parseInteger({ default: 5_000 }), KAFKA_CONCURRENT_PARTITIONS: parseInteger({ default: 1 }), + // The number of messages to process before committing the offset. + KAFKA_CONSUMER_AUTO_COMMIT_THRESHOLD: parseInteger({ default: 100 }), + // The interval at which the consumer will commit the offset. 
+ // Note that the consumer will respect both the commit threshold and the commit interval + // config, whichever comes first. + KAFKA_CONSUMER_AUTO_COMMIT_INTERVAL_MS: parseInteger({ default: 5_000 }), // If true, consumers will have unique group ids, and SERVICE_NAME will be a common prefix for // the consumer group ids. KAFKA_ENABLE_UNIQUE_CONSUMER_GROUP_IDS: parseBoolean({ default: false }), @@ -30,6 +36,7 @@ export const kafkaConfigSchema = { KAFKA_MAX_BATCH_WEBSOCKET_MESSAGE_SIZE_BYTES: parseInteger({ default: 900000, // ~900 kB, 100 kB smaller than the 1 MB default max size of messages in Kafka }), + KAFKA_WAIT_MAX_TIME_MS: parseInteger({ default: 5_000 }), }; export default parseSchema(kafkaConfigSchema); diff --git a/indexer/packages/kafka/src/constants.ts b/indexer/packages/kafka/src/constants.ts index 9f28e5d29e3..01b5a3712bf 100644 --- a/indexer/packages/kafka/src/constants.ts +++ b/indexer/packages/kafka/src/constants.ts @@ -1,7 +1,8 @@ export const TO_ENDER_TOPIC: string = 'to-ender'; export const ORDERBOOKS_WEBSOCKET_MESSAGE_VERSION: string = '1.0.0'; -export const SUBACCOUNTS_WEBSOCKET_MESSAGE_VERSION: string = '2.4.0'; +export const SUBACCOUNTS_WEBSOCKET_MESSAGE_VERSION: string = '3.0.0'; export const TRADES_WEBSOCKET_MESSAGE_VERSION: string = '2.1.0'; export const MARKETS_WEBSOCKET_MESSAGE_VERSION: string = '1.0.0'; export const CANDLES_WEBSOCKET_MESSAGE_VERSION: string = '1.0.0'; +export const BLOCK_HEIGHT_WEBSOCKET_MESSAGE_VERSION: string = '1.0.0'; diff --git a/indexer/packages/kafka/src/consumer.ts b/indexer/packages/kafka/src/consumer.ts index b366bd433ad..82c26cf1e02 100644 --- a/indexer/packages/kafka/src/consumer.ts +++ b/indexer/packages/kafka/src/consumer.ts @@ -1,4 +1,5 @@ import { + getAvailabilityZoneId, logger, } from '@dydxprotocol-indexer/base'; import { @@ -13,14 +14,10 @@ const groupIdPrefix: string = config.SERVICE_NAME; const groupIdSuffix: string = config.KAFKA_ENABLE_UNIQUE_CONSUMER_GROUP_IDS ? 
`_${uuidv4()}` : ''; const groupId: string = `${groupIdPrefix}${groupIdSuffix}`; -export const consumer: Consumer = kafka.consumer({ - groupId, - sessionTimeout: config.KAFKA_SESSION_TIMEOUT_MS, - rebalanceTimeout: config.KAFKA_REBALANCE_TIMEOUT_MS, - heartbeatInterval: config.KAFKA_HEARTBEAT_INTERVAL_MS, - readUncommitted: false, - maxBytes: 4194304, // 4MB -}); +// As a hack, we made this mutable since CommonJS doesn't support top level await. +// Top level await would needed to fetch the az id (used as rack id). +// eslint-disable-next-line import/no-mutable-exports +export let consumer: Consumer | undefined; // List of functions to run per message consumed. let onMessageFunction: (topic: string, message: KafkaMessage) => Promise; @@ -50,38 +47,51 @@ export function updateOnBatchFunction( // Whether the consumer is stopped. let stopped: boolean = false; -consumer.on('consumer.disconnect', async () => { +export async function stopConsumer(): Promise { logger.info({ - at: 'consumers#disconnect', - message: 'Kafka consumer disconnected', + at: 'kafka-consumer#stop', + message: 'Stopping kafka consumer', groupId, }); - if (!stopped) { - await consumer.connect(); - logger.info({ - at: 'kafka-consumer#disconnect', - message: 'Kafka consumer reconnected', - groupId, - }); - } else { + stopped = true; + await consumer!.disconnect(); +} + +export async function initConsumer(): Promise { + consumer = kafka.consumer({ + groupId, + sessionTimeout: config.KAFKA_SESSION_TIMEOUT_MS, + rebalanceTimeout: config.KAFKA_REBALANCE_TIMEOUT_MS, + heartbeatInterval: config.KAFKA_HEARTBEAT_INTERVAL_MS, + maxWaitTimeInMs: config.KAFKA_WAIT_MAX_TIME_MS, + readUncommitted: false, + maxBytes: 4194304, // 4MB + rackId: await getAvailabilityZoneId(), + }); + + consumer!.on('consumer.disconnect', async () => { logger.info({ - at: 'kafka-consumer#disconnect', - message: 'Not reconnecting since task is shutting down', + at: 'consumers#disconnect', + message: 'Kafka consumer disconnected', 
groupId, }); - } -}); -export async function stopConsumer(): Promise { - logger.info({ - at: 'kafka-consumer#stop', - message: 'Stopping kafka consumer', - groupId, + if (!stopped) { + await consumer!.connect(); + logger.info({ + at: 'kafka-consumer#disconnect', + message: 'Kafka consumer reconnected', + groupId, + }); + } else { + logger.info({ + at: 'kafka-consumer#disconnect', + message: 'Not reconnecting since task is shutting down', + groupId, + }); + } }); - - stopped = true; - await consumer.disconnect(); } export async function startConsumer(batchProcessing: boolean = false): Promise { @@ -91,6 +101,8 @@ export async function startConsumer(batchProcessing: boolean = false): Promise", language: 'en' }``` + +`token`: A valid push notification token generated by the Google Firebase SDK. + +`language`: A string representing the user's preferred language, following the ISO 639-1 standard. This must be one of the supported languages listed below: + +- 'en' (English) +- 'es' (Spanish) +- 'fr' (French) +- 'de' (German) +- 'it' (Italian) +- 'ja' (Japanese) +- 'ko' (Korean) +- 'zh' (Chinese) diff --git a/indexer/packages/notifications/__tests__/localization.test.ts b/indexer/packages/notifications/__tests__/localization.test.ts new file mode 100644 index 00000000000..c3051ce4e70 --- /dev/null +++ b/indexer/packages/notifications/__tests__/localization.test.ts @@ -0,0 +1,65 @@ +import { + deriveLocalizedNotificationMessage, +} from '../src/localization'; +import { + NotificationType, + NotificationDynamicFieldKey, + createNotification, + isValidLanguageCode, +} from '../src/types'; + +describe('deriveLocalizedNotificationMessage', () => { + test('should generate a correct message for DepositSuccessNotification', () => { + const notification = createNotification(NotificationType.DEPOSIT_SUCCESS, { + [NotificationDynamicFieldKey.AMOUNT]: '1000', + [NotificationDynamicFieldKey.MARKET]: 'USDT', + }); + + const expected = { + title: 'Deposit Successful', + body: 'You have 
successfully deposited 1000 USDT to your dYdX account.', + }; + + const result = deriveLocalizedNotificationMessage(notification); + expect(result).toEqual(expected); + }); + + test('should generate a correct message for OrderFilledNotification', () => { + const notification = createNotification(NotificationType.ORDER_FILLED, { + [NotificationDynamicFieldKey.MARKET]: 'BTC/USD', + [NotificationDynamicFieldKey.AVERAGE_PRICE]: '45000', + [NotificationDynamicFieldKey.AMOUNT]: '1000', + }); + + const expected = { + title: 'Order Filled', + body: 'Your order for 1000 BTC/USD was filled at $45000', + }; + + const result = deriveLocalizedNotificationMessage(notification); + expect(result).toEqual(expected); + }); + + describe('isValidLanguageCode', () => { + test('should return true for valid language codes', () => { + const validCodes = ['en', 'es', 'fr', 'de', 'it', 'ja', 'ko', 'zh']; + validCodes.forEach((code) => { + expect(isValidLanguageCode(code)).toBe(true); + }); + }); + + test('should return false for invalid language codes', () => { + const invalidCodes = ['', 'EN', 'eng', 'esp', 'fra', 'deu', 'ita', 'jpn', 'kor', 'zho', 'xx']; + invalidCodes.forEach((code) => { + expect(isValidLanguageCode(code)).toBe(false); + }); + }); + + test('should return false for non-string inputs', () => { + const nonStringInputs = [null, undefined, 123, {}, []]; + nonStringInputs.forEach((input) => { + expect(isValidLanguageCode(input as any)).toBe(false); + }); + }); + }); +}); diff --git a/indexer/packages/notifications/__tests__/message.test.ts b/indexer/packages/notifications/__tests__/message.test.ts new file mode 100644 index 00000000000..b17cf0ce992 --- /dev/null +++ b/indexer/packages/notifications/__tests__/message.test.ts @@ -0,0 +1,65 @@ +import { logger } from '@dydxprotocol-indexer/base'; +import { sendFirebaseMessage } from '../src/message'; +import { sendMulticast } from '../src/lib/firebase'; +import { createNotification, NotificationType } from '../src/types'; + 
+jest.mock('../src/lib/firebase', () => ({ + sendMulticast: jest.fn(), +})); + +describe('sendFirebaseMessage', () => { + let loggerInfoSpy: jest.SpyInstance; + let loggerWarnSpy: jest.SpyInstance; + let loggerErrorSpy: jest.SpyInstance; + + beforeAll(() => { + loggerInfoSpy = jest.spyOn(logger, 'info').mockImplementation(); + loggerWarnSpy = jest.spyOn(logger, 'warning').mockImplementation(); + loggerErrorSpy = jest.spyOn(logger, 'error').mockImplementation(); + }); + + afterAll(() => { + loggerInfoSpy.mockRestore(); + loggerWarnSpy.mockRestore(); + loggerErrorSpy.mockRestore(); + }); + + const defaultToken = { + token: 'faketoken', + language: 'en', + }; + + const mockNotification = createNotification(NotificationType.ORDER_FILLED, { + AMOUNT: '10', + MARKET: 'BTC-USD', + AVERAGE_PRICE: '100.50', + }); + + it('should send a Firebase message successfully', async () => { + await sendFirebaseMessage( + [{ token: defaultToken.token, language: defaultToken.language }], + mockNotification, + ); + + expect(sendMulticast).toHaveBeenCalledWith(expect.objectContaining( + { + tokens: [defaultToken.token], + notification: { body: 'Your order for 10 BTC-USD was filled at $100.50', title: 'Order Filled' }, + })); + }); + + it('should log an error if sending the message fails', async () => { + const mockedSendMulticast = sendMulticast as jest.MockedFunction; + mockedSendMulticast.mockRejectedValueOnce(new Error('Send failed')); + + await sendFirebaseMessage( + [{ token: defaultToken.token, language: defaultToken.language }], + mockNotification, + ); + + expect(logger.error).toHaveBeenCalledWith(expect.objectContaining({ + message: 'Send failed', + notificationType: mockNotification.type, + })); + }); +}); diff --git a/indexer/packages/notifications/jest.config.js b/indexer/packages/notifications/jest.config.js new file mode 100644 index 00000000000..54982b9e45a --- /dev/null +++ b/indexer/packages/notifications/jest.config.js @@ -0,0 +1,2 @@ +// Use the base configuration 
as-is. +module.exports = require('./node_modules/@dydxprotocol-indexer/dev/jest.config'); diff --git a/indexer/packages/notifications/jest.globalSetup.js b/indexer/packages/notifications/jest.globalSetup.js new file mode 100644 index 00000000000..7fc6c3e5514 --- /dev/null +++ b/indexer/packages/notifications/jest.globalSetup.js @@ -0,0 +1,6 @@ +// This function runs once before all tests. +module.exports = () => { + // This loads the environment variables for tests. + // eslint-disable-next-line global-require + require('dotenv-flow/config'); +}; diff --git a/indexer/packages/notifications/jest.setup.js b/indexer/packages/notifications/jest.setup.js new file mode 100644 index 00000000000..cd2fbe11a25 --- /dev/null +++ b/indexer/packages/notifications/jest.setup.js @@ -0,0 +1 @@ +// This file runs before each test file. diff --git a/indexer/packages/notifications/package.json b/indexer/packages/notifications/package.json new file mode 100644 index 00000000000..f3340fb124c --- /dev/null +++ b/indexer/packages/notifications/package.json @@ -0,0 +1,37 @@ +{ + "name": "@dydxprotocol-indexer/notifications", + "version": "0.0.1", + "description": "", + "main": "build/src/index.js", + "devDependencies": { + "@dydxprotocol-indexer/dev": "workspace:^0.0.1", + "@types/jest": "^28.1.4", + "jest": "^28.1.2", + "typescript": "^4.7.4", + "ts-node": "^10.8.2" + }, + "scripts": { + "lint": "eslint --ext .ts,.js .", + "lint:fix": "eslint --ext .ts,.js . 
--fix", + "build": "rm -rf build/ && tsc", + "build:prod": "pnpm run build", + "build:watch": "pnpm run build -- --watch", + "test": "NODE_ENV=test jest --runInBand --forceExit" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/dydxprotocol/indexer.git" + }, + "author": "", + "license": "AGPL-3.0", + "bugs": { + "url": "https://github.com/dydxprotocol/indexer/issues" + }, + "homepage": "https://github.com/dydxprotocol/indexer#readme", + "dependencies": { + "firebase-admin": "^12.4.0", + "@dydxprotocol-indexer/base": "workspace:^0.0.1", + "@dydxprotocol-indexer/postgres": "workspace:^0.0.1", + "dotenv-flow": "^3.2.0" + } +} diff --git a/indexer/packages/notifications/src/config.ts b/indexer/packages/notifications/src/config.ts new file mode 100644 index 00000000000..abf8d686f7e --- /dev/null +++ b/indexer/packages/notifications/src/config.ts @@ -0,0 +1,27 @@ +/** + * Environment variables required for Notifications module. + */ + +import { + parseString, + parseSchema, + baseConfigSchema, +} from '@dydxprotocol-indexer/base'; + +export const notificationsConfigSchema = { + ...baseConfigSchema, + + // Private Key for the Google Firebase Messaging project + // default is a dummy value + FIREBASE_PRIVATE_KEY_BASE64: parseString({ default: 'LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tClBMQUNFSE9MREVSX0tFWV9GT1JfREVWRUxPUE1FTlQKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo=' }), + + // APP ID for the Google Firebase Messaging project + // default is a dummy value + FIREBASE_PROJECT_ID: parseString({ default: 'dydx-v4' }), + + // Client email for the Google Firebase Messaging project + // default is a dummy value + FIREBASE_CLIENT_EMAIL: parseString({ default: 'firebase-adminsdk-f0joo@dydx-v4.iam.gserviceaccount.com' }), +}; + +export default parseSchema(notificationsConfigSchema); diff --git a/indexer/packages/notifications/src/index.ts b/indexer/packages/notifications/src/index.ts new file mode 100644 index 00000000000..021faadca5b --- /dev/null +++ 
b/indexer/packages/notifications/src/index.ts @@ -0,0 +1,4 @@ +export * from './lib/firebase'; +export * from './localization'; +export * from './types'; +export * from './message'; diff --git a/indexer/packages/notifications/src/lib/firebase.ts b/indexer/packages/notifications/src/lib/firebase.ts new file mode 100644 index 00000000000..addebc7c5ed --- /dev/null +++ b/indexer/packages/notifications/src/lib/firebase.ts @@ -0,0 +1,79 @@ +import { logger } from '@dydxprotocol-indexer/base'; +import { + App, + cert, + initializeApp, +} from 'firebase-admin/app'; +import { getMessaging } from 'firebase-admin/messaging'; + +import config from '../config'; + +// Helper function to initialize Firebase App object that is used to send notifications +function initializeFirebaseApp(): App | undefined { + // Create credentials object from config variables. + // To prevent AWS Secrets Manager from altering the private key, the key is base64 encoded + // before being submitted. Below, we decode the string and replace the escaped + // linebreak characters + const defaultGoogleApplicationCredentials: { [key: string]: string } = { + project_id: config.FIREBASE_PROJECT_ID, + private_key: Buffer.from(config.FIREBASE_PRIVATE_KEY_BASE64, 'base64').toString('ascii').replace(/\\n/g, '\n'), + client_email: config.FIREBASE_CLIENT_EMAIL, + }; + + logger.info({ + at: 'notifications#firebase', + message: 'Initializing Firebase App', + }); + + let firebaseApp: App; + try { + firebaseApp = initializeApp({ + credential: cert(defaultGoogleApplicationCredentials), + }); + } catch (error) { + logger.error({ + at: 'notifications#firebase', + message: 'Failed to initialize Firebase App', + error, + }); + return undefined; + } + + logger.info({ + at: 'notifications#firebase', + message: 'Firebase App initialized successfully', + }); + + return firebaseApp; +} + +const firebaseApp = initializeFirebaseApp(); + +// Initialize Firebase Messaging if the firebaseApp was initialized successfully +// This can 
fail if the credentials passed to the firebaseApp are invalid +let firebaseMessaging = null; +if (firebaseApp) { + try { + firebaseMessaging = getMessaging(firebaseApp); + logger.info({ + at: 'notifications#firebase', + message: 'Firebase Messaging initialized successfully', + }); + } catch (error) { + logger.error({ + at: 'notifications#firebase', + message: 'Firebase Messaging failed to initialize', + }); + } +} + +export const sendMulticast = firebaseMessaging + ? firebaseMessaging.sendEachForMulticast.bind(firebaseMessaging) + : () => { + logger.error({ + at: 'notifications#firebase', + message: 'Firebase Messaging is not initialized, sendEachForMulticast is a no-op', + }); + return Promise.resolve(null); + }; +export { BatchResponse, getMessaging, MulticastMessage } from 'firebase-admin/messaging'; diff --git a/indexer/packages/notifications/src/localization.ts b/indexer/packages/notifications/src/localization.ts new file mode 100644 index 00000000000..c11b7167350 --- /dev/null +++ b/indexer/packages/notifications/src/localization.ts @@ -0,0 +1,41 @@ +import { LOCALIZED_MESSAGES } from './localized-messages'; +import { + Notification, + LanguageCode, +} from './types'; + +function replacePlaceholders(template: string, variables: Record): string { + // The template string contains placeholders in the format "{KEY}". + // For example: 'Your order for {AMOUNT} {MARKET} was filled at ${AVERAGE_PRICE}'. + // This function replaces these placeholders with corresponding values + // from the "variables" object. + // If the key inside "{}" exists in the "variables" object, it is replaced + // with the matching value. + // If the key does not exist, the placeholder remains unchanged in the resulting string. + // The .replace method uses a regular expression to find all words inside "{}" + // and replaces them as described. 
+ return template.replace(/{(\w+)}/g, (_, key) => variables[key] || `{${key}}`); +} + +type NotificationMessage = { + title: string, + body: string, +}; + +export function deriveLocalizedNotificationMessage( + notification: Notification, + languageCode: LanguageCode = 'en', +): NotificationMessage { + const localizationFields = LOCALIZED_MESSAGES[languageCode] || LOCALIZED_MESSAGES.en; + + return { + title: replacePlaceholders( + localizationFields[notification.titleKey], + notification.dynamicValues, + ), + body: replacePlaceholders( + localizationFields[notification.bodyKey], + notification.dynamicValues, + ), + }; +} diff --git a/indexer/packages/notifications/src/localized-messages.ts b/indexer/packages/notifications/src/localized-messages.ts new file mode 100644 index 00000000000..dca2d553726 --- /dev/null +++ b/indexer/packages/notifications/src/localized-messages.ts @@ -0,0 +1,71 @@ +/* eslint-disable no-template-curly-in-string */ +import { + LocalizationKey, LocalizationBodyKey, LocalizationTitleKey, LanguageCode, +} from './types'; + +export const LOCALIZED_MESSAGES: Record> = { + en: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: 'Deposit Successful', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'You have successfully deposited {AMOUNT} {MARKET} to your dYdX account.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: 'Order Filled', + [LocalizationBodyKey.ORDER_FILLED_BODY]: 'Your order for {AMOUNT} {MARKET} was filled at ${AVERAGE_PRICE}', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: '{MARKET} Order Triggered', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: 'Your order for {AMOUNT} {MARKET} was triggered at ${PRICE}', + }, + es: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: 'Depósito Exitoso', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'Has depositado exitosamente {AMOUNT} {MARKET} en tu cuenta dYdX.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: 'Orden Ejecutada', + [LocalizationBodyKey.ORDER_FILLED_BODY]: 'Tu orden de {AMOUNT} {MARKET} se 
ejecutó a ${AVERAGE_PRICE}', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: 'Orden de {MARKET} Activada', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: 'Tu orden de {AMOUNT} {MARKET} se activó a ${PRICE}', + }, + fr: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: 'Dépôt Réussi', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'Vous avez déposé avec succès {AMOUNT} {MARKET} sur votre compte dYdX.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: 'Ordre Exécuté', + [LocalizationBodyKey.ORDER_FILLED_BODY]: 'Votre ordre de {AMOUNT} {MARKET} a été exécuté à ${AVERAGE_PRICE}', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: 'Ordre {MARKET} Déclenché', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: 'Votre ordre de {AMOUNT} {MARKET} a été déclenché à ${PRICE}', + }, + de: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: 'Einzahlung Erfolgreich', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'Sie haben erfolgreich {AMOUNT} {MARKET} auf Ihr dYdX-Konto eingezahlt.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: 'Auftrag Ausgeführt', + [LocalizationBodyKey.ORDER_FILLED_BODY]: 'Ihr Auftrag für {AMOUNT} {MARKET} wurde zu ${AVERAGE_PRICE} ausgeführt', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: '{MARKET} Auftrag Ausgelöst', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: 'Ihr Auftrag für {AMOUNT} {MARKET} wurde bei ${PRICE} ausgelöst', + }, + it: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: 'Deposito Riuscito', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'Hai depositato con successo {AMOUNT} {MARKET} sul tuo account dYdX.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: 'Ordine Eseguito', + [LocalizationBodyKey.ORDER_FILLED_BODY]: 'Il tuo ordine di {AMOUNT} {MARKET} è stato eseguito a ${AVERAGE_PRICE}', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: 'Ordine {MARKET} Attivato', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: 'Il tuo ordine di {AMOUNT} {MARKET} è stato attivato a ${PRICE}', + }, + ja: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: '入金成功', + 
[LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'dYdXアカウントに{AMOUNT} {MARKET}を正常に入金しました。', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: '注文約定', + [LocalizationBodyKey.ORDER_FILLED_BODY]: '{AMOUNT} {MARKET}の注文が${AVERAGE_PRICE}で約定しました', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: '{MARKET}注文トリガー', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: '{AMOUNT} {MARKET}の注文が${PRICE}でトリガーされました', + }, + ko: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: '입금 성공', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: 'dYdX 계정에 {AMOUNT} {MARKET}을(를) 성공적으로 입금했습니다.', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: '주문 체결', + [LocalizationBodyKey.ORDER_FILLED_BODY]: '{AMOUNT} {MARKET} 주문이 ${AVERAGE_PRICE}에 체결되었습니다', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: '{MARKET} 주문 트리거', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: '{AMOUNT} {MARKET} 주문이 ${PRICE}에서 트리거되었습니다', + }, + zh: { + [LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE]: '存款成功', + [LocalizationBodyKey.DEPOSIT_SUCCESS_BODY]: '您已成功将 {AMOUNT} {MARKET} 存入您的 dYdX 账户。', + [LocalizationTitleKey.ORDER_FILLED_TITLE]: '订单已成交', + [LocalizationBodyKey.ORDER_FILLED_BODY]: '您的 {AMOUNT} {MARKET} 订单已以 ${AVERAGE_PRICE} 成交', + [LocalizationTitleKey.ORDER_TRIGGERED_TITLE]: '{MARKET} 订单已触发', + [LocalizationBodyKey.ORDER_TRIGGERED_BODY]: '您的 {AMOUNT} {MARKET} 订单已在 ${PRICE} 触发', + }, +}; diff --git a/indexer/packages/notifications/src/message.ts b/indexer/packages/notifications/src/message.ts new file mode 100644 index 00000000000..9d48b602cf6 --- /dev/null +++ b/indexer/packages/notifications/src/message.ts @@ -0,0 +1,71 @@ +import { logger, stats } from '@dydxprotocol-indexer/base'; + +import config from './config'; +import { + MulticastMessage, + sendMulticast, +} from './lib/firebase'; +import { deriveLocalizedNotificationMessage } from './localization'; +import { LanguageCode, Notification } from './types'; + +export async function sendFirebaseMessage( + tokens: {token: string, language: string}[], + notification: Notification, +): Promise { + const 
start = Date.now(); + + // Each set of tokens for a user should have the same language + const language = tokens[0].language; + const { title, body } = deriveLocalizedNotificationMessage( + notification, + language as LanguageCode, + ); + + const message: MulticastMessage = { + tokens: tokens.map((token) => token.token), + notification: { + title, + body, + }, + fcmOptions: { + analyticsLabel: notification.type.toLowerCase(), + }, + apns: { + payload: { + aps: { + 'mutable-content': 1, + }, + data: { + firebase: { + }, + }, + }, + }, + }; + + try { + const result = await sendMulticast(message); + if (!result || result?.failureCount > 0) { + const errorMessages = result?.responses + .map((response) => response.error?.message) + .filter(Boolean); // Remove any undefined values + + throw new Error(`Failed to send Firebase message: ${errorMessages?.join(', ') || 'Unknown error'}`); + } + + logger.info({ + at: 'notifications#firebase', + message: 'Firebase message sent successfully', + notificationType: notification.type, + }); + } catch (error) { + logger.error({ + at: 'notifications#firebase', + message: error.message, + error: error as Error, + notificationType: notification.type, + }); + } finally { + stats.timing(`${config.SERVICE_NAME}.send_firebase_message.timing`, Date.now() - start); + } +} diff --git a/indexer/packages/notifications/src/types.ts b/indexer/packages/notifications/src/types.ts new file mode 100644 index 00000000000..011dd045580 --- /dev/null +++ b/indexer/packages/notifications/src/types.ts @@ -0,0 +1,147 @@ +// Types of notifications that can be sent +export enum NotificationType { + DEPOSIT_SUCCESS = 'DEPOSIT_SUCCESS', + ORDER_FILLED = 'ORDER_FILLED', + ORDER_TRIGGERED = 'ORDER_TRIGGERED', +} + +// Keys for the dynamic values that are used in the notification messages +// Each key corresponds to a placeholder in the localizable strings for each notification +export enum NotificationDynamicFieldKey { + AMOUNT = 'AMOUNT', + AVERAGE_PRICE = 
'AVERAGE_PRICE', + PRICE = 'PRICE', + FILLED_AMOUNT = 'FILLED_AMOUNT', + MARKET = 'MARKET', + SIDE = 'SIDE', +} + +// Keys for the strings that are contained in the localization file +// for each notification body and title +export enum LocalizationBodyKey { + DEPOSIT_SUCCESS_BODY = 'DEPOSIT_SUCCESS_BODY', + ORDER_FILLED_BODY = 'ORDER_FILLED_BODY', + ORDER_TRIGGERED_BODY = 'ORDER_TRIGGERED_BODY', +} + +export enum LocalizationTitleKey { + DEPOSIT_SUCCESS_TITLE = 'DEPOSIT_SUCCESS_TITLE', + ORDER_FILLED_TITLE = 'ORDER_FILLED_TITLE', + ORDER_TRIGGERED_TITLE = 'ORDER_TRIGGERED_TITLE', +} + +export type LocalizationKey = LocalizationBodyKey | LocalizationTitleKey; + +// Topics for each notification +// Topics are used to send notifications to specific topics in Firebase +export enum Topic { + TRADING = 'trading', + PRICE_ALERTS = 'price_alerts', +} + +export type LanguageCode = 'en' | 'es' | 'fr' | 'de' | 'it' | 'ja' | 'ko' | 'zh'; +export function isValidLanguageCode(code: string): code is LanguageCode { + return ['en', 'es', 'fr', 'de', 'it', 'ja', 'ko', 'zh'].includes(code); +} + +interface BaseNotification > { + type: NotificationType, + titleKey: LocalizationTitleKey, + bodyKey: LocalizationBodyKey, + topic: Topic, + dynamicValues: T, +} + +interface DepositSuccessNotification extends BaseNotification<{ + [NotificationDynamicFieldKey.AMOUNT]: string, + [NotificationDynamicFieldKey.MARKET]: string, +}> { + type: NotificationType.DEPOSIT_SUCCESS, + titleKey: LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE, + bodyKey: LocalizationBodyKey.DEPOSIT_SUCCESS_BODY, + topic: Topic.TRADING, + dynamicValues: { + [NotificationDynamicFieldKey.AMOUNT]: string, + [NotificationDynamicFieldKey.MARKET]: string, + }, +} + +interface OrderFilledNotification extends BaseNotification <{ + [NotificationDynamicFieldKey.MARKET]: string, + [NotificationDynamicFieldKey.AVERAGE_PRICE]: string, +}>{ + type: NotificationType.ORDER_FILLED, + titleKey: LocalizationTitleKey.ORDER_FILLED_TITLE, + bodyKey: 
LocalizationBodyKey.ORDER_FILLED_BODY, + topic: Topic.TRADING, + dynamicValues: { + [NotificationDynamicFieldKey.MARKET]: string, + [NotificationDynamicFieldKey.AMOUNT]: string, + [NotificationDynamicFieldKey.AVERAGE_PRICE]: string, + }, +} + +interface OrderTriggeredNotification extends BaseNotification <{ + [NotificationDynamicFieldKey.MARKET]: string, + [NotificationDynamicFieldKey.PRICE]: string, +}>{ + type: NotificationType.ORDER_TRIGGERED, + titleKey: LocalizationTitleKey.ORDER_TRIGGERED_TITLE, + bodyKey: LocalizationBodyKey.ORDER_TRIGGERED_BODY, + topic: Topic.TRADING, + dynamicValues: { + [NotificationDynamicFieldKey.MARKET]: string, + [NotificationDynamicFieldKey.AMOUNT]: string, + [NotificationDynamicFieldKey.PRICE]: string, + }, +} + +export type Notification = +DepositSuccessNotification | +OrderFilledNotification | +OrderTriggeredNotification; + +// Factory function to create notifications. +// +// dynamicValues is a conditional type that changes based on the notification type: +// Below can be read as, if notificationType is DEPOSIT_SUCCESS then dynamicValues must +// match the type of DepositSuccessNotification['dynamicValues'] +export function createNotification( + notificationType: T, + dynamicValues: T extends NotificationType.DEPOSIT_SUCCESS + ? DepositSuccessNotification['dynamicValues'] + : T extends NotificationType.ORDER_FILLED + ? OrderFilledNotification['dynamicValues'] + : T extends NotificationType.ORDER_TRIGGERED + ? 
OrderTriggeredNotification['dynamicValues'] : never, +): Notification { + switch (notificationType) { + case NotificationType.DEPOSIT_SUCCESS: + return { + type: NotificationType.DEPOSIT_SUCCESS, + titleKey: LocalizationTitleKey.DEPOSIT_SUCCESS_TITLE, + bodyKey: LocalizationBodyKey.DEPOSIT_SUCCESS_BODY, + topic: Topic.TRADING, + dynamicValues: dynamicValues as DepositSuccessNotification['dynamicValues'], + } as DepositSuccessNotification; + case NotificationType.ORDER_FILLED: + return { + type: NotificationType.ORDER_FILLED, + titleKey: LocalizationTitleKey.ORDER_FILLED_TITLE, + bodyKey: LocalizationBodyKey.ORDER_FILLED_BODY, + topic: Topic.TRADING, + dynamicValues: dynamicValues as OrderFilledNotification['dynamicValues'], + } as OrderFilledNotification; + case NotificationType.ORDER_TRIGGERED: + return { + type: NotificationType.ORDER_TRIGGERED, + titleKey: LocalizationTitleKey.ORDER_TRIGGERED_TITLE, + bodyKey: LocalizationBodyKey.ORDER_TRIGGERED_BODY, + topic: Topic.TRADING, + dynamicValues: dynamicValues as OrderTriggeredNotification['dynamicValues'], + } as OrderTriggeredNotification; + // Add other cases for new notification types here + default: + throw new Error('Unknown notification type'); + } +} diff --git a/indexer/packages/notifications/tsconfig.eslint.json b/indexer/packages/notifications/tsconfig.eslint.json new file mode 100644 index 00000000000..d8fd94c4a8d --- /dev/null +++ b/indexer/packages/notifications/tsconfig.eslint.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "include": [ + // Repeat included files from tsconfig.json. + "__tests__", + "src", + // Include e.g. configuration files. 
+ "./.*.js", + "./*.js" + ] +} diff --git a/indexer/packages/notifications/tsconfig.json b/indexer/packages/notifications/tsconfig.json new file mode 100644 index 00000000000..5b131572429 --- /dev/null +++ b/indexer/packages/notifications/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "build" + }, + "include": [ + "src", + "__tests__", + ] +} diff --git a/indexer/packages/postgres/README.md b/indexer/packages/postgres/README.md index c4ddb219014..a8febac17f9 100644 --- a/indexer/packages/postgres/README.md +++ b/indexer/packages/postgres/README.md @@ -7,6 +7,8 @@ Add a knex migration by running `pnpm run migrate:make ` Run the migration with `pnpm run migrate` +In `__tests__/db/migrations.test.ts`, test cases may be expected to fail (and hence should be commented out) if a model is modified due to the latest migration. + In order to migrate in dev and staging, you must redeploy and run bazooka. TODO(CORE-512): Add info/resources around bazooka. [Doc](https://www.notion.so/dydx/Engineering-Runbook-15064661da9643188ce33e341b68e7bb#cb2283d80ef14a51924f3bd1a538fd82). 
diff --git a/indexer/packages/postgres/__tests__/db/helpers.test.ts b/indexer/packages/postgres/__tests__/db/helpers.test.ts index e51da33411a..258de174899 100644 --- a/indexer/packages/postgres/__tests__/db/helpers.test.ts +++ b/indexer/packages/postgres/__tests__/db/helpers.test.ts @@ -101,7 +101,9 @@ describe('helpers', () => { ); const unrealizedPnl: string = getUnrealizedPnl( - perpetualPosition, defaultPerpetualMarket, marketIdToMarket, + perpetualPosition, + defaultPerpetualMarket, + marketIdToMarket[defaultPerpetualMarket.marketId], ); expect(unrealizedPnl).toEqual(Big(-50000).toFixed()); @@ -125,7 +127,9 @@ describe('helpers', () => { ); const unrealizedPnl: string = getUnrealizedPnl( - perpetualPosition, defaultPerpetualMarket, marketIdToMarket, + perpetualPosition, + defaultPerpetualMarket, + marketIdToMarket[defaultPerpetualMarket.marketId], ); expect(unrealizedPnl).toEqual(Big(50000).toFixed()); diff --git a/indexer/packages/postgres/__tests__/db/migrations.test.ts b/indexer/packages/postgres/__tests__/db/migrations.test.ts index faa4a74c9cf..ec68e8d44ac 100644 --- a/indexer/packages/postgres/__tests__/db/migrations.test.ts +++ b/indexer/packages/postgres/__tests__/db/migrations.test.ts @@ -10,9 +10,7 @@ import { } from '../helpers/constants'; import { seedData } from '../helpers/mock-generators'; -// NOTE: If a model is modified for a migration then these -// tests must be skipped until the following migration -describe.skip('Test new migration', () => { +describe('Test new migration', () => { beforeEach(async () => { await migrate(); }); @@ -25,21 +23,36 @@ describe.skip('Test new migration', () => { await teardown(); }); - it('test adding most recent migration', async () => { + it('test UP and DOWN for most recent migration without seed data', async () => { + // remove latest migration + await multiDown(1); + + // re-add latest migration + await knexPrimary.migrate.latest({ loadExtensions: ['.js'] }); + + // re-remove latest migration + await 
multiDown(1); + }); + + // NOTE: If a model is modified for a migration then these + // tests must be skipped until the following migration + it.skip('[Will fail if a model is modified for migration - see README] test adding most recent migration', async () => { // remove latest migration await multiDown(1); // add data to verify you can roll up and then later roll down await seedData(); - // readd latest migration + // re-add latest migration await knexPrimary.migrate.latest({ loadExtensions: ['.js'] }); // re-remove latest migration await multiDown(1); }); - it('test adding most recent migration with rows that fail index that should only be applied going forward', async () => { + // NOTE: If a model is modified for a migration then these + // tests must be skipped until the following migration + it.skip('[Will fail if a model is modified for migration - see README] test adding most recent migration with rows that fail index that should only be applied going forward', async () => { // remove latest migration await multiDown(1); @@ -47,7 +60,7 @@ describe.skip('Test new migration', () => { await seedData(); await OrderTable.create(defaultOrder); - // readd latest migration + // re-add latest migration await knexPrimary.migrate.latest({ loadExtensions: ['.js'] }); // re-remove latest migration diff --git a/indexer/packages/postgres/__tests__/helpers/constants.ts b/indexer/packages/postgres/__tests__/helpers/constants.ts index 5928862fe93..cda89989a6b 100644 --- a/indexer/packages/postgres/__tests__/helpers/constants.ts +++ b/indexer/packages/postgres/__tests__/helpers/constants.ts @@ -17,6 +17,8 @@ import * as TradingRewardAggregationTable from '../../src/stores/trading-reward- import * as TransactionTable from '../../src/stores/transaction-table'; import * as TransferTable from '../../src/stores/transfer-table'; import { + AffiliateInfoCreateObject, + AffiliateReferredUsersCreateObject, AssetCreateObject, AssetPositionCreateObject, BlockCreateObject, @@ -31,6 +33,7 
@@ import { FillCreateObject, FillType, FundingIndexUpdatesCreateObject, + LeaderboardPnlCreateObject, Liquidity, LiquidityTiersCreateObject, MarketCreateObject, @@ -41,11 +44,13 @@ import { OrderType, PerpetualMarketCreateObject, PerpetualMarketStatus, + PerpetualMarketType, PerpetualPositionCreateObject, PerpetualPositionStatus, PnlTicksCreateObject, PositionSide, SubaccountCreateObject, + SubaccountUsernamesCreateObject, TendermintEventCreateObject, TimeInForce, TradingRewardAggregationCreateObject, @@ -54,6 +59,9 @@ import { TransactionCreateObject, TransferCreateObject, WalletCreateObject, + PersistentCacheCreateObject, + VaultCreateObject, + VaultStatus, } from '../../src/types'; import { denomToHumanReadableConversion } from './conversion-helpers'; @@ -62,9 +70,15 @@ export const createdHeight: string = '2'; export const invalidTicker: string = 'INVALID-INVALID'; export const dydxChain: string = 'dydx'; export const defaultAddress: string = 'dydx1n88uc38xhjgxzw9nwre4ep2c8ga4fjxc565lnf'; +export const defaultAddress2: string = 'dydx1n88uc38xhjgxzw9nwre4ep2c8ga4fjxc575lnf'; +export const defaultAddress3: string = 'dydx199tqg4wdlnu4qjlxchpd7seg454937hjrknju4'; export const blockedAddress: string = 'dydx1f9k5qldwmqrnwy8hcgp4fw6heuvszt35egvtx2'; +// Vault address for vault id 0 was generated using +// script protocol/scripts/vault/get_vault.go +export const vaultAddress: string = 'dydx1c0m5x87llaunl5sgv3q5vd7j5uha26d2q2r2q0'; // ============== Subaccounts ============== +export const defaultWalletAddress: string = 'dydx199tqg4wdlnu4qjlxchpd7seg454937hjrknju4'; export const defaultSubaccount: SubaccountCreateObject = { address: defaultAddress, @@ -87,7 +101,55 @@ export const defaultSubaccount3: SubaccountCreateObject = { updatedAtHeight: createdHeight, }; -export const defaultWalletAddress: string = 'defaultWalletAddress'; +export const defaultSubaccount2Num0: SubaccountCreateObject = { + address: defaultAddress2, + subaccountNumber: 0, + updatedAt: 
createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +export const defaultSubaccount3Num0: SubaccountCreateObject = { + address: defaultAddress3, + subaccountNumber: 0, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +// defaultWalletAddress belongs to defaultWallet2 and is different from defaultAddress +export const defaultSubaccountDefaultWalletAddress: SubaccountCreateObject = { + address: defaultWalletAddress, + subaccountNumber: 0, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +export const defaultSubaccountWithAlternateAddress: SubaccountCreateObject = { + address: defaultAddress2, + subaccountNumber: 0, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +export const vaultSubaccount: SubaccountCreateObject = { + address: vaultAddress, + subaccountNumber: 0, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +export const isolatedSubaccount: SubaccountCreateObject = { + address: defaultAddress, + subaccountNumber: 128, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; + +export const isolatedSubaccount2: SubaccountCreateObject = { + address: defaultAddress, + subaccountNumber: 256, + updatedAt: createdDateTime.toISO(), + updatedAtHeight: createdHeight, +}; export const defaultSubaccountId: string = SubaccountTable.uuid( defaultAddress, @@ -101,16 +163,51 @@ export const defaultSubaccountId3: string = SubaccountTable.uuid( defaultAddress, defaultSubaccount3.subaccountNumber, ); +export const defaultSubaccountIdDefaultWalletAddress: string = SubaccountTable.uuid( + defaultWalletAddress, + defaultSubaccountDefaultWalletAddress.subaccountNumber, +); +export const defaultSubaccountIdWithAlternateAddress: string = SubaccountTable.uuid( + defaultAddress2, + defaultSubaccountWithAlternateAddress.subaccountNumber, +); +export const isolatedSubaccountId: string = SubaccountTable.uuid( + defaultAddress, + 
isolatedSubaccount.subaccountNumber, +); +export const isolatedSubaccountId2: string = SubaccountTable.uuid( + defaultAddress, + isolatedSubaccount2.subaccountNumber, +); + +export const vaultSubaccountId: string = SubaccountTable.uuid( + vaultAddress, + vaultSubaccount.subaccountNumber, +); // ============== Wallets ============== export const defaultWallet: WalletCreateObject = { address: defaultAddress, totalTradingRewards: denomToHumanReadableConversion(0), + totalVolume: '0', }; export const defaultWallet2: WalletCreateObject = { address: defaultWalletAddress, totalTradingRewards: denomToHumanReadableConversion(1), + totalVolume: '0', +}; + +export const vaultWallet: WalletCreateObject = { + address: vaultAddress, + totalTradingRewards: denomToHumanReadableConversion(0), + totalVolume: '0', +}; + +export const defaultWallet3: WalletCreateObject = { + address: defaultAddress2, + totalTradingRewards: denomToHumanReadableConversion(0), + totalVolume: '0', }; // ============== Assets ============== @@ -157,6 +254,16 @@ export const defaultAssetPositionId2: string = AssetPositionTable.uuid( defaultAssetPosition2.subaccountId, defaultAssetPosition2.assetId, ); +export const isolatedSubaccountAssetPosition: AssetPositionCreateObject = { + subaccountId: isolatedSubaccountId, + assetId: '0', + size: '5000', + isLong: true, +}; +export const isolatedSubaccountAssetPositionId: string = AssetPositionTable.uuid( + isolatedSubaccountAssetPosition.subaccountId, + isolatedSubaccountAssetPosition.assetId, +); // ============== PerpetualMarkets ============== @@ -176,6 +283,9 @@ export const defaultPerpetualMarket: PerpetualMarketCreateObject = { subticksPerTick: 100, stepBaseQuantums: 10, liquidityTierId: 0, + marketType: PerpetualMarketType.CROSS, + baseOpenInterest: '100000', + defaultFundingRate1H: '0', }; export const defaultPerpetualMarket2: PerpetualMarketCreateObject = { id: '1', @@ -193,6 +303,9 @@ export const defaultPerpetualMarket2: PerpetualMarketCreateObject = { 
subticksPerTick: 10, stepBaseQuantums: 1, liquidityTierId: 0, + marketType: PerpetualMarketType.CROSS, + baseOpenInterest: '100000', + defaultFundingRate1H: '0', }; export const defaultPerpetualMarket3: PerpetualMarketCreateObject = { id: '2', @@ -210,6 +323,51 @@ export const defaultPerpetualMarket3: PerpetualMarketCreateObject = { subticksPerTick: 10, stepBaseQuantums: 1, liquidityTierId: 0, + marketType: PerpetualMarketType.CROSS, + baseOpenInterest: '100000', + defaultFundingRate1H: '0', +}; + +export const isolatedPerpetualMarket: PerpetualMarketCreateObject = { + id: '3', + clobPairId: '4', + ticker: 'ISO-USD', + marketId: 3, + status: PerpetualMarketStatus.ACTIVE, + priceChange24H: '0.000000001', + volume24H: '10000000', + trades24H: 200, + nextFundingRate: '1.2', + openInterest: '40000', + quantumConversionExponent: -16, + atomicResolution: -2, + subticksPerTick: 10, + stepBaseQuantums: 1, + liquidityTierId: 0, + marketType: PerpetualMarketType.ISOLATED, + baseOpenInterest: '100000', + defaultFundingRate1H: '0.0001', +}; + +export const isolatedPerpetualMarket2: PerpetualMarketCreateObject = { + id: '4', + clobPairId: '5', + ticker: 'ISO2-USD', + marketId: 4, + status: PerpetualMarketStatus.ACTIVE, + priceChange24H: '0.000000001', + volume24H: '10000000', + trades24H: 200, + nextFundingRate: '1.2', + openInterest: '40000', + quantumConversionExponent: -16, + atomicResolution: -2, + subticksPerTick: 10, + stepBaseQuantums: 1, + liquidityTierId: 0, + marketType: PerpetualMarketType.ISOLATED, + baseOpenInterest: '100000', + defaultFundingRate1H: '0.0001', }; // ============== Orders ============== @@ -233,6 +391,25 @@ export const defaultOrder: OrderCreateObject = { updatedAtHeight: '1', }; +export const isolatedMarketOrder: OrderCreateObject = { + subaccountId: isolatedSubaccountId, + clientId: '1', + clobPairId: '4', + side: OrderSide.BUY, + size: '25', + totalFilled: '0', + price: '20000', + type: OrderType.LIMIT, + status: OrderStatus.OPEN, + timeInForce: 
TimeInForce.FOK, + reduceOnly: false, + goodTilBlock: '100', + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: '0', + updatedAt: '2023-01-22T00:00:00.000Z', + updatedAtHeight: '1', +}; + export const defaultOrderGoodTilBlockTime: OrderCreateObject = { ...defaultOrder, clientId: '2', @@ -257,6 +434,13 @@ export const defaultOrderId: string = OrderTable.uuid( defaultOrder.orderFlags, ); +export const isolatedMarketOrderId: string = OrderTable.uuid( + isolatedMarketOrder.subaccountId, + isolatedMarketOrder.clientId, + isolatedMarketOrder.clobPairId, + isolatedMarketOrder.orderFlags, +); + export const defaultOrderGoodTilBlockTimeId: string = OrderTable.uuid( defaultOrderGoodTilBlockTime.subaccountId, defaultOrderGoodTilBlockTime.clientId, @@ -361,6 +545,28 @@ export const defaultPerpetualPositionId: string = PerpetualPositionTable.uuid( defaultPerpetualPosition.openEventId, ); +export const isolatedPerpetualPosition: PerpetualPositionCreateObject = { + subaccountId: isolatedSubaccountId, + perpetualId: isolatedPerpetualMarket.id, + side: PositionSide.LONG, + status: PerpetualPositionStatus.OPEN, + size: '10', + maxSize: '25', + entryPrice: '1.5', + sumOpen: '10', + sumClose: '0', + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + openEventId: defaultTendermintEventId, + lastEventId: defaultTendermintEventId2, + settledFunding: '200000', +}; + +export const isolatedPerpetualPositionId: string = PerpetualPositionTable.uuid( + isolatedPerpetualPosition.subaccountId, + isolatedPerpetualPosition.openEventId, +); + // ============== Fills ============== export const defaultFill: FillCreateObject = { @@ -379,6 +585,45 @@ export const defaultFill: FillCreateObject = { createdAtHeight: createdHeight, clientMetadata: '0', fee: '1.1', + affiliateRevShare: '1.10', +}; + +export const isolatedMarketFill: FillCreateObject = { + subaccountId: isolatedSubaccountId, + side: OrderSide.BUY, + liquidity: Liquidity.TAKER, + type: FillType.LIMIT, + 
clobPairId: '4', + orderId: isolatedMarketOrderId, + size: '10', + price: '20000', + quoteAmount: '200000', + eventId: defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + clientMetadata: '0', + fee: '1.1', + affiliateRevShare: '0', +}; + +export const isolatedMarketFill2: FillCreateObject = { + subaccountId: isolatedSubaccountId2, + side: OrderSide.BUY, + liquidity: Liquidity.TAKER, + type: FillType.LIMIT, + clobPairId: '4', + orderId: isolatedMarketOrderId, + size: '10', + price: '20000', + quoteAmount: '200000', + eventId: defaultTendermintEventId3, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + clientMetadata: '0', + fee: '1.1', + affiliateRevShare: '0', }; // ============== Transfers ============== @@ -405,6 +650,12 @@ export const defaultTransfer3: TransferCreateObject = { assetId: defaultAsset2.id, }; +export const defaultTransferWithAlternateAddress: TransferCreateObject = { + ...defaultTransfer, + senderSubaccountId: defaultSubaccountIdWithAlternateAddress, + recipientSubaccountId: defaultSubaccountId, +}; + export const defaultTransferId: string = TransferTable.uuid( defaultTransfer.eventId, defaultTransfer.assetId, @@ -414,6 +665,15 @@ export const defaultTransferId: string = TransferTable.uuid( defaultTransfer.recipientWalletAddress, ); +export const defaultTransferWithAlternateAddressId: string = TransferTable.uuid( + defaultTransferWithAlternateAddress.eventId, + defaultTransferWithAlternateAddress.assetId, + defaultTransferWithAlternateAddress.senderSubaccountId, + defaultTransferWithAlternateAddress.recipientSubaccountId, + defaultTransferWithAlternateAddress.senderWalletAddress, + defaultTransferWithAlternateAddress.recipientWalletAddress, +); + export const defaultWithdrawal: TransferCreateObject = { senderSubaccountId: defaultSubaccountId, 
recipientWalletAddress: defaultWalletAddress, @@ -480,6 +740,22 @@ export const defaultMarket3: MarketCreateObject = { oraclePrice: '0.000000065', }; +export const isolatedMarket: MarketCreateObject = { + id: 3, + pair: 'ISO-USD', + exponent: -12, + minPriceChangePpm: 50, + oraclePrice: '1.00', +}; + +export const isolatedMarket2: MarketCreateObject = { + id: 4, + pair: 'ISO2-USD', + exponent: -20, + minPriceChangePpm: 50, + oraclePrice: '0.000000085', +}; + // ============== LiquidityTiers ============== export const defaultLiquidityTier: LiquidityTiersCreateObject = { @@ -494,6 +770,8 @@ export const defaultLiquidityTier2: LiquidityTiersCreateObject = { name: 'Mid-Cap', initialMarginPpm: '100000', // 10% maintenanceFractionPpm: '500000', // 50% + openInterestLowerCap: '0', + openInterestUpperCap: '5000000', }; // ============== OraclePrices ============== @@ -536,6 +814,8 @@ export const defaultCandle: CandleCreateObject = { usdVolume: '2200000', trades: 300, startingOpenInterest: '200000', + orderbookMidPriceOpen: '11500', + orderbookMidPriceClose: '12500', }; export const defaultCandleId: string = CandleTable.uuid( @@ -574,6 +854,22 @@ export const defaultFundingIndexUpdateId: string = FundingIndexUpdatesTable.uuid defaultFundingIndexUpdate.perpetualId, ); +export const isolatedMarketFundingIndexUpdate: FundingIndexUpdatesCreateObject = { + perpetualId: isolatedPerpetualMarket.id, + eventId: defaultTendermintEventId, + rate: '0.0004', + oraclePrice: '10000', + fundingIndex: '10200', + effectiveAt: createdDateTime.toISO(), + effectiveAtHeight: createdHeight, +}; + +export const isolatedMarketFundingIndexUpdateId: string = FundingIndexUpdatesTable.uuid( + isolatedMarketFundingIndexUpdate.effectiveAtHeight, + isolatedMarketFundingIndexUpdate.eventId, + isolatedMarketFundingIndexUpdate.perpetualId, +); + // ========= Compliance Data ========== export const blockedComplianceData: ComplianceDataCreateObject = { @@ -641,3 +937,145 @@ export const 
defaultTradingRewardAggregationId: string = TradingRewardAggregatio defaultTradingRewardAggregation.period, defaultTradingRewardAggregation.startedAtHeight, ); + +// ============== Subaccount Usernames ============== +export const defaultSubaccountUsername: SubaccountUsernamesCreateObject = { + username: 'LyingRaisin32', + subaccountId: defaultSubaccountId, +}; + +export const defaultSubaccountUsername2: SubaccountUsernamesCreateObject = { + username: 'LyingRaisin33', + subaccountId: defaultSubaccountId2, +}; + +export const duplicatedSubaccountUsername: SubaccountUsernamesCreateObject = { + username: 'LyingRaisin32', + subaccountId: defaultSubaccountId3, +}; + +// defaultWalletAddress belongs to defaultWallet2 and is different from defaultAddress +export const subaccountUsernameWithDefaultWalletAddress: SubaccountUsernamesCreateObject = { + username: 'EvilRaisin11', + subaccountId: defaultSubaccountIdDefaultWalletAddress, +}; + +export const subaccountUsernameWithAlternativeAddress: SubaccountUsernamesCreateObject = { + username: 'HonestRaisin32', + subaccountId: defaultSubaccountIdWithAlternateAddress, +}; + +// ============== Leaderboard pnl Data ============== + +export const defaultLeaderboardPnlOneDay: LeaderboardPnlCreateObject = { + address: defaultAddress, + timeSpan: 'ONE_DAY', + pnl: '10000', + currentEquity: '1000', + rank: 1, +}; + +export const defaultLeaderboardPnl2OneDay: LeaderboardPnlCreateObject = { + address: defaultAddress2, + timeSpan: 'ONE_DAY', + pnl: '100', + currentEquity: '10000', + rank: 2, +}; + +export const defaultLeaderboardPnl1AllTime: LeaderboardPnlCreateObject = { + address: defaultAddress, + timeSpan: 'ALL_TIME', + pnl: '10000', + currentEquity: '1000', + rank: 1, +}; + +export const defaultLeaderboardPnlOneDayToUpsert: LeaderboardPnlCreateObject = { + address: defaultAddress, + timeSpan: 'ONE_DAY', + pnl: '100000', + currentEquity: '1000', + rank: 1, +}; + +// ============== Affiliate referred users data ============== +export 
const defaultAffiliateReferredUser: AffiliateReferredUsersCreateObject = { + affiliateAddress: defaultAddress, + refereeAddress: defaultAddress2, + referredAtBlock: '1', +}; + +// ============== Persistent cache Data ============== + +export const defaultKV: PersistentCacheCreateObject = { + key: 'someKey', + value: 'someValue', +}; + +export const defaultKV2: PersistentCacheCreateObject = { + key: 'otherKey', + value: 'otherValue', +}; + +// ============== Affiliate Info Data ============== + +export const defaultAffiliateInfo: AffiliateInfoCreateObject = { + address: defaultAddress, + affiliateEarnings: '10', + referredMakerTrades: 10, + referredTakerTrades: 20, + totalReferredMakerFees: '10', + totalReferredTakerFees: '10', + totalReferredMakerRebates: '-10', + totalReferredUsers: 5, + firstReferralBlockHeight: '1', + referredTotalVolume: '1000', +}; + +export const defaultAffiliateInfo2: AffiliateInfoCreateObject = { + address: defaultWalletAddress, + affiliateEarnings: '11', + referredMakerTrades: 11, + referredTakerTrades: 21, + totalReferredMakerFees: '11', + totalReferredTakerFees: '11', + totalReferredMakerRebates: '-11', + totalReferredUsers: 5, + firstReferralBlockHeight: '11', + referredTotalVolume: '1000', +}; + +export const defaultAffiliateInfo3: AffiliateInfoCreateObject = { + address: defaultAddress2, + affiliateEarnings: '12', + referredMakerTrades: 12, + referredTakerTrades: 22, + totalReferredMakerFees: '12', + totalReferredTakerFees: '12', + totalReferredMakerRebates: '-12', + totalReferredUsers: 10, + firstReferralBlockHeight: '12', + referredTotalVolume: '1111111', +}; + +// ============== Tokens ============= + +export const defaultFirebaseNotificationToken = { + token: 'DEFAULT_TOKEN', + address: defaultAddress, + language: 'en', + updatedAt: createdDateTime.toISO(), +}; + +// ============== Vaults ============= + +export const defaultVaultAddress: string = 'dydx1pzaql7h3tkt9uet8yht80me5td6gh0aprf58yk'; + +export const defaultVault: 
VaultCreateObject = { + address: defaultVaultAddress, + clobPairId: '0', + status: VaultStatus.QUOTING, + createdAt: createdDateTime.toISO(), + updatedAt: createdDateTime.toISO(), +}; diff --git a/indexer/packages/postgres/__tests__/helpers/mock-generators.ts b/indexer/packages/postgres/__tests__/helpers/mock-generators.ts index d70d0004e72..ea785b2944c 100644 --- a/indexer/packages/postgres/__tests__/helpers/mock-generators.ts +++ b/indexer/packages/postgres/__tests__/helpers/mock-generators.ts @@ -1,5 +1,6 @@ import * as AssetTable from '../../src/stores/asset-table'; import * as BlockTable from '../../src/stores/block-table'; +import * as FirebaseNotificationTokenTable from '../../src/stores/firebase-notification-token-table'; import * as LiquidityTiersTable from '../../src/stores/liquidity-tiers-table'; import * as MarketTable from '../../src/stores/market-table'; import * as PerpetualMarketTable from '../../src/stores/perpetual-market-table'; @@ -26,18 +27,38 @@ import { defaultTendermintEvent2, defaultTendermintEvent3, defaultTendermintEvent4, + defaultFirebaseNotificationToken, defaultWallet, + isolatedMarket, + isolatedMarket2, + isolatedPerpetualMarket, + isolatedPerpetualMarket2, + isolatedSubaccount, + isolatedSubaccount2, + defaultSubaccount2Num0, + defaultSubaccount3Num0, } from './constants'; +export async function seedAdditionalSubaccounts() { + await Promise.all([ + SubaccountTable.create(defaultSubaccount2Num0), + SubaccountTable.create(defaultSubaccount3Num0), + ]); +} + export async function seedData() { await Promise.all([ SubaccountTable.create(defaultSubaccount), SubaccountTable.create(defaultSubaccount2), + SubaccountTable.create(isolatedSubaccount), + SubaccountTable.create(isolatedSubaccount2), ]); await Promise.all([ MarketTable.create(defaultMarket), MarketTable.create(defaultMarket2), MarketTable.create(defaultMarket3), + MarketTable.create(isolatedMarket), + MarketTable.create(isolatedMarket2), ]); await Promise.all([ 
LiquidityTiersTable.create(defaultLiquidityTier), @@ -47,6 +68,8 @@ export async function seedData() { PerpetualMarketTable.create(defaultPerpetualMarket), PerpetualMarketTable.create(defaultPerpetualMarket2), PerpetualMarketTable.create(defaultPerpetualMarket3), + PerpetualMarketTable.create(isolatedPerpetualMarket), + PerpetualMarketTable.create(isolatedPerpetualMarket2), ]); await Promise.all([ BlockTable.create(defaultBlock), @@ -66,4 +89,7 @@ export async function seedData() { await Promise.all([ WalletTable.create(defaultWallet), ]); + await Promise.all([ + FirebaseNotificationTokenTable.create(defaultFirebaseNotificationToken), + ]); } diff --git a/indexer/packages/postgres/__tests__/lib/api-translations.test.ts b/indexer/packages/postgres/__tests__/lib/api-translations.test.ts index 2ba80bc4a36..ec54f29eae4 100644 --- a/indexer/packages/postgres/__tests__/lib/api-translations.test.ts +++ b/indexer/packages/postgres/__tests__/lib/api-translations.test.ts @@ -1,7 +1,5 @@ import { APITimeInForce, TimeInForce } from '../../src/types'; import { - getChildSubaccountNums, - getParentSubaccountNum, isOrderTIFPostOnly, orderTIFToAPITIF, } from '../../src/lib/api-translations'; @@ -36,41 +34,4 @@ describe('apiTranslations', () => { expect(isOrderTIFPostOnly(orderTimeInForce)).toEqual(expectedPostOnly); }); }); - - describe('getChildSubaccountNums', () => { - it('Gets a list of all possible child subaccount numbers for a parent subaccount 0', () => { - const childSubaccounts = getChildSubaccountNums(0); - expect(childSubaccounts.length).toEqual(1000); - expect(childSubaccounts[0]).toEqual(0); - expect(childSubaccounts[1]).toEqual(128); - expect(childSubaccounts[999]).toEqual(128 * 999); - }); - it('Gets a list of all possible child subaccount numbers for a parent subaccount 127', () => { - const childSubaccounts = getChildSubaccountNums(127); - expect(childSubaccounts.length).toEqual(1000); - expect(childSubaccounts[0]).toEqual(127); - 
expect(childSubaccounts[1]).toEqual(128 + 127); - expect(childSubaccounts[999]).toEqual(128 * 999 + 127); - }); - }); - - describe('getChildSubaccountNums', () => { - it('Throws an error if the parent subaccount number is greater than or equal to the maximum parent subaccount number', () => { - expect(() => getChildSubaccountNums(128)).toThrowError('Parent subaccount number must be less than 128'); - }); - }); - - describe('getParentSubaccountNum', () => { - it('Gets the parent subaccount number from a child subaccount number', () => { - expect(getParentSubaccountNum(0)).toEqual(0); - expect(getParentSubaccountNum(128)).toEqual(0); - expect(getParentSubaccountNum(128 * 999 - 1)).toEqual(127); - }); - }); - - describe('getParentSubaccountNum', () => { - it('Throws an error if the child subaccount number is greater than the max child subaccount number', () => { - expect(() => getParentSubaccountNum(128001)).toThrowError('Child subaccount number must be less than 128000'); - }); - }); }); diff --git a/indexer/packages/postgres/__tests__/lib/parent-subaccount-helpers.ts b/indexer/packages/postgres/__tests__/lib/parent-subaccount-helpers.ts new file mode 100644 index 00000000000..b31d98ba987 --- /dev/null +++ b/indexer/packages/postgres/__tests__/lib/parent-subaccount-helpers.ts @@ -0,0 +1,15 @@ +import { + getParentSubaccountNum, +} from '../../src/lib/parent-subaccount-helpers'; + +describe('getParentSubaccountNum', () => { + it('Gets the parent subaccount number from a child subaccount number', () => { + expect(getParentSubaccountNum(0)).toEqual(0); + expect(getParentSubaccountNum(128)).toEqual(0); + expect(getParentSubaccountNum(128 * 999 - 1)).toEqual(127); + }); + + it('Throws an error if the child subaccount number is greater than the max child subaccount number', () => { + expect(() => getParentSubaccountNum(128001)).toThrowError('Child subaccount number must be less than or equal to 128000'); + }); +}); diff --git 
a/indexer/packages/postgres/__tests__/lib/protocol-translations.test.ts b/indexer/packages/postgres/__tests__/lib/protocol-translations.test.ts index 6aa48520040..e7e46050d84 100644 --- a/indexer/packages/postgres/__tests__/lib/protocol-translations.test.ts +++ b/indexer/packages/postgres/__tests__/lib/protocol-translations.test.ts @@ -230,12 +230,6 @@ describe('protocolTranslations', () => { describe('orderTypeToProtocolConditionType', () => { it.each([ ['LIMIT', OrderType.LIMIT, IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED], - ['HARD_TRADE', OrderType.HARD_TRADE, IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED], - [ - 'FAILED_HARD_TRADE', - OrderType.FAILED_HARD_TRADE, - IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED, - ], ['MARKET', OrderType.MARKET, IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED], [ 'TRAILING_STOP', diff --git a/indexer/packages/postgres/__tests__/loops/block-height-refresher.test.ts b/indexer/packages/postgres/__tests__/loops/block-height-refresher.test.ts new file mode 100644 index 00000000000..62b42d593cc --- /dev/null +++ b/indexer/packages/postgres/__tests__/loops/block-height-refresher.test.ts @@ -0,0 +1,39 @@ +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { clear, getLatestBlockHeight, updateBlockHeight } from '../../src/loops/block-height-refresher'; +import { defaultBlock2 } from '../helpers/constants'; +import { seedData } from '../helpers/mock-generators'; +import config from '../../src/config'; + +describe('blockHeightRefresher', () => { + beforeAll(async () => { + await migrate(); + await seedData(); + await updateBlockHeight(); + }); + + afterAll(async () => { + await clearData(); + await teardown(); + }); + + describe('getLatestBlockHeight', () => { + it('successfully gets the latest block height after update', async () => { + await updateBlockHeight(); + expect(getLatestBlockHeight()).toBe(defaultBlock2.blockHeight); + }); + }); + + describe('clear', () => { + 
it('throws an error if block height does not exist', () => { + clear(); + expect(() => getLatestBlockHeight()).toThrowError('Unable to find latest block height'); + }); + + it('throws an error when clear is called in non-test environment', () => { + const originalEnv = config.NODE_ENV; + config.NODE_ENV = 'production'; + expect(() => clear()).toThrowError('clear cannot be used in non-test env'); + config.NODE_ENV = originalEnv; + }); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/affiliate-info-table.test.ts b/indexer/packages/postgres/__tests__/stores/affiliate-info-table.test.ts new file mode 100644 index 00000000000..dccf4e1ece6 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/affiliate-info-table.test.ts @@ -0,0 +1,481 @@ +import { + AffiliateInfoFromDatabase, Liquidity, FillType, +} from '../../src/types'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { + defaultOrder, + defaultWallet, + defaultFill, + defaultWallet2, + defaultAffiliateInfo, + defaultAffiliateInfo2, + defaultTendermintEventId, + defaultTendermintEventId2, + defaultTendermintEventId3, + defaultTendermintEventId4, + vaultAddress, +} from '../helpers/constants'; +import * as AffiliateInfoTable from '../../src/stores/affiliate-info-table'; +import * as OrderTable from '../../src/stores/order-table'; +import * as AffiliateReferredUsersTable from '../../src/stores/affiliate-referred-users-table'; +import * as FillTable from '../../src/stores/fill-table'; +import { seedData } from '../helpers/mock-generators'; +import { DateTime } from 'luxon'; + +describe('Affiliate info store', () => { + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates affiliate info', async () => { + await AffiliateInfoTable.create(defaultAffiliateInfo); + }); + + it('Cannot create duplicate info for duplicate address', 
async () => { + await AffiliateInfoTable.create(defaultAffiliateInfo); + await expect(AffiliateInfoTable.create(defaultAffiliateInfo)).rejects.toThrowError(); + }); + + it('Can upsert affiliate info multiple times', async () => { + await AffiliateInfoTable.upsert(defaultAffiliateInfo); + let info: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + defaultAffiliateInfo.address, + ); + expect(info).toEqual(expect.objectContaining(defaultAffiliateInfo)); + + await AffiliateInfoTable.upsert(defaultAffiliateInfo2); + info = await AffiliateInfoTable.findById(defaultAffiliateInfo2.address); + expect(info).toEqual(expect.objectContaining(defaultAffiliateInfo2)); + }); + + it('Successfully finds all affiliate infos', async () => { + await Promise.all([ + AffiliateInfoTable.create(defaultAffiliateInfo), + AffiliateInfoTable.create(defaultAffiliateInfo2), + ]); + + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(infos.length).toEqual(2); + expect(infos).toEqual(expect.arrayContaining([ + expect.objectContaining(defaultAffiliateInfo), + expect.objectContaining(defaultAffiliateInfo2), + ])); + }); + + it('Successfully finds affiliate info by Id', async () => { + await AffiliateInfoTable.create(defaultAffiliateInfo); + + const info: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + defaultAffiliateInfo.address, + ); + expect(info).toEqual(expect.objectContaining(defaultAffiliateInfo)); + }); + + it('Returns undefined if affiliate info not found by Id', async () => { + await AffiliateInfoTable.create(defaultAffiliateInfo); + + const info: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + 'non_existent_address', + ); + expect(info).toBeUndefined(); + }); + + describe('updateInfo', () => { + it('Successfully creates new affiliate info', async () => { + const referenceDt: DateTime = await populateFillsAndReferrals(); + + // 
Perform update + await AffiliateInfoTable.updateInfo( + referenceDt.minus({ minutes: 2 }).toISO(), + referenceDt.toISO(), + ); + + // Get affiliate info (wallet2 is affiliate) + const updatedInfo: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + defaultWallet2.address, + ); + + const expectedAffiliateInfo: AffiliateInfoFromDatabase = { + address: defaultWallet2.address, + affiliateEarnings: '1000', + referredMakerTrades: 1, + referredTakerTrades: 1, + totalReferredMakerFees: '0', + totalReferredTakerFees: '1000', + totalReferredMakerRebates: '-1000', + totalReferredUsers: 1, + firstReferralBlockHeight: '1', + referredTotalVolume: '2', + }; + + expect(updatedInfo).toEqual(expectedAffiliateInfo); + }); + + it('Successfully updates/increments affiliate info for stats and new referrals', async () => { + const referenceDt: DateTime = await populateFillsAndReferrals(); + + // Perform update: catches first 2 fills + await AffiliateInfoTable.updateInfo( + referenceDt.minus({ minutes: 3 }).toISO(), + referenceDt.minus({ minutes: 2 }).toISO(), + ); + + const updatedInfo1: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + defaultWallet2.address, + ); + const expectedAffiliateInfo1: AffiliateInfoFromDatabase = { + address: defaultWallet2.address, + affiliateEarnings: '1005', + referredMakerTrades: 3, + referredTakerTrades: 1, + totalReferredMakerFees: '2100', + totalReferredTakerFees: '0', + totalReferredMakerRebates: '0', + totalReferredUsers: 1, + firstReferralBlockHeight: '1', + referredTotalVolume: '4', + }; + expect(updatedInfo1).toEqual(expectedAffiliateInfo1); + + // Perform update: catches next 2 fills + await AffiliateInfoTable.updateInfo( + referenceDt.minus({ minutes: 2 }).toISO(), + referenceDt.minus({ minutes: 1 }).toISO(), + ); + + const updatedInfo2 = await AffiliateInfoTable.findById( + defaultWallet2.address, + ); + const expectedAffiliateInfo2: AffiliateInfoFromDatabase = { + address: 
defaultWallet2.address, + affiliateEarnings: '2005', + referredMakerTrades: 4, + referredTakerTrades: 2, + totalReferredMakerFees: '2100', + totalReferredTakerFees: '1000', + totalReferredMakerRebates: '-1000', + totalReferredUsers: 1, + firstReferralBlockHeight: '1', + referredTotalVolume: '6', + }; + expect(updatedInfo2).toEqual(expectedAffiliateInfo2); + + // Perform update: catches no fills but new affiliate referral + await AffiliateReferredUsersTable.create({ + affiliateAddress: defaultWallet2.address, + refereeAddress: vaultAddress, + referredAtBlock: '2', + }); + await AffiliateInfoTable.updateInfo( + referenceDt.minus({ minutes: 1 }).toISO(), + referenceDt.toISO(), + ); + const updatedInfo3 = await AffiliateInfoTable.findById( + defaultWallet2.address, + ); + const expectedAffiliateInfo3: AffiliateInfoFromDatabase = { + address: defaultWallet2.address, + affiliateEarnings: '2005', + referredMakerTrades: 4, + referredTakerTrades: 2, + totalReferredMakerFees: '2100', + totalReferredTakerFees: '1000', + totalReferredMakerRebates: '-1000', + totalReferredUsers: 2, + firstReferralBlockHeight: '1', + referredTotalVolume: '6', + }; + expect(updatedInfo3).toEqual(expectedAffiliateInfo3); + }); + + it('Does not use fills from before referal block height', async () => { + const referenceDt: DateTime = DateTime.utc(); + + await seedData(); + await OrderTable.create(defaultOrder); + + // Referal at block 2 but fill is at block 1 + await AffiliateReferredUsersTable.create({ + affiliateAddress: defaultWallet2.address, + refereeAddress: defaultWallet.address, + referredAtBlock: '2', + }); + await FillTable.create({ + ...defaultFill, + liquidity: Liquidity.TAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.toISO(), + createdAtHeight: '1', + eventId: defaultTendermintEventId, + price: '1', + size: '1', + fee: '1000', + affiliateRevShare: '500', + }); + + await AffiliateInfoTable.updateInfo( + referenceDt.minus({ minutes: 1 }).toISO(), + 
referenceDt.toISO(), + ); + + const updatedInfo: AffiliateInfoFromDatabase | undefined = await AffiliateInfoTable.findById( + defaultWallet2.address, + ); + // expect one referred user but no fill stats + const expectedAffiliateInfo: AffiliateInfoFromDatabase = { + address: defaultWallet2.address, + affiliateEarnings: '0', + referredMakerTrades: 0, + referredTakerTrades: 0, + totalReferredMakerFees: '0', + totalReferredTakerFees: '0', + totalReferredMakerRebates: '0', + totalReferredUsers: 1, + firstReferralBlockHeight: '2', + referredTotalVolume: '0', + }; + expect(updatedInfo).toEqual(expectedAffiliateInfo); + }); + }); + + describe('paginatedFindWithAddressFilter', () => { + beforeEach(async () => { + await migrate(); + await Promise.all( + Array.from({ length: 10 }, (_, i) => AffiliateInfoTable.create({ + ...defaultAffiliateInfo, + address: `address_${i}`, + affiliateEarnings: i.toString(), + }), + ), + ); + }); + + it('Successfully filters by address', async () => { + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + ['address_0'], + 0, + 10, + false, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(1); + expect(infos![0]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_0', + affiliateEarnings: '0', + })); + }); + + it('Successfully sorts by affiliate earning', async () => { + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + [], + 0, + 10, + true, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(10); + expect(infos![0]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_9', + affiliateEarnings: '9', + })); + expect(infos![9]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_0', + affiliateEarnings: '0', + })); + }); + + it('Successfully uses offset (default to sorted) and limit', async () => { + const infos: 
AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + [], + 5, + 2, + false, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(2); + expect(infos![0]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_4', + // affiliateEarnings in DB: 9, 8, 7, 6, 5, 4, ... + // so we get 4 with offset = 5. + affiliateEarnings: '4', + })); + expect(infos![1]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_3', + affiliateEarnings: '3', + })); + }); + + it('Successfully filters, sorts, offsets, and limits', async () => { + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + [], + 3, + 2, + true, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(2); + expect(infos![0]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_6', + affiliateEarnings: '6', + })); + expect(infos![1]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_5', + affiliateEarnings: '5', + })); + }); + + it('Returns empty array if no results', async () => { + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + ['address_11'], + 0, + 10, + false, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(0); + }); + + it('Successfully use sorted - equal earnings between affiliates', async () => { + await AffiliateInfoTable.create({ + ...defaultAffiliateInfo, + address: 'address_10', + affiliateEarnings: '9', // same as address_9 + }); + const infos: AffiliateInfoFromDatabase[] = await AffiliateInfoTable + .paginatedFindWithAddressFilter( + [], + 0, + 100, + true, + ); + expect(infos).toBeDefined(); + expect(infos!.length).toEqual(11); + expect(infos![0]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_10', // '10' < '9' in lexicographical order + affiliateEarnings: '9', + })); + 
expect(infos![1]).toEqual(expect.objectContaining({ + ...defaultAffiliateInfo, + address: 'address_9', + affiliateEarnings: '9', + })); + }); + + }); +}); + +async function populateFillsAndReferrals(): Promise { + const referenceDt = DateTime.utc(); + + await seedData(); + + // defaultWallet2 will be affiliate and defaultWallet will be referee + await AffiliateReferredUsersTable.create({ + affiliateAddress: defaultWallet2.address, + refereeAddress: defaultWallet.address, + referredAtBlock: '1', + }); + + // Create order and fils for defaultWallet (referee) + await OrderTable.create(defaultOrder); + + await Promise.all([ + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.TAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 1 }).toISO(), + eventId: defaultTendermintEventId, + price: '1', + size: '1', + fee: '1000', + affiliateRevShare: '500', + }), + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.MAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 1 }).toISO(), + eventId: defaultTendermintEventId2, + price: '1', + size: '1', + fee: '-1000', + affiliateRevShare: '500', + }), + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.MAKER, // use uneven number of maker/taker + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 2 }).toISO(), + eventId: defaultTendermintEventId3, + price: '1', + size: '1', + fee: '1000', + affiliateRevShare: '500', + }), + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.MAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 2 }).toISO(), + eventId: defaultTendermintEventId4, + price: '1', + size: '1', + fee: '1000', + affiliateRevShare: '500', + }), + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.TAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 2 }).toISO(), + eventId: defaultTendermintEventId4, + 
price: '1', + size: '1', + fee: '1000', + affiliateRevShare: '0', + type: FillType.LIQUIDATED, + }), + FillTable.create({ + ...defaultFill, + liquidity: Liquidity.MAKER, + subaccountId: defaultOrder.subaccountId, + createdAt: referenceDt.minus({ minutes: 2 }).toISO(), + eventId: defaultTendermintEventId, + price: '1', + size: '1', + fee: '100', + affiliateRevShare: '5', + type: FillType.LIQUIDATION, + }), + ]); + + return referenceDt; +} diff --git a/indexer/packages/postgres/__tests__/stores/affiliate-referred-users-table.test.ts b/indexer/packages/postgres/__tests__/stores/affiliate-referred-users-table.test.ts new file mode 100644 index 00000000000..367b26db680 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/affiliate-referred-users-table.test.ts @@ -0,0 +1,121 @@ +import { AffiliateReferredUserFromDatabase, AffiliateReferredUsersCreateObject } from '../../src/types'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { defaultAffiliateReferredUser } from '../helpers/constants'; +import * as AffiliateReferredUsersTable from '../../src/stores/affiliate-referred-users-table'; + +describe('AffiliateReferredUsers store', () => { + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates affiliate referee pairs', async () => { + await AffiliateReferredUsersTable.create(defaultAffiliateReferredUser); + await AffiliateReferredUsersTable.create({ + ...defaultAffiliateReferredUser, + refereeAddress: 'fake_address', + }); + }); + + it('Should not allow duplicate refree address', async () => { + await AffiliateReferredUsersTable.create(defaultAffiliateReferredUser); + + // Second creation should fail due to the duplicate refereeAddress + await expect( + AffiliateReferredUsersTable.create({ + ...defaultAffiliateReferredUser, + affiliateAddress: 'another_affiliate_address', + }), + 
).rejects.toThrow(); + }); + + it('Successfully finds all entries', async () => { + const entry1: AffiliateReferredUsersCreateObject = { + ...defaultAffiliateReferredUser, + refereeAddress: 'referee_address1', + }; + const entry2: AffiliateReferredUsersCreateObject = { + ...defaultAffiliateReferredUser, + affiliateAddress: 'affiliate_address1', + refereeAddress: 'referee_address2', + }; + + await Promise.all([ + AffiliateReferredUsersTable.create(defaultAffiliateReferredUser), + AffiliateReferredUsersTable.create(entry1), + AffiliateReferredUsersTable.create(entry2), + ]); + + const entries: AffiliateReferredUserFromDatabase[] = await AffiliateReferredUsersTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(entries.length).toEqual(3); + expect(entries).toEqual( + expect.arrayContaining([ + expect.objectContaining(defaultAffiliateReferredUser), + expect.objectContaining(entry1), + expect.objectContaining(entry2), + ]), + ); + }); + + it('Successfully finds entries by affiliate address', async () => { + const entry1: AffiliateReferredUsersCreateObject = { + affiliateAddress: 'affiliate_address1', + refereeAddress: 'referee_address1', + referredAtBlock: '1', + }; + const entry2: AffiliateReferredUsersCreateObject = { + affiliateAddress: 'affiliate_address1', + refereeAddress: 'referee_address2', + referredAtBlock: '20', + }; + + await AffiliateReferredUsersTable.create(entry1); + await AffiliateReferredUsersTable.create(entry2); + + const entries: AffiliateReferredUserFromDatabase[] | undefined = await AffiliateReferredUsersTable.findByAffiliateAddress('affiliate_address1'); + + if (entries) { + expect(entries.length).toEqual(2); + expect(entries).toEqual( + expect.arrayContaining([ + expect.objectContaining(entry1), + expect.objectContaining(entry2), + ]), + ); + } else { + throw new Error('findByAffiliateAddress returned undefined, expected an array'); + } + }); + + it('Successfully finds entry by referee address', async () => { + const entry1: 
AffiliateReferredUsersCreateObject = { + affiliateAddress: 'affiliate_address1', + refereeAddress: 'referee_address1', + referredAtBlock: '1', + }; + const entry2: AffiliateReferredUsersCreateObject = { + affiliateAddress: 'affiliate_address1', + refereeAddress: 'referee_address2', + referredAtBlock: '20', + }; + + await AffiliateReferredUsersTable.create(entry1); + await AffiliateReferredUsersTable.create(entry2); + + const entry: AffiliateReferredUserFromDatabase | undefined = await AffiliateReferredUsersTable.findByRefereeAddress('referee_address1'); + + expect(entry).toEqual(expect.objectContaining(entry1)); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/candle-table.test.ts b/indexer/packages/postgres/__tests__/stores/candle-table.test.ts index 9f6f31ce46c..aab085081e1 100644 --- a/indexer/packages/postgres/__tests__/stores/candle-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/candle-table.test.ts @@ -65,12 +65,11 @@ describe('CandleTable', () => { const updatedCandle: CandleUpdateObject = { id: defaultCandleId, open: '100', + orderbookMidPriceClose: '200', + orderbookMidPriceOpen: '300', }; - await CandleTable.update({ - id: defaultCandleId, - open: '100', - }); + await CandleTable.update(updatedCandle); const candle: CandleFromDatabase | undefined = await CandleTable.findById( defaultCandleId, diff --git a/indexer/packages/postgres/__tests__/stores/compliance-data-table.test.ts b/indexer/packages/postgres/__tests__/stores/compliance-data-table.test.ts index 2802726c831..f620ca50a14 100644 --- a/indexer/packages/postgres/__tests__/stores/compliance-data-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/compliance-data-table.test.ts @@ -1,5 +1,6 @@ import { ComplianceDataFromDatabase, ComplianceProvider } from '../../src/types'; import * as ComplianceDataTable from '../../src/stores/compliance-table'; +import * as WalletTable from '../../src/stores/wallet-table'; import { clearData, migrate, @@ -9,6 +10,7 @@ 
import { blockedComplianceData, blockedAddress, nonBlockedComplianceData, + defaultWallet, } from '../helpers/constants'; import { DateTime } from 'luxon'; @@ -139,6 +141,29 @@ describe('Compliance data store', () => { expect(complianceData).toEqual(blockedComplianceData); }); + it('Successfully filters by onlyDydxAddressWithDeposit', async () => { + // Create two compliance entries, one with a corresponding wallet entry and another without + await Promise.all([ + WalletTable.create(defaultWallet), + ComplianceDataTable.create(nonBlockedComplianceData), + ComplianceDataTable.create({ + ...nonBlockedComplianceData, + address: 'not_dydx_address', + }), + ]); + + const complianceData: ComplianceDataFromDatabase[] = await ComplianceDataTable.findAll( + { + addressInWalletsTable: true, + }, + [], + { readReplica: true }, + ); + + expect(complianceData.length).toEqual(1); + expect(complianceData[0]).toEqual(nonBlockedComplianceData); + }); + it('Unable finds compliance data', async () => { const complianceData: ComplianceDataFromDatabase | undefined = await ComplianceDataTable.findByAddressAndProvider( diff --git a/indexer/packages/postgres/__tests__/stores/compliance-status-table.test.ts b/indexer/packages/postgres/__tests__/stores/compliance-status-table.test.ts index d243cab2527..d3d3f0af545 100644 --- a/indexer/packages/postgres/__tests__/stores/compliance-status-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/compliance-status-table.test.ts @@ -98,7 +98,7 @@ describe('Compliance status store', () => { const complianceStatus: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll( { - status: ComplianceStatus.COMPLIANT, + status: [ComplianceStatus.COMPLIANT], }, [], { readReplica: true }, @@ -191,7 +191,7 @@ describe('Compliance status store', () => { compliantUpsertStatusData, { ...noncompliantStatusUpsertData, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, }, { 
...noncompliantStatusUpsertData, @@ -217,7 +217,7 @@ describe('Compliance status store', () => { }), expect.objectContaining({ ...noncompliantStatusUpsertData, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, }), expect.objectContaining({ ...noncompliantStatusUpsertData, diff --git a/indexer/packages/postgres/__tests__/stores/fill-table.test.ts b/indexer/packages/postgres/__tests__/stores/fill-table.test.ts index d31ed9e177c..e641c0da361 100644 --- a/indexer/packages/postgres/__tests__/stores/fill-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/fill-table.test.ts @@ -71,7 +71,7 @@ describe('Fill store', () => { FillTable.create(defaultFill), ]); - const fills: FillFromDatabase[] = await FillTable.findAll({}, [], {}); + const { results: fills } = await FillTable.findAll({}, [], {}); expect(fills.length).toEqual(2); expect(fills[0]).toEqual(expect.objectContaining(defaultFill)); @@ -91,7 +91,7 @@ describe('Fill store', () => { }), ]); - const fills: FillFromDatabase[] = await FillTable.findAll({}, [], { + const { results: fills } = await FillTable.findAll({}, [], { orderBy: [[FillColumns.eventId, Ordering.DESC]], }); @@ -103,6 +103,59 @@ describe('Fill store', () => { expect(fills[1]).toEqual(expect.objectContaining(defaultFill)); }); + it('Successfully finds fills using pagination', async () => { + await Promise.all([ + FillTable.create(defaultFill), + FillTable.create({ + ...defaultFill, + eventId: defaultTendermintEventId2, + }), + ]); + + const responsePageOne = await FillTable.findAll({ + page: 1, + limit: 1, + }, [], { + orderBy: [[FillColumns.eventId, Ordering.DESC]], + }); + + expect(responsePageOne.results.length).toEqual(1); + expect(responsePageOne.results[0]).toEqual(expect.objectContaining({ + ...defaultFill, + eventId: defaultTendermintEventId2, + })); + expect(responsePageOne.offset).toEqual(0); + expect(responsePageOne.total).toEqual(2); + + const responsePageTwo = await FillTable.findAll({ + 
page: 2, + limit: 1, + }, [], { + orderBy: [[FillColumns.eventId, Ordering.DESC]], + }); + + expect(responsePageTwo.results.length).toEqual(1); + expect(responsePageTwo.results[0]).toEqual(expect.objectContaining(defaultFill)); + expect(responsePageTwo.offset).toEqual(1); + expect(responsePageTwo.total).toEqual(2); + + const responsePageAllPages = await FillTable.findAll({ + page: 1, + limit: 2, + }, [], { + orderBy: [[FillColumns.eventId, Ordering.DESC]], + }); + + expect(responsePageAllPages.results.length).toEqual(2); + expect(responsePageAllPages.results[0]).toEqual(expect.objectContaining({ + ...defaultFill, + eventId: defaultTendermintEventId2, + })); + expect(responsePageAllPages.results[1]).toEqual(expect.objectContaining(defaultFill)); + expect(responsePageAllPages.offset).toEqual(0); + expect(responsePageAllPages.total).toEqual(2); + }); + it('Successfully finds Fill with eventId', async () => { await Promise.all([ FillTable.create(defaultFill), @@ -112,7 +165,7 @@ describe('Fill store', () => { }), ]); - const fills: FillFromDatabase[] = await FillTable.findAll( + const { results: fills } = await FillTable.findAll( { eventId: defaultFill.eventId, }, @@ -134,7 +187,7 @@ describe('Fill store', () => { ) => { await FillTable.create(defaultFill); - const fills: FillFromDatabase[] = await FillTable.findAll( + const { results: fills } = await FillTable.findAll( { createdBeforeOrAt: createdDateTime.plus({ seconds: deltaSeconds }).toISO(), }, @@ -155,7 +208,7 @@ describe('Fill store', () => { ) => { await FillTable.create(defaultFill); - const fills: FillFromDatabase[] = await FillTable.findAll( + const { results: fills } = await FillTable.findAll( { createdBeforeOrAtHeight: Big(createdHeight).plus(deltaBlocks).toFixed(), }, @@ -177,7 +230,7 @@ describe('Fill store', () => { ) => { await FillTable.create(defaultFill); - const fills: FillFromDatabase[] = await FillTable.findAll( + const { results: fills } = await FillTable.findAll( { createdOnOrAfter: 
createdDateTime.minus({ seconds: deltaSeconds }).toISO(), }, @@ -199,7 +252,7 @@ describe('Fill store', () => { ) => { await FillTable.create(defaultFill); - const fills: FillFromDatabase[] = await FillTable.findAll( + const { results: fills } = await FillTable.findAll( { createdOnOrAfterHeight: Big(createdHeight).minus(deltaBlocks).toFixed(), }, diff --git a/indexer/packages/postgres/__tests__/stores/firebase-notification-token-table.test.ts b/indexer/packages/postgres/__tests__/stores/firebase-notification-token-table.test.ts new file mode 100644 index 00000000000..66c12905178 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/firebase-notification-token-table.test.ts @@ -0,0 +1,122 @@ +import { FirebaseNotificationTokenFromDatabase } from '../../src/types'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { defaultAddress2, defaultFirebaseNotificationToken, defaultWallet } from '../helpers/constants'; +import * as FirebaseNotificationTokenTable from '../../src/stores/firebase-notification-token-table'; +import * as WalletTable from '../../src/stores/wallet-table'; + +describe('FirebaseNotificationToken store', () => { + beforeAll(async () => { + await migrate(); + }); + + beforeEach(async () => { + // Default wallet is required in the DB for token creation + // As token has a foreign key constraint on wallet + await WalletTable.create(defaultWallet); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates a Token', async () => { + await FirebaseNotificationTokenTable.create(defaultFirebaseNotificationToken); + const token = await FirebaseNotificationTokenTable.findByToken( + defaultFirebaseNotificationToken.token, + ); + expect(token).toEqual(expect.objectContaining(defaultFirebaseNotificationToken)); + }); + + it('Successfully upserts a Token multiple times', async () => { + await 
FirebaseNotificationTokenTable.upsert(defaultFirebaseNotificationToken); + let token: FirebaseNotificationTokenFromDatabase | undefined = await + FirebaseNotificationTokenTable.findByToken( + defaultFirebaseNotificationToken.token, + ); + + expect(token).toEqual(expect.objectContaining(defaultFirebaseNotificationToken)); + + // Upsert again to test update functionality + const updatedToken = { ...defaultFirebaseNotificationToken, updatedAt: new Date().toISOString(), language: 'es' }; + await FirebaseNotificationTokenTable.upsert(updatedToken); + token = await FirebaseNotificationTokenTable.findByToken( + defaultFirebaseNotificationToken.token, + ); + + expect(token).toEqual(expect.objectContaining(updatedToken)); + }); + + it('Successfully finds all Tokens', async () => { + await WalletTable.create({ ...defaultWallet, address: defaultAddress2 }); + const additionalToken = { + token: 'fake_token', + address: defaultAddress2, + language: 'en', + updatedAt: new Date().toISOString(), + }; + + await Promise.all([ + FirebaseNotificationTokenTable.create(defaultFirebaseNotificationToken), + FirebaseNotificationTokenTable.create(additionalToken), + ]); + + const tokens: FirebaseNotificationTokenFromDatabase[] = await FirebaseNotificationTokenTable + .findAll( + {}, + [], + { readReplica: true }, + ); + + expect(tokens.length).toEqual(2); + expect(tokens[0]).toEqual(expect.objectContaining(defaultFirebaseNotificationToken)); + expect(tokens[1]).toEqual(expect.objectContaining(additionalToken)); + }); + + it('Successfully finds a Token by token', async () => { + await FirebaseNotificationTokenTable.create(defaultFirebaseNotificationToken); + + const token: FirebaseNotificationTokenFromDatabase | undefined = await + FirebaseNotificationTokenTable.findByToken( + defaultFirebaseNotificationToken.token, + ); + + expect(token).toEqual(expect.objectContaining(defaultFirebaseNotificationToken)); + }); + + describe('deleteMany', () => { + it('should delete multiple tokens 
successfully', async () => { + const token1 = { ...defaultFirebaseNotificationToken, token: 'token1todelete' }; + const token2 = { ...defaultFirebaseNotificationToken, token: 'token2todelete' }; + const token3 = { ...defaultFirebaseNotificationToken, token: 'token3todelete' }; + + await Promise.all([ + FirebaseNotificationTokenTable.create(token1), + FirebaseNotificationTokenTable.create(token2), + FirebaseNotificationTokenTable.create(token3), + ]); + + // Delete the tokens + const tokensToDelete = ['token1todelete', 'token2todelete', 'token3todelete']; + await FirebaseNotificationTokenTable.deleteMany(tokensToDelete); + + // Check if the tokens were deleted + const remainingTokens = await FirebaseNotificationTokenTable.findAll({}, []); + expect(remainingTokens.length).toEqual(0); + }); + + it('should handle an empty array of token IDs', async () => { + await FirebaseNotificationTokenTable.create(defaultFirebaseNotificationToken); + const result = await FirebaseNotificationTokenTable.deleteMany([]); + expect(result).toEqual(0); + + // Verify the token still exists + const remainingTokens = await FirebaseNotificationTokenTable.findAll({}, []); + expect(remainingTokens.length).toEqual(1); + expect(remainingTokens[0]).toEqual(expect.objectContaining(defaultFirebaseNotificationToken)); + }); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/funding-index-updates-table.test.ts b/indexer/packages/postgres/__tests__/stores/funding-index-updates-table.test.ts index b824605b775..de7daaa34eb 100644 --- a/indexer/packages/postgres/__tests__/stores/funding-index-updates-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/funding-index-updates-table.test.ts @@ -8,7 +8,6 @@ import { defaultFundingIndexUpdateId, defaultPerpetualMarket, defaultPerpetualMarket2, - defaultPerpetualMarket3, defaultTendermintEventId2, defaultTendermintEventId3, } from '../helpers/constants'; @@ -212,11 +211,10 @@ describe('funding index update store', () => { '3', ); - 
expect(fundingIndexMap).toEqual({ - [defaultFundingIndexUpdate.perpetualId]: Big(defaultFundingIndexUpdate.fundingIndex), - [fundingIndexUpdates3.perpetualId]: Big(fundingIndexUpdates3.fundingIndex), - [defaultPerpetualMarket3.id]: Big(0), - }); + expect(fundingIndexMap[defaultFundingIndexUpdate.perpetualId]) + .toEqual(Big(defaultFundingIndexUpdate.fundingIndex)); + expect(fundingIndexMap[fundingIndexUpdates3.perpetualId]) + .toEqual(Big(fundingIndexUpdates3.fundingIndex)); }); it('Gets default funding index of 0 in funding index map if no funding indexes', async () => { @@ -225,11 +223,8 @@ describe('funding index update store', () => { '3', ); - expect(fundingIndexMap).toEqual({ - [defaultPerpetualMarket.id]: Big(0), - [defaultPerpetualMarket2.id]: Big(0), - [defaultPerpetualMarket3.id]: Big(0), - }); + expect(fundingIndexMap[defaultPerpetualMarket.id]).toEqual(Big(0)); + expect(fundingIndexMap[defaultPerpetualMarket2.id]).toEqual(Big(0)); }); it( @@ -242,11 +237,43 @@ describe('funding index update store', () => { '3', ); - expect(fundingIndexMap).toEqual({ - [defaultPerpetualMarket.id]: Big(defaultFundingIndexUpdate.fundingIndex), - [defaultPerpetualMarket2.id]: Big(0), - [defaultPerpetualMarket3.id]: Big(0), - }); + expect(fundingIndexMap[defaultPerpetualMarket.id]) + .toEqual(Big(defaultFundingIndexUpdate.fundingIndex)); + expect(fundingIndexMap[defaultPerpetualMarket2.id]).toEqual(Big(0)); }, ); + + it('Successfully finds funding index maps for multiple effectiveBeforeOrAtHeights', async () => { + const fundingIndexUpdates2: FundingIndexUpdatesCreateObject = { + ...defaultFundingIndexUpdate, + fundingIndex: '124', + effectiveAtHeight: updatedHeight, + effectiveAt: '1982-05-25T00:00:00.000Z', + eventId: defaultTendermintEventId2, + }; + const fundingIndexUpdates3: FundingIndexUpdatesCreateObject = { + ...defaultFundingIndexUpdate, + eventId: defaultTendermintEventId3, + perpetualId: defaultPerpetualMarket2.id, + }; + await Promise.all([ + 
FundingIndexUpdatesTable.create(defaultFundingIndexUpdate), + FundingIndexUpdatesTable.create(fundingIndexUpdates2), + FundingIndexUpdatesTable.create(fundingIndexUpdates3), + ]); + + const fundingIndexMaps: {[blockHeight:string]: FundingIndexMap} = await FundingIndexUpdatesTable + .findFundingIndexMaps( + ['3', '6'], + ); + + expect(fundingIndexMaps['3'][defaultFundingIndexUpdate.perpetualId]) + .toEqual(Big(defaultFundingIndexUpdate.fundingIndex)); + expect(fundingIndexMaps['3'][fundingIndexUpdates3.perpetualId]) + .toEqual(Big(fundingIndexUpdates3.fundingIndex)); + expect(fundingIndexMaps['6'][defaultFundingIndexUpdate.perpetualId]) + .toEqual(Big(fundingIndexUpdates2.fundingIndex)); + expect(fundingIndexMaps['6'][fundingIndexUpdates3.perpetualId]) + .toEqual(Big(fundingIndexUpdates3.fundingIndex)); + }); }); diff --git a/indexer/packages/postgres/__tests__/stores/leaderboard-pnl-table.test.ts b/indexer/packages/postgres/__tests__/stores/leaderboard-pnl-table.test.ts new file mode 100644 index 00000000000..30680c73e14 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/leaderboard-pnl-table.test.ts @@ -0,0 +1,98 @@ +import { LeaderboardPnlFromDatabase } from '../../src/types'; +import * as LeaderboardPnlTable from '../../src/stores/leaderboard-pnl-table'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { + defaultLeaderboardPnl2OneDay, + defaultLeaderboardPnlOneDay, + defaultLeaderboardPnl1AllTime, + defaultLeaderboardPnlOneDayToUpsert, + defaultWallet3, +} from '../helpers/constants'; +import { seedData } from '../helpers/mock-generators'; +import { WalletTable } from '../../src'; + +describe('LeaderboardPnl store', () => { + beforeEach(async () => { + await seedData(); + await WalletTable.create(defaultWallet3); + }); + + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates a 
LeaderboardPnl', async () => { + await LeaderboardPnlTable.create(defaultLeaderboardPnlOneDay); + }); + + it('Successfully creates multiple LeaderboardPnls', async () => { + await Promise.all([ + LeaderboardPnlTable.create(defaultLeaderboardPnlOneDay), + LeaderboardPnlTable.create(defaultLeaderboardPnl2OneDay), + LeaderboardPnlTable.create(defaultLeaderboardPnl1AllTime), + ]); + + const leaderboardPnls: LeaderboardPnlFromDatabase[] = await LeaderboardPnlTable.findAll( + {}, + [], + ); + + expect(leaderboardPnls.length).toEqual(3); + }); + + it('Successfully finds LeaderboardPnl with address and timespan', async () => { + await Promise.all([ + LeaderboardPnlTable.create(defaultLeaderboardPnlOneDay), + LeaderboardPnlTable.create(defaultLeaderboardPnl2OneDay), + LeaderboardPnlTable.create(defaultLeaderboardPnl1AllTime), + ]); + + const leaderboardPnl: LeaderboardPnlFromDatabase[] = await LeaderboardPnlTable.findAll( + { + address: [defaultLeaderboardPnlOneDay.address], + timeSpan: [defaultLeaderboardPnlOneDay.timeSpan], + }, + [], + ); + + expect(leaderboardPnl.length).toEqual(1); + expect(leaderboardPnl[0]).toEqual(expect.objectContaining(defaultLeaderboardPnlOneDay)); + }); + + it('Successfully upserts a LeaderboardPnl', async () => { + await LeaderboardPnlTable.upsert(defaultLeaderboardPnlOneDay); + + await LeaderboardPnlTable.upsert(defaultLeaderboardPnlOneDayToUpsert); + + const leaderboardPnls: LeaderboardPnlFromDatabase[] = await LeaderboardPnlTable.findAll( + {}, + [], + ); + + expect(leaderboardPnls.length).toEqual(1); + expect(leaderboardPnls[0]).toEqual( + expect.objectContaining(defaultLeaderboardPnlOneDayToUpsert)); + }); + + it('Successfully bulk upserts LeaderboardPnls', async () => { + await LeaderboardPnlTable.bulkUpsert( + [defaultLeaderboardPnlOneDay, defaultLeaderboardPnl2OneDay]); + + const leaderboardPnls: LeaderboardPnlFromDatabase[] = await LeaderboardPnlTable.findAll( + {}, + [], + ); + + expect(leaderboardPnls.length).toEqual(2); + 
expect(leaderboardPnls[0]).toEqual(expect.objectContaining(defaultLeaderboardPnlOneDay)); + expect(leaderboardPnls[1]).toEqual(expect.objectContaining(defaultLeaderboardPnl2OneDay)); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/order-table.test.ts b/indexer/packages/postgres/__tests__/stores/order-table.test.ts index 1cd9e26a60e..a5449405dff 100644 --- a/indexer/packages/postgres/__tests__/stores/order-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/order-table.test.ts @@ -4,6 +4,7 @@ import { OrderFromDatabase, Ordering, OrderStatus, + PaginationFromDatabase, TimeInForce, } from '../../src/types'; import * as OrderTable from '../../src/stores/order-table'; @@ -49,7 +50,9 @@ describe('Order store', () => { it('Successfully creates an Order with goodTilBlockTime', async () => { await OrderTable.create(defaultOrderGoodTilBlockTime); - const orders: OrderFromDatabase[] = await OrderTable.findAll({}, [], {}); + const { + results: orders, + }: PaginationFromDatabase = await OrderTable.findAll({}, [], {}); expect(orders).toHaveLength(1); expect(orders[0]).toEqual(expect.objectContaining({ @@ -67,7 +70,9 @@ describe('Order store', () => { }), ]); - const orders: OrderFromDatabase[] = await OrderTable.findAll({}, [], { + const { + results: orders, + }: PaginationFromDatabase = await OrderTable.findAll({}, [], { orderBy: [[OrderColumns.clientId, Ordering.ASC]], }); @@ -79,6 +84,66 @@ describe('Order store', () => { })); }); + it('Successfully finds all Orders using pagination', async () => { + await Promise.all([ + OrderTable.create(defaultOrder), + OrderTable.create({ + ...defaultOrder, + clientId: '2', + }), + ]); + + const responsePageOne: PaginationFromDatabase = await OrderTable.findAll({ + page: 1, + limit: 1, + }, + [], + { + orderBy: [[OrderColumns.clientId, Ordering.ASC]], + }); + + expect(responsePageOne.results.length).toEqual(1); + expect(responsePageOne.results[0]).toEqual(expect.objectContaining(defaultOrder)); + 
expect(responsePageOne.offset).toEqual(0); + expect(responsePageOne.total).toEqual(2); + + const responsePageTwo: PaginationFromDatabase = await OrderTable.findAll({ + page: 2, + limit: 1, + }, + [], + { + orderBy: [[OrderColumns.clientId, Ordering.ASC]], + }); + + expect(responsePageTwo.results.length).toEqual(1); + expect(responsePageTwo.results[0]).toEqual(expect.objectContaining({ + ...defaultOrder, + clientId: '2', + })); + expect(responsePageTwo.offset).toEqual(1); + expect(responsePageTwo.total).toEqual(2); + + const responsePageAllPages: PaginationFromDatabase = await OrderTable + .findAll({ + page: 1, + limit: 2, + }, + [], + { + orderBy: [[OrderColumns.clientId, Ordering.ASC]], + }); + + expect(responsePageAllPages.results.length).toEqual(2); + expect(responsePageAllPages.results[0]).toEqual(expect.objectContaining(defaultOrder)); + expect(responsePageAllPages.results[1]).toEqual(expect.objectContaining({ + ...defaultOrder, + clientId: '2', + })); + expect(responsePageAllPages.offset).toEqual(0); + expect(responsePageAllPages.total).toEqual(2); + }); + it('findOpenLongTermOrConditionalOrders', async () => { await Promise.all([ OrderTable.create(defaultOrder), @@ -106,7 +171,7 @@ describe('Order store', () => { }), ]); - const orders: OrderFromDatabase[] = await OrderTable.findAll( + const { results: orders }: PaginationFromDatabase = await OrderTable.findAll( { clientId: '1', }, @@ -202,11 +267,12 @@ describe('Order store', () => { OrderTable.create(defaultOrderGoodTilBlockTime), ]); - const orders: OrderFromDatabase[] = await OrderTable.findAll( - filter, - [], - { readReplica: true }, - ); + const { results: orders }: PaginationFromDatabase = await OrderTable + .findAll( + filter, + [], + { readReplica: true }, + ); expect(orders).toHaveLength(1); expect(orders[0]).toEqual(expect.objectContaining(expectedOrder)); diff --git a/indexer/packages/postgres/__tests__/stores/perpetual-position-table.test.ts 
b/indexer/packages/postgres/__tests__/stores/perpetual-position-table.test.ts index 68c2b3153d2..da274dfdf46 100644 --- a/indexer/packages/postgres/__tests__/stores/perpetual-position-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/perpetual-position-table.test.ts @@ -81,13 +81,13 @@ describe('PerpetualPosition store', () => { it('Successfully finds all PerpetualPositions', async () => { await PerpetualMarketTable.create({ ...defaultPerpetualMarket, - id: '3', + id: '100', }); await Promise.all([ PerpetualPositionTable.create(defaultPerpetualPosition), PerpetualPositionTable.create({ ...defaultPerpetualPosition, - perpetualId: '3', + perpetualId: '100', openEventId: defaultTendermintEventId2, }), ]); @@ -107,7 +107,7 @@ describe('PerpetualPosition store', () => { expect(perpetualPositions[0]).toEqual(expect.objectContaining(defaultPerpetualPosition)); expect(perpetualPositions[1]).toEqual(expect.objectContaining({ ...defaultPerpetualPosition, - perpetualId: '3', + perpetualId: '100', openEventId: defaultTendermintEventId2, })); }); @@ -115,13 +115,13 @@ describe('PerpetualPosition store', () => { it('Successfully finds PerpetualPosition with perpetualId', async () => { await PerpetualMarketTable.create({ ...defaultPerpetualMarket, - id: '3', + id: '100', }); await Promise.all([ PerpetualPositionTable.create(defaultPerpetualPosition), PerpetualPositionTable.create({ ...defaultPerpetualPosition, - perpetualId: '3', + perpetualId: '100', openEventId: defaultTendermintEventId2, }), ]); @@ -307,28 +307,28 @@ describe('PerpetualPosition store', () => { await Promise.all([ PerpetualMarketTable.create({ ...defaultPerpetualMarket, - id: '3', + id: '100', }), PerpetualMarketTable.create({ ...defaultPerpetualMarket, - id: '4', + id: '101', }), ]); const perpetualPosition2: PerpetualPositionCreateObject = { ...defaultPerpetualPosition, - perpetualId: '3', + perpetualId: '100', openEventId: defaultTendermintEventId2, }; const perpetualPosition3: 
PerpetualPositionCreateObject = { ...defaultPerpetualPosition, subaccountId: defaultSubaccountId2, - perpetualId: '4', + perpetualId: '101', openEventId: defaultTendermintEventId2, }; const perpetualPosition4: PerpetualPositionCreateObject = { ...defaultPerpetualPosition, subaccountId: defaultSubaccountId2, - perpetualId: '3', + perpetualId: '100', openEventId: defaultTendermintEventId, status: PerpetualPositionStatus.CLOSED, }; diff --git a/indexer/packages/postgres/__tests__/stores/persistent-cache-table.test.ts b/indexer/packages/postgres/__tests__/stores/persistent-cache-table.test.ts new file mode 100644 index 00000000000..02a926dd043 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/persistent-cache-table.test.ts @@ -0,0 +1,72 @@ +import { PersistentCacheFromDatabase } from '../../src/types'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { defaultKV, defaultKV2 } from '../helpers/constants'; +import * as PersistentCacheTable from '../../src/stores/persistent-cache-table'; + +describe('Persistent cache store', () => { + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates a key value pair', async () => { + await PersistentCacheTable.create(defaultKV); + }); + + it('Successfully upserts a kv pair multiple times', async () => { + const newKv = { + ...defaultKV, + value: 'someOtherValue', + }; + await PersistentCacheTable.upsert(newKv); + let kv: PersistentCacheFromDatabase | undefined = await PersistentCacheTable.findById( + defaultKV.key, + ); + expect(kv).toEqual(expect.objectContaining(newKv)); + + const newKv2 = { + ...defaultKV, + value: 'someOtherValue2', + }; + await PersistentCacheTable.upsert(newKv2); + kv = await PersistentCacheTable.findById(defaultKV.key); + + expect(kv).toEqual(expect.objectContaining(newKv2)); + }); + + it('Successfully finds all kv pairs', async () 
=> { + await Promise.all([ + PersistentCacheTable.create(defaultKV), + PersistentCacheTable.create(defaultKV2), + ]); + + const kvs: PersistentCacheFromDatabase[] = await PersistentCacheTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(kvs.length).toEqual(2); + expect(kvs).toEqual(expect.arrayContaining([ + expect.objectContaining(defaultKV), + expect.objectContaining(defaultKV2), + ])); + }); + + it('Successfully finds a kv pair', async () => { + await PersistentCacheTable.create(defaultKV); + + const kv: PersistentCacheFromDatabase | undefined = await PersistentCacheTable.findById( + defaultKV.key, + ); + + expect(kv).toEqual(expect.objectContaining(defaultKV)); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/pnl-ticks-table.test.ts b/indexer/packages/postgres/__tests__/stores/pnl-ticks-table.test.ts index cf2fd12ef3f..25fd8f09590 100644 --- a/indexer/packages/postgres/__tests__/stores/pnl-ticks-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/pnl-ticks-table.test.ts @@ -1,6 +1,8 @@ import { IsoString, + LeaderboardPnlCreateObject, Ordering, + PnlTickInterval, PnlTicksColumns, PnlTicksCreateObject, PnlTicksFromDatabase, @@ -9,11 +11,22 @@ import * as PnlTicksTable from '../../src/stores/pnl-ticks-table'; import * as BlockTable from '../../src/stores/block-table'; import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; import { seedData } from '../helpers/mock-generators'; +import * as WalletTable from '../../src/stores/wallet-table'; +import * as SubaccountTable from '../../src/stores/subaccount-table'; import { - defaultBlock, defaultBlock2, + defaultAddress, + defaultAddress2, + defaultBlock, + defaultBlock2, defaultPnlTick, defaultSubaccountId, defaultSubaccountId2, + defaultSubaccountIdWithAlternateAddress, + defaultSubaccountWithAlternateAddress, + defaultWallet2, + vaultSubaccount, + vaultSubaccountId, + vaultWallet, } from '../helpers/constants'; import { DateTime } from 'luxon'; 
import { ZERO_TIME_ISO_8601 } from '../../src/constants'; @@ -21,6 +34,8 @@ import { ZERO_TIME_ISO_8601 } from '../../src/constants'; describe('PnlTicks store', () => { beforeEach(async () => { await seedData(); + await WalletTable.create(defaultWallet2); + await SubaccountTable.create(defaultSubaccountWithAlternateAddress); }); beforeAll(async () => { @@ -58,7 +73,7 @@ describe('PnlTicks store', () => { PnlTicksTable.create(pnlTick2), ]); - const pnlTicks: PnlTicksFromDatabase[] = await PnlTicksTable.findAll({}, [], { + const { results: pnlTicks } = await PnlTicksTable.findAll({}, [], { orderBy: [[PnlTicksColumns.blockHeight, Ordering.ASC]], }); @@ -78,7 +93,7 @@ describe('PnlTicks store', () => { blockTime: defaultBlock.time, }; await PnlTicksTable.createMany([defaultPnlTick, pnlTick2]); - const pnlTicks: PnlTicksFromDatabase[] = await PnlTicksTable.findAll({}, [], { + const { results: pnlTicks } = await PnlTicksTable.findAll({}, [], { orderBy: [[PnlTicksColumns.blockHeight, Ordering.ASC]], }); @@ -101,7 +116,7 @@ describe('PnlTicks store', () => { }), ]); - const pnlTicks: PnlTicksFromDatabase[] = await PnlTicksTable.findAll( + const { results: pnlTicks } = await PnlTicksTable.findAll( { subaccountId: [defaultSubaccountId], }, @@ -112,6 +127,66 @@ describe('PnlTicks store', () => { expect(pnlTicks.length).toEqual(2); }); + it('Successfully finds PnlTicks using pagination', async () => { + const blockTime: IsoString = '2023-01-01T00:00:00.000Z'; + await Promise.all([ + PnlTicksTable.create(defaultPnlTick), + PnlTicksTable.create({ + ...defaultPnlTick, + createdAt: '2020-01-01T00:00:00.000Z', + blockHeight: '1000', + blockTime, + }), + ]); + + const responsePageOne = await PnlTicksTable.findAll({ + page: 1, + limit: 1, + }, [], { + orderBy: [[PnlTicksColumns.blockHeight, Ordering.DESC]], + }); + + expect(responsePageOne.results.length).toEqual(1); + expect(responsePageOne.results[0]).toEqual(expect.objectContaining({ + ...defaultPnlTick, + createdAt: 
'2020-01-01T00:00:00.000Z', + blockHeight: '1000', + blockTime, + })); + expect(responsePageOne.offset).toEqual(0); + expect(responsePageOne.total).toEqual(2); + + const responsePageTwo = await PnlTicksTable.findAll({ + page: 2, + limit: 1, + }, [], { + orderBy: [[PnlTicksColumns.blockHeight, Ordering.DESC]], + }); + + expect(responsePageTwo.results.length).toEqual(1); + expect(responsePageTwo.results[0]).toEqual(expect.objectContaining(defaultPnlTick)); + expect(responsePageTwo.offset).toEqual(1); + expect(responsePageTwo.total).toEqual(2); + + const responsePageAllPages = await PnlTicksTable.findAll({ + page: 1, + limit: 2, + }, [], { + orderBy: [[PnlTicksColumns.blockHeight, Ordering.DESC]], + }); + + expect(responsePageAllPages.results.length).toEqual(2); + expect(responsePageAllPages.results[0]).toEqual(expect.objectContaining({ + ...defaultPnlTick, + createdAt: '2020-01-01T00:00:00.000Z', + blockHeight: '1000', + blockTime, + })); + expect(responsePageAllPages.results[1]).toEqual(expect.objectContaining(defaultPnlTick)); + expect(responsePageAllPages.offset).toEqual(0); + expect(responsePageAllPages.total).toEqual(2); + }); + it('Successfully finds latest block time', async () => { const blockTime: IsoString = '2023-01-01T00:00:00.000Z'; await Promise.all([ @@ -124,14 +199,27 @@ describe('PnlTicks store', () => { }), ]); - const latestBlocktime: string = await PnlTicksTable.findLatestProcessedBlocktime(); + const { + maxBlockTime, count, + }: { + maxBlockTime: string, + count: number, + } = await PnlTicksTable.findLatestProcessedBlocktimeAndCount(); - expect(latestBlocktime).toEqual(blockTime); + expect(maxBlockTime).toEqual(blockTime); + expect(count).toEqual(1); }); it('Successfully finds latest block time without any pnl ticks', async () => { - const latestBlocktime: string = await PnlTicksTable.findLatestProcessedBlocktime(); - expect(latestBlocktime).toEqual(ZERO_TIME_ISO_8601); + const { + maxBlockTime, count, + }: { + maxBlockTime: string, + count: 
number, + } = await PnlTicksTable.findLatestProcessedBlocktimeAndCount(); + + expect(maxBlockTime).toEqual(ZERO_TIME_ISO_8601); + expect(count).toEqual(0); }); it('createMany PnlTicks, find most recent pnl ticks for each account', async () => { @@ -202,12 +290,443 @@ describe('PnlTicks store', () => { }, ]); - const mostRecent: { - [accountId: string]: PnlTicksCreateObject + const leaderboardRankedData: { + [accountId: string]: PnlTicksCreateObject, } = await PnlTicksTable.findMostRecentPnlTickForEachAccount( '3', ); - expect(mostRecent[defaultSubaccountId].equity).toEqual('1014'); - expect(mostRecent[defaultSubaccountId2].equity).toEqual('200'); + expect(leaderboardRankedData[defaultSubaccountId].equity).toEqual('1014'); + expect(leaderboardRankedData[defaultSubaccountId2].equity).toEqual('200'); }); + + const testCases = [ + { + description: 'Get all time ranked pnl ticks', + timeSpan: 'ALL_TIME', + expectedLength: 2, + expectedResults: [ + { + address: defaultAddress, + pnl: '1200', + currentEquity: '1100', + timeSpan: 'ALL_TIME', + rank: '1', + }, + { + address: defaultAddress2, + pnl: '300', + currentEquity: '200', + timeSpan: 'ALL_TIME', + rank: '2', + }, + ], + }, + { + description: 'Get one year ranked pnl ticks with missing pnl for one subaccount', + timeSpan: 'ONE_YEAR', + expectedLength: 2, + expectedResults: [ + { + address: defaultAddress2, + pnl: '300', + currentEquity: '200', + timeSpan: 'ONE_YEAR', + rank: '1', + }, + { + address: defaultAddress, + pnl: '40', + currentEquity: '1100', + timeSpan: 'ONE_YEAR', + rank: '2', + }, + ], + }, + { + description: 'Get thirty days ranked pnl ticks', + timeSpan: 'THIRTY_DAYS', + expectedLength: 2, + expectedResults: [ + { + address: defaultAddress, + pnl: '30', + currentEquity: '1100', + timeSpan: 'THIRTY_DAYS', + rank: '1', + }, + { + address: defaultAddress2, + pnl: '-30', + currentEquity: '200', + timeSpan: 'THIRTY_DAYS', + rank: '2', + }, + ], + }, + { + description: 'Get seven days ranked pnl ticks', + 
timeSpan: 'SEVEN_DAYS', + expectedLength: 2, + expectedResults: [ + { + address: defaultAddress, + pnl: '20', + currentEquity: '1100', + timeSpan: 'SEVEN_DAYS', + rank: '1', + }, + { + address: defaultAddress2, + pnl: '-20', + currentEquity: '200', + timeSpan: 'SEVEN_DAYS', + rank: '2', + }, + ], + }, + { + description: 'Get one day ranked pnl ticks', + timeSpan: 'ONE_DAY', + expectedLength: 2, + expectedResults: [ + { + address: defaultAddress, + pnl: '10', + currentEquity: '1100', + timeSpan: 'ONE_DAY', + rank: '1', + }, + { + address: defaultAddress2, + pnl: '-10', + currentEquity: '200', + timeSpan: 'ONE_DAY', + rank: '2', + }, + ], + }, + ]; + + it.each(testCases)('$description', async ({ timeSpan, expectedLength, expectedResults }) => { + await setupRankedPnlTicksData(); + + const leaderboardRankedData = await PnlTicksTable.getRankedPnlTicks(timeSpan); + + expect(leaderboardRankedData.length).toEqual(expectedLength); + + expectedResults.forEach((expectedResult, index) => { + expect(leaderboardRankedData[index]).toEqual(expect.objectContaining(expectedResult)); + }); + }); + + it('Ensure that vault addresses are not included in the leaderboard', async () => { + await setupRankedPnlTicksData(); + + await WalletTable.create(vaultWallet); + await SubaccountTable.create(vaultSubaccount); + await PnlTicksTable.create({ + subaccountId: vaultSubaccountId, + equity: '100', + createdAt: DateTime.utc().toISO(), + totalPnl: '100', + netTransfers: '50', + blockHeight: '9', + blockTime: defaultBlock.time, + }); + + const leaderboardRankedData: LeaderboardPnlCreateObject[] = await + PnlTicksTable.getRankedPnlTicks( + 'ALL_TIME', + ); + expect(leaderboardRankedData.length).toEqual(2); + }); + + it.each([ + { + description: 'Get hourly pnl ticks', + interval: PnlTickInterval.hour, + }, + { + description: 'Get daily pnl ticks', + interval: PnlTickInterval.day, + }, + ])('$description', async ({ + interval, + }: { + interval: PnlTickInterval, + }) => { + const createdTicks: 
PnlTicksFromDatabase[] = await setupIntervalPnlTicks(); + const pnlTicks: PnlTicksFromDatabase[] = await PnlTicksTable.getPnlTicksAtIntervals( + interval, + 7 * 24 * 60 * 60, // 1 week + [defaultSubaccountId, defaultSubaccountIdWithAlternateAddress], + DateTime.fromISO(createdTicks[8].blockTime).plus({ seconds: 1 }), + ); + // See setup function for created ticks. + // Should exclude tick that is within the same hour except the first. + const expectedHourlyTicks: PnlTicksFromDatabase[] = [ + createdTicks[7], + createdTicks[5], + createdTicks[2], + createdTicks[0], + ]; + // Should exclude ticks that is within the same day except for the first. + const expectedDailyTicks: PnlTicksFromDatabase[] = [ + createdTicks[7], + createdTicks[2], + ]; + + if (interval === PnlTickInterval.day) { + expect(pnlTicks).toEqual(expectedDailyTicks); + } else if (interval === PnlTickInterval.hour) { + expect(pnlTicks).toEqual(expectedHourlyTicks); + } + }); + + it('Gets latest pnl ticks for subaccounts before or at given date', async () => { + const createdTicks: PnlTicksFromDatabase[] = await setupIntervalPnlTicks(); + const latestTicks: PnlTicksFromDatabase[] = await PnlTicksTable.getLatestPnlTick( + [defaultSubaccountId, defaultSubaccountIdWithAlternateAddress], + DateTime.fromISO(createdTicks[8].blockTime).plus({ seconds: 1 }), + ); + expect(latestTicks).toEqual([createdTicks[8], createdTicks[3]]); + }); + + it('Gets empty pnl ticks for subaccounts before or at date earlier than all pnl data', async () => { + const createdTicks: PnlTicksFromDatabase[] = await setupIntervalPnlTicks(); + const latestTicks: PnlTicksFromDatabase[] = await PnlTicksTable.getLatestPnlTick( + [defaultSubaccountId, defaultSubaccountIdWithAlternateAddress], + DateTime.fromISO(createdTicks[0].blockTime).minus({ years: 1 }), + ); + expect(latestTicks).toEqual([]); + }); + + it('Gets empty pnl ticks for subaccounts before or at date if no subaccounts given', async () => { + const createdTicks: 
PnlTicksFromDatabase[] = await setupIntervalPnlTicks(); + const latestTicks: PnlTicksFromDatabase[] = await PnlTicksTable.getLatestPnlTick( + [], + DateTime.fromISO(createdTicks[0].blockTime).plus({ years: 1 }), + ); + expect(latestTicks).toEqual([]); + }); + }); + +async function setupRankedPnlTicksData() { + await Promise.all([ + BlockTable.create({ + blockHeight: '3', + time: defaultBlock.time, + }), + BlockTable.create({ + blockHeight: '5', + time: defaultBlock.time, + }), + BlockTable.create({ + blockHeight: '7', + time: defaultBlock.time, + }), + BlockTable.create({ + blockHeight: '9', + time: defaultBlock.time, + }), + ]); + await PnlTicksTable.createMany([ + { + subaccountId: defaultSubaccountId, + equity: '1100', + createdAt: DateTime.utc().toISO(), + totalPnl: '1200', + netTransfers: '50', + blockHeight: '9', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountId, + equity: '1090', + createdAt: DateTime.utc().minus({ day: 1 }).toISO(), + totalPnl: '1190', + netTransfers: '50', + blockHeight: '7', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountId, + equity: '1080', + createdAt: DateTime.utc().minus({ day: 7 }).toISO(), + totalPnl: '1180', + netTransfers: '50', + blockHeight: '5', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountId, + equity: '1070', + createdAt: DateTime.utc().minus({ day: 30 }).toISO(), + totalPnl: '1170', + netTransfers: '50', + blockHeight: '3', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountId, + equity: '1060', + createdAt: DateTime.utc().minus({ day: 365 }).toISO(), + totalPnl: '1160', + netTransfers: '50', + blockHeight: '1', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '200', + createdAt: DateTime.utc().toISO(), + totalPnl: '300', + netTransfers: '50', + blockHeight: '9', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + 
equity: '210', + createdAt: DateTime.utc().minus({ day: 1 }).toISO(), + totalPnl: '310', + netTransfers: '50', + blockHeight: '7', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '220', + createdAt: DateTime.utc().minus({ week: 1 }).toISO(), + totalPnl: '320', + netTransfers: '50', + blockHeight: '5', + blockTime: defaultBlock.time, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '230', + createdAt: DateTime.utc().minus({ month: 1 }).toISO(), + totalPnl: '330', + netTransfers: '50', + blockHeight: '3', + blockTime: defaultBlock.time, + }, + ]); +} + +async function setupIntervalPnlTicks(): Promise { + const currentTime: DateTime = DateTime.utc().startOf('day'); + const tenMinAgo: string = currentTime.minus({ minute: 10 }).toISO(); + const almostTenMinAgo: string = currentTime.minus({ second: 603 }).toISO(); + const twoHoursAgo: string = currentTime.minus({ hour: 2 }).toISO(); + const twoDaysAgo: string = currentTime.minus({ day: 2 }).toISO(); + const monthAgo: string = currentTime.minus({ day: 30 }).toISO(); + await Promise.all([ + BlockTable.create({ + blockHeight: '3', + time: monthAgo, + }), + BlockTable.create({ + blockHeight: '4', + time: twoDaysAgo, + }), + BlockTable.create({ + blockHeight: '6', + time: twoHoursAgo, + }), + BlockTable.create({ + blockHeight: '8', + time: almostTenMinAgo, + }), + BlockTable.create({ + blockHeight: '10', + time: tenMinAgo, + }), + ]); + const createdTicks: PnlTicksFromDatabase[] = await PnlTicksTable.createMany([ + { + subaccountId: defaultSubaccountId, + equity: '1100', + createdAt: almostTenMinAgo, + totalPnl: '1200', + netTransfers: '50', + blockHeight: '10', + blockTime: almostTenMinAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1090', + createdAt: tenMinAgo, + totalPnl: '1190', + netTransfers: '50', + blockHeight: '8', + blockTime: tenMinAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1080', + createdAt: 
twoHoursAgo, + totalPnl: '1180', + netTransfers: '50', + blockHeight: '6', + blockTime: twoHoursAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1070', + createdAt: twoDaysAgo, + totalPnl: '1170', + netTransfers: '50', + blockHeight: '4', + blockTime: twoDaysAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1200', + createdAt: monthAgo, + totalPnl: '1170', + netTransfers: '50', + blockHeight: '3', + blockTime: monthAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '200', + createdAt: almostTenMinAgo, + totalPnl: '300', + netTransfers: '50', + blockHeight: '10', + blockTime: almostTenMinAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '210', + createdAt: tenMinAgo, + totalPnl: '310', + netTransfers: '50', + blockHeight: '8', + blockTime: tenMinAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '220', + createdAt: twoHoursAgo, + totalPnl: '320', + netTransfers: '50', + blockHeight: '6', + blockTime: twoHoursAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '230', + createdAt: twoDaysAgo, + totalPnl: '330', + netTransfers: '50', + blockHeight: '4', + blockTime: twoDaysAgo, + }, + ]); + return createdTicks; +} diff --git a/indexer/packages/postgres/__tests__/stores/subaccount-usernames-table.test.ts b/indexer/packages/postgres/__tests__/stores/subaccount-usernames-table.test.ts new file mode 100644 index 00000000000..71db35e9432 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/subaccount-usernames-table.test.ts @@ -0,0 +1,107 @@ +import { SubaccountFromDatabase, SubaccountUsernamesFromDatabase, SubaccountsWithoutUsernamesResult } from '../../src/types'; +import * as SubaccountUsernamesTable from '../../src/stores/subaccount-usernames-table'; +import * as WalletTable from '../../src/stores/wallet-table'; +import * as SubaccountsTable from '../../src/stores/subaccount-table'; +import { clearData, migrate, 
teardown } from '../../src/helpers/db-helpers'; +import { + defaultSubaccountUsername, + defaultSubaccountUsername2, + defaultWallet, + defaultWallet2, + duplicatedSubaccountUsername, + subaccountUsernameWithAlternativeAddress, +} from '../helpers/constants'; +import { seedData, seedAdditionalSubaccounts } from '../helpers/mock-generators'; + +describe('SubaccountUsernames store', () => { + beforeEach(async () => { + await seedData(); + await seedAdditionalSubaccounts(); + }); + + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates a SubaccountUsername', async () => { + await SubaccountUsernamesTable.create(defaultSubaccountUsername); + }); + + it('Successfully finds all SubaccountUsernames', async () => { + await Promise.all([ + SubaccountUsernamesTable.create(defaultSubaccountUsername), + SubaccountUsernamesTable.create(defaultSubaccountUsername2), + ]); + + const subaccountUsernames: + SubaccountUsernamesFromDatabase[] = await SubaccountUsernamesTable.findAll( + {}, + [], + {}, + ); + + expect(subaccountUsernames.length).toEqual(2); + expect(subaccountUsernames[0]).toEqual(expect.objectContaining(defaultSubaccountUsername)); + expect(subaccountUsernames[1]).toEqual(expect.objectContaining(defaultSubaccountUsername2)); + }); + + it('Successfully finds SubaccountUsername with subaccountId', async () => { + await Promise.all([ + SubaccountUsernamesTable.create(defaultSubaccountUsername), + SubaccountUsernamesTable.create(defaultSubaccountUsername2), + ]); + + const subaccountUsername: + SubaccountUsernamesFromDatabase | undefined = await SubaccountUsernamesTable.findByUsername( + defaultSubaccountUsername.username, + ); + expect(subaccountUsername).toEqual(expect.objectContaining(defaultSubaccountUsername)); + }); + + it('Duplicate SubaccountUsername creation fails', async () => { + await 
SubaccountUsernamesTable.create(defaultSubaccountUsername); + await expect(SubaccountUsernamesTable.create(duplicatedSubaccountUsername)).rejects.toThrow(); + }); + + it('Creation of row without subaccountId fails', async () => { + await expect(SubaccountUsernamesTable.create({ ...defaultSubaccountUsername, subaccountId: '' })).rejects.toThrow(); + }); + + it('Get subaccount ids which arent in the subaccount usernames table', async () => { + const subaccounts: SubaccountFromDatabase[] = await SubaccountsTable.findAll({ + subaccountNumber: 0, + }, [], {}); + const subaccountLength = subaccounts.length; + await SubaccountUsernamesTable.create(defaultSubaccountUsername); + const subaccountIds: SubaccountsWithoutUsernamesResult[] = await + SubaccountUsernamesTable.getSubaccountZerosWithoutUsernames(1000); + expect(subaccountIds.length).toEqual(subaccountLength - 1); + }); + + it('Get username using address', async () => { + await Promise.all([ + // Add username for defaultWallet + SubaccountUsernamesTable.create(defaultSubaccountUsername), + SubaccountUsernamesTable.create(defaultSubaccountUsername2), + // Add one username for alternativeWallet + WalletTable.create(defaultWallet2), + SubaccountUsernamesTable.create(subaccountUsernameWithAlternativeAddress), + ]); + + // Should only get username for defaultWallet's subaccount 0 + const usernames = await SubaccountUsernamesTable.findByAddress([defaultWallet.address]); + expect(usernames.length).toEqual(1); + expect(usernames[0]).toEqual(expect.objectContaining({ + address: defaultWallet.address, + username: defaultSubaccountUsername.username, + })); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/transfer-table.test.ts b/indexer/packages/postgres/__tests__/stores/transfer-table.test.ts index a2c52739cee..d5d6c84e745 100644 --- a/indexer/packages/postgres/__tests__/stores/transfer-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/transfer-table.test.ts @@ -23,6 +23,7 @@ import { 
defaultSubaccountId3, defaultTendermintEventId, defaultTendermintEventId2, + defaultTendermintEventId3, defaultTransfer, defaultTransfer2, defaultTransfer3, @@ -31,6 +32,8 @@ import { } from '../helpers/constants'; import Big from 'big.js'; import { CheckViolationError } from 'objection'; +import { DateTime } from 'luxon'; +import { USDC_ASSET_ID } from '../../src'; describe('Transfer store', () => { beforeEach(async () => { @@ -115,7 +118,7 @@ describe('Transfer store', () => { TransferTable.create(transfer2), ]); - const transfers: TransferFromDatabase[] = await TransferTable.findAllToOrFromSubaccountId( + const { results: transfers } = await TransferTable.findAllToOrFromSubaccountId( { subaccountId: [defaultSubaccountId] }, [], { orderBy: [[TransferColumns.id, Ordering.ASC]], @@ -142,7 +145,7 @@ describe('Transfer store', () => { TransferTable.create(transfer2), ]); - const transfers: TransferFromDatabase[] = await TransferTable.findAllToOrFromSubaccountId( + const { results: transfers } = await TransferTable.findAllToOrFromSubaccountId( { subaccountId: [defaultSubaccountId], eventId: [defaultTendermintEventId], @@ -155,6 +158,57 @@ describe('Transfer store', () => { expect(transfers[0]).toEqual(expect.objectContaining(defaultTransfer)); }); + it('Successfully finds all transfers to and from subaccount using pagination', async () => { + const transfer2: TransferCreateObject = { + senderSubaccountId: defaultSubaccountId2, + recipientSubaccountId: defaultSubaccountId, + assetId: defaultAsset2.id, + size: '5', + eventId: defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + }; + await Promise.all([ + TransferTable.create(defaultTransfer), + TransferTable.create(transfer2), + ]); + + const responsePageOne = await TransferTable.findAllToOrFromSubaccountId( + { subaccountId: [defaultSubaccountId], page: 1, limit: 1 }, + [], { + orderBy: [[TransferColumns.id, 
Ordering.ASC]], + }); + + expect(responsePageOne.results.length).toEqual(1); + expect(responsePageOne.results[0]).toEqual(expect.objectContaining(defaultTransfer)); + expect(responsePageOne.offset).toEqual(0); + expect(responsePageOne.total).toEqual(2); + + const responsePageTwo = await TransferTable.findAllToOrFromSubaccountId( + { subaccountId: [defaultSubaccountId], page: 2, limit: 1 }, + [], { + orderBy: [[TransferColumns.id, Ordering.ASC]], + }); + + expect(responsePageTwo.results.length).toEqual(1); + expect(responsePageTwo.results[0]).toEqual(expect.objectContaining(transfer2)); + expect(responsePageTwo.offset).toEqual(1); + expect(responsePageTwo.total).toEqual(2); + + const responsePageAllPages = await TransferTable.findAllToOrFromSubaccountId( + { subaccountId: [defaultSubaccountId], page: 1, limit: 2 }, + [], { + orderBy: [[TransferColumns.id, Ordering.ASC]], + }); + + expect(responsePageAllPages.results.length).toEqual(2); + expect(responsePageAllPages.results[0]).toEqual(expect.objectContaining(defaultTransfer)); + expect(responsePageAllPages.results[1]).toEqual(expect.objectContaining(transfer2)); + expect(responsePageAllPages.offset).toEqual(0); + expect(responsePageAllPages.total).toEqual(2); + }); + it('Successfully finds Transfer with eventId', async () => { await Promise.all([ TransferTable.create(defaultTransfer), @@ -234,7 +288,7 @@ describe('Transfer store', () => { TransferTable.create(transfer2), ]); - const transfers: TransferFromDatabase[] = await TransferTable.findAllToOrFromSubaccountId( + const { results: transfers } = await TransferTable.findAllToOrFromSubaccountId( { subaccountId: [defaultSubaccountId], createdBeforeOrAt: '2000-05-25T00:00:00.000Z', @@ -268,6 +322,7 @@ describe('Transfer store', () => { await WalletTable.create({ address: defaultWalletAddress, totalTradingRewards: '0', + totalVolume: '0', }); const invalidDeposit: TransferCreateObject = { ...defaultDeposit, @@ -299,6 +354,7 @@ describe('Transfer store', () => { await 
WalletTable.create({ address: defaultWalletAddress, totalTradingRewards: '0', + totalVolume: '0', }); await Promise.all([ TransferTable.create(defaultTransfer), @@ -515,4 +571,94 @@ describe('Transfer store', () => { [defaultAsset2.id]: Big('-5.3'), }); }); + + it('Successfully gets the latest createdAt for subaccounts', async () => { + const now = DateTime.utc(); + + const transfer2 = { + senderSubaccountId: defaultSubaccountId2, + recipientSubaccountId: defaultSubaccountId, + assetId: defaultAsset2.id, + size: '5', + eventId: defaultTendermintEventId3, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: now.minus({ hours: 2 }).toISO(), + createdAtHeight: createdHeight, + }; + + const transfer3 = { + senderSubaccountId: defaultSubaccountId2, + recipientSubaccountId: defaultSubaccountId, + assetId: defaultAsset2.id, + size: '5', + eventId: defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: now.minus({ hours: 1 }).toISO(), + createdAtHeight: createdHeight, + }; + + await Promise.all([ + TransferTable.create(defaultTransfer), + TransferTable.create(transfer2), + TransferTable.create(transfer3), + ]); + + const transferTimes: { [subaccountId: string]: string } = await + TransferTable.getLastTransferTimeForSubaccounts( + [defaultSubaccountId, defaultSubaccountId2], + ); + + expect(transferTimes[defaultSubaccountId]).toEqual(defaultTransfer.createdAt); + expect(transferTimes[defaultSubaccountId2]).toEqual(defaultTransfer.createdAt); + }); + + describe('getNetTransfersBetweenSubaccountIds', () => { + it('Successfully gets total net Transfers between two subaccounts', async () => { + await Promise.all([ + TransferTable.create({ + ...defaultTransfer, + size: '20', + }), + TransferTable.create({ + ...defaultTransfer, + size: '30', + eventId: defaultTendermintEventId2, + }), + TransferTable.create({ + ...defaultTransfer, + senderSubaccountId: defaultSubaccountId2, + recipientSubaccountId: 
defaultSubaccountId, + size: '10', + eventId: defaultTendermintEventId3, + }), + ]); + + const netTransfers: string = await TransferTable.getNetTransfersBetweenSubaccountIds( + defaultSubaccountId, + defaultSubaccountId2, + USDC_ASSET_ID, + ); + + expect(netTransfers).toEqual('40'); // 20 + 30 - 10 + + // Test the other way around + const negativeNetTransfers: string = await TransferTable.getNetTransfersBetweenSubaccountIds( + defaultSubaccountId2, + defaultSubaccountId, + USDC_ASSET_ID, + ); + + expect(negativeNetTransfers).toEqual('-40'); // 10 - 20 - 30 + }); + + it('Successfully gets total net Transfers between two subaccounts with no transfers', async () => { + const netTransfers: string = await TransferTable.getNetTransfersBetweenSubaccountIds( + defaultSubaccountId, + defaultSubaccountId2, + USDC_ASSET_ID, + ); + + expect(netTransfers).toEqual('0'); + }); + }); }); diff --git a/indexer/packages/postgres/__tests__/stores/vault-pnl-ticks-view.test.ts b/indexer/packages/postgres/__tests__/stores/vault-pnl-ticks-view.test.ts new file mode 100644 index 00000000000..eaf69d671f2 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/vault-pnl-ticks-view.test.ts @@ -0,0 +1,209 @@ +import { + PnlTickInterval, + PnlTicksFromDatabase, +} from '../../src/types'; +import * as VaultPnlTicksView from '../../src/stores/vault-pnl-ticks-view'; +import * as PnlTicksTable from '../../src/stores/pnl-ticks-table'; +import * as BlockTable from '../../src/stores/block-table'; +import * as VaultTable from '../../src/stores/vault-table'; +import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; +import { seedData } from '../helpers/mock-generators'; +import * as WalletTable from '../../src/stores/wallet-table'; +import * as SubaccountTable from '../../src/stores/subaccount-table'; +import { + defaultSubaccountId, + defaultSubaccountIdWithAlternateAddress, + defaultSubaccountWithAlternateAddress, + defaultWallet2, + defaultVault, + defaultSubaccount, +} 
from '../helpers/constants'; +import { DateTime } from 'luxon'; + +describe('PnlTicks store', () => { + beforeEach(async () => { + await seedData(); + await WalletTable.create(defaultWallet2); + await SubaccountTable.create(defaultSubaccountWithAlternateAddress); + await Promise.all([ + VaultTable.create({ + ...defaultVault, + address: defaultSubaccount.address, + }), + VaultTable.create({ + ...defaultVault, + address: defaultSubaccountWithAlternateAddress.address, + }), + ]); + }); + + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it.each([ + { + description: 'Get hourly pnl ticks', + interval: PnlTickInterval.hour, + }, + { + description: 'Get daily pnl ticks', + interval: PnlTickInterval.day, + }, + ])('$description', async ({ + interval, + }: { + interval: PnlTickInterval, + }) => { + const createdTicks: PnlTicksFromDatabase[] = await setupIntervalPnlTicks(); + await VaultPnlTicksView.refreshDailyView(); + await VaultPnlTicksView.refreshHourlyView(); + const pnlTicks: PnlTicksFromDatabase[] = await VaultPnlTicksView.getVaultsPnl( + interval, + 7 * 24 * 60 * 60, // 1 week + DateTime.fromISO(createdTicks[8].blockTime).plus({ seconds: 1 }), + ); + // See setup function for created ticks. + // Should exclude tick that is within the same hour except the first. + const expectedHourlyTicks: PnlTicksFromDatabase[] = [ + createdTicks[7], + createdTicks[5], + createdTicks[2], + createdTicks[0], + ]; + // Should exclude ticks that is within the same day except for the first. 
+ const expectedDailyTicks: PnlTicksFromDatabase[] = [ + createdTicks[7], + createdTicks[2], + ]; + + if (interval === PnlTickInterval.day) { + expect(pnlTicks).toEqual(expectedDailyTicks); + } else if (interval === PnlTickInterval.hour) { + expect(pnlTicks).toEqual(expectedHourlyTicks); + } + }); + + async function setupIntervalPnlTicks(): Promise { + const currentTime: DateTime = DateTime.utc().startOf('day'); + const tenMinAgo: string = currentTime.minus({ minute: 10 }).toISO(); + const almostTenMinAgo: string = currentTime.minus({ second: 603 }).toISO(); + const twoHoursAgo: string = currentTime.minus({ hour: 2 }).toISO(); + const twoDaysAgo: string = currentTime.minus({ day: 2 }).toISO(); + const monthAgo: string = currentTime.minus({ day: 30 }).toISO(); + await Promise.all([ + BlockTable.create({ + blockHeight: '3', + time: monthAgo, + }), + BlockTable.create({ + blockHeight: '4', + time: twoDaysAgo, + }), + BlockTable.create({ + blockHeight: '6', + time: twoHoursAgo, + }), + BlockTable.create({ + blockHeight: '8', + time: almostTenMinAgo, + }), + BlockTable.create({ + blockHeight: '10', + time: tenMinAgo, + }), + ]); + const createdTicks: PnlTicksFromDatabase[] = await PnlTicksTable.createMany([ + { + subaccountId: defaultSubaccountId, + equity: '1100', + createdAt: almostTenMinAgo, + totalPnl: '1200', + netTransfers: '50', + blockHeight: '10', + blockTime: almostTenMinAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1090', + createdAt: tenMinAgo, + totalPnl: '1190', + netTransfers: '50', + blockHeight: '8', + blockTime: tenMinAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1080', + createdAt: twoHoursAgo, + totalPnl: '1180', + netTransfers: '50', + blockHeight: '6', + blockTime: twoHoursAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1070', + createdAt: twoDaysAgo, + totalPnl: '1170', + netTransfers: '50', + blockHeight: '4', + blockTime: twoDaysAgo, + }, + { + subaccountId: defaultSubaccountId, + equity: '1200', + 
createdAt: monthAgo, + totalPnl: '1170', + netTransfers: '50', + blockHeight: '3', + blockTime: monthAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '200', + createdAt: almostTenMinAgo, + totalPnl: '300', + netTransfers: '50', + blockHeight: '10', + blockTime: almostTenMinAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '210', + createdAt: tenMinAgo, + totalPnl: '310', + netTransfers: '50', + blockHeight: '8', + blockTime: tenMinAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '220', + createdAt: twoHoursAgo, + totalPnl: '320', + netTransfers: '50', + blockHeight: '6', + blockTime: twoHoursAgo, + }, + { + subaccountId: defaultSubaccountIdWithAlternateAddress, + equity: '230', + createdAt: twoDaysAgo, + totalPnl: '330', + netTransfers: '50', + blockHeight: '4', + blockTime: twoDaysAgo, + }, + ]); + return createdTicks; + } +}); diff --git a/indexer/packages/postgres/__tests__/stores/vault-table.test.ts b/indexer/packages/postgres/__tests__/stores/vault-table.test.ts new file mode 100644 index 00000000000..134e45f3376 --- /dev/null +++ b/indexer/packages/postgres/__tests__/stores/vault-table.test.ts @@ -0,0 +1,81 @@ +import * as VaultTable from '../../src/stores/vault-table'; +import { + clearData, + migrate, + teardown, +} from '../../src/helpers/db-helpers'; +import { defaultVault, defaultAddress } from '../helpers/constants'; +import { VaultFromDatabase, VaultStatus } from '../../src/types'; + +describe('Vault store', () => { + beforeAll(async () => { + await migrate(); + }); + + afterEach(async () => { + await clearData(); + }); + + afterAll(async () => { + await teardown(); + }); + + it('Successfully creates a vault', async () => { + await VaultTable.create(defaultVault); + }); + + it('Successfully finds all vaults', async () => { + await Promise.all([ + VaultTable.create(defaultVault), + VaultTable.create({ + ...defaultVault, + address: defaultAddress, + clobPairId: 
'1', + }), + ]); + + const vaults: VaultFromDatabase[] = await VaultTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(vaults.length).toEqual(2); + expect(vaults[0]).toEqual(expect.objectContaining(defaultVault)); + expect(vaults[1]).toEqual(expect.objectContaining({ + ...defaultVault, + address: defaultAddress, + clobPairId: '1', + })); + }); + + it('Succesfully upserts a vault', async () => { + await VaultTable.create(defaultVault); + + let vaults: VaultFromDatabase[] = await VaultTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(vaults.length).toEqual(1); + expect(vaults[0]).toEqual(expect.objectContaining(defaultVault)); + + await VaultTable.upsert({ + ...defaultVault, + status: VaultStatus.CLOSE_ONLY, + }); + + vaults = await VaultTable.findAll( + {}, + [], + { readReplica: true }, + ); + + expect(vaults.length).toEqual(1); + expect(vaults[0]).toEqual(expect.objectContaining({ + ...defaultVault, + status: VaultStatus.CLOSE_ONLY, + })); + }); +}); diff --git a/indexer/packages/postgres/__tests__/stores/wallet-table.test.ts b/indexer/packages/postgres/__tests__/stores/wallet-table.test.ts index 84cbabf639c..39381cc3b7d 100644 --- a/indexer/packages/postgres/__tests__/stores/wallet-table.test.ts +++ b/indexer/packages/postgres/__tests__/stores/wallet-table.test.ts @@ -1,7 +1,23 @@ import { WalletFromDatabase } from '../../src/types'; import { clearData, migrate, teardown } from '../../src/helpers/db-helpers'; -import { defaultWallet2 } from '../helpers/constants'; +import { DateTime } from 'luxon'; +import { + defaultFill, + defaultOrder, + defaultTendermintEventId, + defaultTendermintEventId2, + defaultTendermintEventId3, + defaultTendermintEventId4, + defaultWallet, + defaultWallet2, + isolatedMarketOrder, + defaultSubaccountId, + isolatedSubaccountId, +} from '../helpers/constants'; +import * as FillTable from '../../src/stores/fill-table'; +import * as OrderTable from '../../src/stores/order-table'; import * as WalletTable 
from '../../src/stores/wallet-table'; +import { seedData } from '../helpers/mock-generators'; describe('Wallet store', () => { beforeAll(async () => { @@ -27,10 +43,16 @@ describe('Wallet store', () => { ); expect(wallet).toEqual(expect.objectContaining(defaultWallet2)); - await WalletTable.upsert(defaultWallet2); + await WalletTable.upsert({ + ...defaultWallet2, + totalVolume: '100.1', + }); wallet = await WalletTable.findById(defaultWallet2.address); - expect(wallet).toEqual(expect.objectContaining(defaultWallet2)); + expect(wallet).toEqual(expect.objectContaining({ + ...defaultWallet2, + totalVolume: '100.1', + })); }); it('Successfully finds all Wallets', async () => { @@ -39,6 +61,7 @@ describe('Wallet store', () => { WalletTable.create({ address: 'fake_address', totalTradingRewards: '0', + totalVolume: '0', }), ]); @@ -64,4 +87,89 @@ describe('Wallet store', () => { expect(wallet).toEqual(expect.objectContaining(defaultWallet2)); }); + + describe('updateTotalVolume', () => { + it('Successfully updates totalVolume for time window multiple times', async () => { + const firstFillTime: DateTime = await populateWalletSubaccountFill(); + + // Update totalVolume for a time window that covers all fills + await WalletTable.updateTotalVolume( + firstFillTime.minus({ hours: 1 }).toISO(), // need to minus because left bound is exclusive + firstFillTime.plus({ hours: 1 }).toISO(), + ); + const wallet1: WalletFromDatabase | undefined = await WalletTable + .findById(defaultWallet.address); + expect(wallet1).toEqual(expect.objectContaining({ + ...defaultWallet, + totalVolume: '103', + })); + + // Update totalVolume for a time window that excludes some fills + // For convenience, we will reuse the existing fills data. The total volume calculated in this + // window should be added to the total volume above. 
+ await WalletTable.updateTotalVolume( + firstFillTime.toISO(), // exclusive -> filters out first fill from each subaccount + firstFillTime.plus({ minutes: 2 }).toISO(), + ); + const wallet2 = await WalletTable.findById(defaultWallet.address); + expect(wallet2).toEqual(expect.objectContaining({ + ...defaultWallet, + totalVolume: '105', // 103 + 2 + })); + }); + }); }); + +/** + * Helper function to add entries into wallet, subaccount, fill tables. + * Create a wallet with 2 subaccounts; one subaccount has 3 fills and the other has 1 fill. + * The fills are at t=0,1,2 and t=1 for the subaccounts respectively. + * This setup allows us to test that the totalVolume is correctly calculated for a time window. + * @returns first fill time in ISO format + */ +async function populateWalletSubaccountFill(): Promise { + await seedData(); + await Promise.all([ + OrderTable.create(defaultOrder), + OrderTable.create(isolatedMarketOrder), + ]); + + const referenceDt: DateTime = DateTime.utc().minus({ hours: 1 }); + const eventIds = [ + defaultTendermintEventId, + defaultTendermintEventId2, + defaultTendermintEventId3, + defaultTendermintEventId4, + ]; + let eventIdx = 0; + + const fillPromises: Promise[] = []; + // Create 3 fills with 1 min increments for defaultSubaccount + for (let i = 0; i < 3; i++) { + fillPromises.push( + FillTable.create({ + ...defaultFill, + subaccountId: defaultSubaccountId, + createdAt: referenceDt.plus({ minutes: i }).toISO(), + eventId: eventIds[eventIdx], + price: '1', + size: '1', + }), + ); + eventIdx += 1; + } + // Create 1 fill at referenceDt for isolatedSubaccount + fillPromises.push( + FillTable.create({ + ...defaultFill, + subaccountId: isolatedSubaccountId, + createdAt: referenceDt.toISO(), + eventId: eventIds[eventIdx], + price: '10', + size: '10', + }), + ); + await Promise.all(fillPromises); + + return referenceDt; +} diff --git a/indexer/packages/postgres/src/config.ts b/indexer/packages/postgres/src/config.ts index 
9222259eea0..8d4cb920eba 100644 --- a/indexer/packages/postgres/src/config.ts +++ b/indexer/packages/postgres/src/config.ts @@ -35,6 +35,7 @@ export const postgresConfigSchema = { ASSET_REFRESHER_INTERVAL_MS: parseInteger({ default: 30_000 }), // 30 seconds MARKET_REFRESHER_INTERVAL_MS: parseInteger({ default: 30_000 }), // 30 seconds LIQUIDITY_TIER_REFRESHER_INTERVAL_MS: parseInteger({ default: 30_000 }), // 30 seconds + BLOCK_HEIGHT_REFRESHER_INTERVAL_MS: parseInteger({ default: 1_000 }), // 1 second USE_READ_REPLICA: parseBoolean({ default: false }), // Optional environment variables. diff --git a/indexer/packages/postgres/src/constants.ts b/indexer/packages/postgres/src/constants.ts index 16e36f7e2fe..aa3dbc9bda2 100644 --- a/indexer/packages/postgres/src/constants.ts +++ b/indexer/packages/postgres/src/constants.ts @@ -1,6 +1,7 @@ import { CandleMessage_Resolution, ClobPairStatus } from '@dydxprotocol-indexer/v4-protos'; import config from './config'; +import AffiliateReferredUsersModel from './models/affiliate-referred-users-model'; import AssetModel from './models/asset-model'; import AssetPositionModel from './models/asset-position-model'; import FillModel from './models/fill-model'; @@ -14,6 +15,7 @@ import PerpetualPositionModel from './models/perpetual-position-model'; import SubaccountModel from './models/subaccount-model'; import TradingRewardModel from './models/trading-reward-model'; import TransferModel from './models/transfer-model'; +import VaultModel from './models/vault-model'; import { APITimeInForce, CandleResolution, @@ -88,6 +90,7 @@ export const TIME_IN_FORCE_TO_API_TIME_IN_FORCE: Record { + return knex.schema.table('perpetual_markets', (table) => { + table.enum('marketType', ['CROSS', 'ISOLATED']).notNullable().defaultTo('CROSS'); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.table('perpetual_markets', (table) => { + table.dropColumn('marketType'); + }); +} diff --git 
a/indexer/packages/postgres/src/db/migrations/migration_files/20240329154240_liquidity_tier_update.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240329154240_liquidity_tier_update.ts new file mode 100644 index 00000000000..fded421e6c7 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240329154240_liquidity_tier_update.ts @@ -0,0 +1,16 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.alterTable('liquidity_tiers', (table) => { + table.string('openInterestLowerCap').nullable().defaultTo(null); + table.string('openInterestUpperCap').nullable().defaultTo(null); + }); + +} + +export async function down(knex: Knex): Promise { + await knex.schema.alterTable('liquidity_tiers', (table) => { + table.dropColumn('openInterestLowerCap'); + table.dropColumn('openInterestUpperCap'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240410111632_add_compliance_status_status_first_strike_close_only.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240410111632_add_compliance_status_status_first_strike_close_only.ts new file mode 100644 index 00000000000..4f5eeceac4a --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240410111632_add_compliance_status_status_first_strike_close_only.ts @@ -0,0 +1,19 @@ +import * as Knex from 'knex'; + +import { formatAlterTableEnumSql } from '../helpers'; + +export async function up(knex: Knex): Promise { + return knex.raw(formatAlterTableEnumSql( + 'compliance_status', + 'status', + ['COMPLIANT', 'FIRST_STRIKE_CLOSE_ONLY', 'FIRST_STRIKE', 'CLOSE_ONLY', 'BLOCKED'], + )); +} + +export async function down(knex: Knex): Promise { + return knex.raw(formatAlterTableEnumSql( + 'compliance_status', + 'status', + ['COMPLIANT', 'FIRST_STRIKE', 'CLOSE_ONLY', 'BLOCKED'], + )); +} diff --git 
a/indexer/packages/postgres/src/db/migrations/migration_files/20240422012837_base_open_interest_for_perp.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240422012837_base_open_interest_for_perp.ts new file mode 100644 index 00000000000..3eb504060e9 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240422012837_base_open_interest_for_perp.ts @@ -0,0 +1,13 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.alterTable('perpetual_markets', (table) => { + table.decimal('baseOpenInterest', null).defaultTo(0); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.alterTable('perpetual_markets', (table) => { + table.dropColumn('baseOpenInterest'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240613174832_add_gb_to_compliance_reasons.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240613174832_add_gb_to_compliance_reasons.ts new file mode 100644 index 00000000000..c83af715f37 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240613174832_add_gb_to_compliance_reasons.ts @@ -0,0 +1,19 @@ +import * as Knex from 'knex'; + +import { formatAlterTableEnumSql } from '../helpers'; + +export async function up(knex: Knex): Promise { + return knex.raw(formatAlterTableEnumSql( + 'compliance_status', + 'reason', + ['MANUAL', 'US_GEO', 'CA_GEO', 'GB_GEO', 'SANCTIONED_GEO', 'COMPLIANCE_PROVIDER'], + )); +} + +export async function down(knex: Knex): Promise { + return knex.raw(formatAlterTableEnumSql( + 'compliance_status', + 'reason', + ['MANUAL', 'US_GEO', 'CA_GEO', 'SANCTIONED_GEO', 'COMPLIANCE_PROVIDER'], + )); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240627152937_candles_add_mid_book_price_open_and_close.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240627152937_candles_add_mid_book_price_open_and_close.ts new 
file mode 100644 index 00000000000..c97c972d3fc --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240627152937_candles_add_mid_book_price_open_and_close.ts @@ -0,0 +1,19 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex + .schema + .alterTable('candles', (table) => { + table.decimal('orderbookMidPriceOpen', null).nullable(); + table.decimal('orderbookMidPriceClose', null).nullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex + .schema + .alterTable('candles', (table) => { + table.dropColumn('orderbookMidPriceOpen'); + table.dropColumn('orderbookMidPriceClose'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240628141851_create_pnl_ticks_block_time_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240628141851_create_pnl_ticks_block_time_index.ts new file mode 100644 index 00000000000..265da633437 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240628141851_create_pnl_ticks_block_time_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "pnl_ticks_blocktime_index" ON "pnl_ticks" ("blockTime"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS "pnl_ticks_blocktime_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240715155120_subaccount_usernames.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240715155120_subaccount_usernames.ts new file mode 100644 index 00000000000..08362a300a4 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240715155120_subaccount_usernames.ts @@ -0,0 +1,14 @@ +import * as Knex from 'knex'; + +export async function 
up(knex: Knex): Promise { + await knex.schema.createTable('subaccount_usernames', (table) => { + // username is primary key and is unique across the table + table.string('username').notNullable().primary(); + // subaccounts is a foreign key to the subaccounts table subaccounts.id + table.uuid('subaccountId').notNullable().references('id').inTable('subaccounts'); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTable('subaccount_usernames'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240717142808_pnl_ticks_subaccount_id_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240717142808_pnl_ticks_subaccount_id_index.ts new file mode 100644 index 00000000000..77b9f4dbaca --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240717142808_pnl_ticks_subaccount_id_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "pnl_ticks_subaccountid_index" ON "pnl_ticks" ("subaccountId"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS "pnl_ticks_subaccountid_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240717160024_create_leaderboard_pnl_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240717160024_create_leaderboard_pnl_table.ts new file mode 100644 index 00000000000..0db3a7f8c76 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240717160024_create_leaderboard_pnl_table.ts @@ -0,0 +1,25 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('leaderboard_pnl', (table) => { + table.string('address').notNullable().references('address').inTable('wallets'); + table.enum( 
+ 'timeSpan', + [ + 'ONE_DAY', + 'SEVEN_DAYS', + 'THIRTY_DAYS', + 'ONE_YEAR', + 'ALL_TIME', + ], + ); + table.string('pnl').notNullable(); + table.string('currentEquity').notNullable(); + table.integer('rank').notNullable(); + table.primary(['address', 'timeSpan']); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('leaderboard_pnl'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240717171246_create_leaderboard_pnl_rank_timespan_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240717171246_create_leaderboard_pnl_rank_timespan_index.ts new file mode 100644 index 00000000000..3c9b83ab9ac --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240717171246_create_leaderboard_pnl_rank_timespan_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "leaderboard_pnl_rank_timespan_index" ON leaderboard_pnl("rank", "timeSpan"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS "leaderboard_pnl_rank_timespan_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240809153326_create_firebase_notification_tokens_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240809153326_create_firebase_notification_tokens_table.ts new file mode 100644 index 00000000000..bb42cea94db --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240809153326_create_firebase_notification_tokens_table.ts @@ -0,0 +1,16 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('firebase_notification_tokens', (table) => { + table.increments('id').primary(); + table.string('token').notNullable().unique(); + 
table.string('address').notNullable(); + table.foreign('address').references('wallets.address').onDelete('CASCADE'); + table.string('language').notNullable(); + table.timestamp('updatedAt').notNullable().defaultTo(knex.fn.now()); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('firebase_notification_tokens'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240827162119_add_wallets_total_volume_and_affiliates_whitelist.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240827162119_add_wallets_total_volume_and_affiliates_whitelist.ts new file mode 100644 index 00000000000..59c026b2a0e --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240827162119_add_wallets_total_volume_and_affiliates_whitelist.ts @@ -0,0 +1,19 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex + .schema + .alterTable('wallets', (table) => { + table.decimal('totalVolume', null).defaultTo(0).notNullable(); + table.boolean('isWhitelistAffiliate').defaultTo(false).notNullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex + .schema + .alterTable('wallets', (table) => { + table.dropColumn('totalVolume'); + table.dropColumn('isWhitelistAffiliate'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240829161445_add_fills_affiliateearnedrevshare.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240829161445_add_fills_affiliateearnedrevshare.ts new file mode 100644 index 00000000000..694eac939b9 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240829161445_add_fills_affiliateearnedrevshare.ts @@ -0,0 +1,13 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.table('fills', (table) => { + table.string('affiliateEarnedRevShare'); + }); +} + +export async function down(knex: 
Knex): Promise { + await knex.schema.table('fills', (table) => { + table.dropColumn('affiliateEarnedRevShare'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240829161450_add_fills_subaccountid_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240829161450_add_fills_subaccountid_index.ts new file mode 100644 index 00000000000..cc6304adfcd --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240829161450_add_fills_subaccountid_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "fills_subaccountid_index" ON "fills" ("subaccountId"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS "fills_subaccountid_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240829171730_create_persistent_cache_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240829171730_create_persistent_cache_table.ts new file mode 100644 index 00000000000..0bd053a82a2 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240829171730_create_persistent_cache_table.ts @@ -0,0 +1,12 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('persistent_cache', (table) => { + table.string('key').primary().notNullable(); + table.string('value').notNullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('persistent_cache'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240830154741_create_affiliate_referred_users_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240830154741_create_affiliate_referred_users_table.ts new file mode 100644 
index 00000000000..a438766d466 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240830154741_create_affiliate_referred_users_table.ts @@ -0,0 +1,16 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('affiliate_referred_users', (table) => { + table.string('refereeAddress').primary().notNullable(); + table.string('affiliateAddress').notNullable(); + table.bigInteger('referredAtBlock').notNullable(); + + // Index on affiliateAddress for faster queries + table.index(['affiliateAddress']); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('affiliate_referred_users'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240830165511_create_affiliate_info_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240830165511_create_affiliate_info_table.ts new file mode 100644 index 00000000000..f409fa69616 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240830165511_create_affiliate_info_table.ts @@ -0,0 +1,18 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('affiliate_info', (table) => { + table.string('address').primary().notNullable(); + table.decimal('affiliateEarnings').notNullable(); + table.integer('referredMakerTrades').notNullable(); + table.integer('referredTakerTrades').notNullable(); + table.decimal('totalReferredFees').notNullable(); + table.integer('totalReferredUsers').notNullable(); + table.decimal('referredNetProtocolEarnings').notNullable(); + table.bigInteger('firstReferralBlockHeight').notNullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('affiliate_info'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240904161445_fills_rename_affiliateearnedrevshare.ts 
b/indexer/packages/postgres/src/db/migrations/migration_files/20240904161445_fills_rename_affiliateearnedrevshare.ts new file mode 100644 index 00000000000..b5bc4694afb --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240904161445_fills_rename_affiliateearnedrevshare.ts @@ -0,0 +1,13 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.table('fills', (table) => { + table.renameColumn('affiliateEarnedRevShare', 'affiliateRevShare'); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.table('fills', (table) => { + table.renameColumn('affiliateRevShare', 'affiliateEarnedRevShare'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240906134410_add_fills_created_at_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240906134410_add_fills_created_at_index.ts new file mode 100644 index 00000000000..3c610724030 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240906134410_add_fills_created_at_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "fills_createdat_index" ON "fills" ("createdAt"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS "fills_createdat_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240910101410_change_fills_affiliaterevshare_type.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240910101410_change_fills_affiliaterevshare_type.ts new file mode 100644 index 00000000000..96d7b480835 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240910101410_change_fills_affiliaterevshare_type.ts @@ -0,0 +1,22 @@ +import * as Knex from 
'knex'; + +// No data has been stored added at time of commit +export async function up(knex: Knex): Promise { + // decimal('columnName') has is 8,2 precision and scale + // decimal('columnName', null) has variable precision and scale + await knex.schema.alterTable('fills', (table) => { + table.dropColumn('affiliateRevShare'); + }); + await knex.schema.alterTable('fills', (table) => { + table.decimal('affiliateRevShare', null).notNullable().defaultTo(0); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.alterTable('fills', (table) => { + table.dropColumn('affiliateRevShare'); + }); + await knex.schema.alterTable('fills', (table) => { + table.string('affiliateRevShare').notNullable().defaultTo('0'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240911144027_drop_wallets_iswhitelist_column.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240911144027_drop_wallets_iswhitelist_column.ts new file mode 100644 index 00000000000..112195e713f --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240911144027_drop_wallets_iswhitelist_column.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex + .schema + .alterTable('wallets', (table) => { + table.dropColumn('isWhitelistAffiliate'); + }); +} + +export async function down(knex: Knex): Promise { + return knex + .schema + .alterTable('wallets', (table) => { + table.boolean('isWhitelistAffiliate').defaultTo(false).notNullable(); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240912180829_create_vaults_table.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240912180829_create_vaults_table.ts new file mode 100644 index 00000000000..a8153021279 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240912180829_create_vaults_table.ts @@ -0,0 +1,20 @@ +import * as Knex from 
'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.createTable('vaults', (table) => { + table.string('address').primary().notNullable(); // address of vault + table.bigInteger('clobPairId').notNullable(); // clob pair id for vault + table.enum('status', [ + 'DEACTIVATED', + 'STAND_BY', + 'QUOTING', + 'CLOSE_ONLY', + ]).notNullable(); // quoting status of vault + table.timestamp('createdAt').notNullable(); + table.timestamp('updatedAt').notNullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.dropTable('vaults'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240913142157_change_affiliate_info_decimal_precision_and_add_total_referred_volume.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240913142157_change_affiliate_info_decimal_precision_and_add_total_referred_volume.ts new file mode 100644 index 00000000000..53f6d6d3733 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240913142157_change_affiliate_info_decimal_precision_and_add_total_referred_volume.ts @@ -0,0 +1,22 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.alterTable('affiliate_info', (table) => { + // null indicates variable precision whereas not specifying will result in 8,2 precision,scale + table.decimal('affiliateEarnings', null).alter(); + table.decimal('totalReferredFees', null).alter(); + table.decimal('referredNetProtocolEarnings', null).alter(); + + table.decimal('referredTotalVolume', null).notNullable(); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.alterTable('affiliate_info', (table) => { + table.decimal('affiliateEarnings').alter(); + table.decimal('totalReferredFees').alter(); + table.decimal('referredNetProtocolEarnings').alter(); + + table.dropColumn('referredTotalVolume'); + }); +} diff --git 
a/indexer/packages/postgres/src/db/migrations/migration_files/20240919142157_change_affiliate_info_default_value.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240919142157_change_affiliate_info_default_value.ts new file mode 100644 index 00000000000..9b19769be71 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240919142157_change_affiliate_info_default_value.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex.schema.alterTable('affiliate_info', (table) => { + table.decimal('affiliateEarnings', null).notNullable().defaultTo(0).alter(); + table.decimal('totalReferredFees', null).notNullable().defaultTo(0).alter(); + table.decimal('referredNetProtocolEarnings', null).notNullable().defaultTo(0).alter(); + }); +} + +export async function down(knex: Knex): Promise { + return knex.schema.alterTable('affiliate_info', (table) => { + table.decimal('affiliateEarnings', null).alter(); + table.decimal('totalReferredFees', null).alter(); + table.decimal('referredNetProtocolEarnings', null).alter(); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20240926133526_create_oracle_prices_market_id_effective_at_height_index.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20240926133526_create_oracle_prices_market_id_effective_at_height_index.ts new file mode 100644 index 00000000000..8f80339ebf6 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20240926133526_create_oracle_prices_market_id_effective_at_height_index.ts @@ -0,0 +1,17 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.raw(` + CREATE INDEX CONCURRENTLY IF NOT EXISTS "oracle_prices_marketid_effectiveatheight_index" ON "oracle_prices" ("marketId", "effectiveAtHeight"); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(` + DROP INDEX CONCURRENTLY IF EXISTS 
"oracle_prices_marketid_effectiveatheight_index"; + `); +} + +export const config = { + transaction: false, +}; diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20241002144813_change_affiliate_info_fee_columns.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20241002144813_change_affiliate_info_fee_columns.ts new file mode 100644 index 00000000000..877b2524c0f --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20241002144813_change_affiliate_info_fee_columns.ts @@ -0,0 +1,25 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + return knex + .schema + .alterTable('affiliate_info', (table) => { + table.dropColumn('totalReferredFees'); + table.dropColumn('referredNetProtocolEarnings'); + table.decimal('totalReferredTakerFees', null).notNullable().defaultTo(0); + table.decimal('totalReferredMakerFees', null).notNullable().defaultTo(0); + table.decimal('totalReferredMakerRebates', null).notNullable().defaultTo(0); + }); +} + +export async function down(knex: Knex): Promise { + return knex + .schema + .alterTable('affiliate_info', (table) => { + table.decimal('totalReferredFees', null).notNullable().defaultTo(0); + table.decimal('referredNetProtocolEarnings', null).notNullable().defaultTo(0); + table.dropColumn('totalReferredTakerFees'); + table.dropColumn('totalReferredMakerFees'); + table.dropColumn('totalReferredMakerRebates'); + }); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20241119162238_create_vault_hourly_view.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20241119162238_create_vault_hourly_view.ts new file mode 100644 index 00000000000..fe0e4336d11 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20241119162238_create_vault_hourly_view.ts @@ -0,0 +1,47 @@ +import * as Knex from 'knex'; + +const RAW_VAULTS_PNL_HOURLY_QUERY: string = ` +CREATE MATERIALIZED VIEW IF NOT EXISTS 
vaults_hourly_pnl AS WITH vault_subaccounts AS +( + SELECT subaccounts.id + FROM vaults, + subaccounts + WHERE vaults.address = subaccounts.address + AND subaccounts."subaccountNumber" = 0), pnl_subaccounts AS +( + SELECT * + FROM vault_subaccounts + UNION + SELECT id + FROM subaccounts + WHERE address = 'dydx18tkxrnrkqc2t0lr3zxr5g6a4hdvqksylxqje4r' + AND "subaccountNumber" = 0) +SELECT "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" +FROM ( + SELECT pnl_ticks.*, + ROW_NUMBER() OVER ( partition BY "subaccountId", DATE_TRUNC( 'hour', "blockTime" ) ORDER BY "blockTime" ) AS r + FROM pnl_ticks + WHERE "subaccountId" IN + ( + SELECT * + FROM pnl_subaccounts) + AND "blockTime" >= NOW() - interval '604800 second' ) AS pnl_intervals +WHERE r = 1 +ORDER BY "subaccountId"; +`; + +export async function up(knex: Knex): Promise { + await knex.raw(RAW_VAULTS_PNL_HOURLY_QUERY); + await knex.raw('CREATE UNIQUE INDEX ON vaults_hourly_pnl (id);'); +} + +export async function down(knex: Knex): Promise { + await knex.raw('DROP MATERIALIZED VIEW IF EXISTS vaults_hourly_pnl;'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20241119163402_create_vault_daily_view.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20241119163402_create_vault_daily_view.ts new file mode 100644 index 00000000000..4469a8bec52 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20241119163402_create_vault_daily_view.ts @@ -0,0 +1,47 @@ +import * as Knex from 'knex'; + +const RAW_VAULTS_PNL_DAILY_QUERY: string = ` +CREATE MATERIALIZED VIEW IF NOT EXISTS vaults_daily_pnl AS WITH vault_subaccounts AS +( + SELECT subaccounts.id + FROM vaults, + subaccounts + WHERE vaults.address = subaccounts.address + AND subaccounts."subaccountNumber" = 0), pnl_subaccounts AS +( + SELECT * + FROM vault_subaccounts + UNION + SELECT id + FROM subaccounts + WHERE address = 
'dydx18tkxrnrkqc2t0lr3zxr5g6a4hdvqksylxqje4r' + AND "subaccountNumber" = 0) +SELECT "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" +FROM ( + SELECT pnl_ticks.*, + ROW_NUMBER() OVER ( partition BY "subaccountId", DATE_TRUNC( 'day', "blockTime" ) ORDER BY "blockTime" ) AS r + FROM pnl_ticks + WHERE "subaccountId" IN + ( + SELECT * + FROM pnl_subaccounts) + AND "blockTime" >= NOW() - interval '7776000 second' ) AS pnl_intervals +WHERE r = 1 +ORDER BY "subaccountId"; +`; + +export async function up(knex: Knex): Promise { + await knex.raw(RAW_VAULTS_PNL_DAILY_QUERY); + await knex.raw('CREATE UNIQUE INDEX ON vaults_daily_pnl (id);'); +} + +export async function down(knex: Knex): Promise { + await knex.raw('DROP MATERIALIZED VIEW IF EXISTS vaults_daily_pnl;'); +} diff --git a/indexer/packages/postgres/src/db/migrations/migration_files/20250107145033_default_1hr_funding_for_perp.ts b/indexer/packages/postgres/src/db/migrations/migration_files/20250107145033_default_1hr_funding_for_perp.ts new file mode 100644 index 00000000000..466bcba80c0 --- /dev/null +++ b/indexer/packages/postgres/src/db/migrations/migration_files/20250107145033_default_1hr_funding_for_perp.ts @@ -0,0 +1,13 @@ +import * as Knex from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.alterTable('perpetual_markets', (table) => { + table.decimal('defaultFundingRate1H', null).defaultTo(0); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.alterTable('perpetual_markets', (table) => { + table.dropColumn('defaultFundingRate1H'); + }); +} diff --git a/indexer/packages/postgres/src/helpers/db-helpers.ts b/indexer/packages/postgres/src/helpers/db-helpers.ts index 2ebf2f0bc90..cba32483ba8 100644 --- a/indexer/packages/postgres/src/helpers/db-helpers.ts +++ b/indexer/packages/postgres/src/helpers/db-helpers.ts @@ -8,6 +8,8 @@ import { rawQuery } from './stores-helpers'; const layer2Tables 
= [ 'perpetual_positions', 'fills', + 'subaccount_usernames', + 'leaderboard_pnl', ]; const layer1Tables = [ @@ -26,6 +28,10 @@ const layer1Tables = [ 'trading_rewards', 'trading_reward_aggregations', 'compliance_status', + 'affiliate_referred_users', + 'persistent_cache', + 'affiliate_info', + 'vaults', ]; /** diff --git a/indexer/packages/postgres/src/index.ts b/indexer/packages/postgres/src/index.ts index 9d10366af3c..43637830ea7 100644 --- a/indexer/packages/postgres/src/index.ts +++ b/indexer/packages/postgres/src/index.ts @@ -16,6 +16,11 @@ export { default as PerpetualPositionModel } from './models/perpetual-position-m export { default as TransferModel } from './models/transfer-model'; export { default as TradingRewardModel } from './models/trading-reward-model'; export { default as TradingRewardAggregationModel } from './models/trading-reward-aggregation-model'; +export { default as SubaccountUsernamesModel } from './models/subaccount-usernames-model'; +export { default as LeaderboardPnlModel } from './models/leaderboard-pnl-model'; +export { default as PersistentCacheModel } from './models/persistent-cache-model'; +export { default as AffiliateReferredUsersModel } from './models/affiliate-referred-users-model'; +export { default as AffiliateInfoModel } from './models/affiliate-info-model'; export * as AssetTable from './stores/asset-table'; export * as AssetPositionTable from './stores/asset-position-table'; @@ -39,15 +44,25 @@ export * as ComplianceTable from './stores/compliance-table'; export * as ComplianceStatusTable from './stores/compliance-status-table'; export * as TradingRewardTable from './stores/trading-reward-table'; export * as TradingRewardAggregationTable from './stores/trading-reward-aggregation-table'; +export * as LeaderboardPnlTable from './stores/leaderboard-pnl-table'; +export * as SubaccountUsernamesTable from './stores/subaccount-usernames-table'; +export * as PersistentCacheTable from './stores/persistent-cache-table'; +export * as 
AffiliateReferredUsersTable from './stores/affiliate-referred-users-table'; +export * as FirebaseNotificationTokenTable from './stores/firebase-notification-token-table'; +export * as AffiliateInfoTable from './stores/affiliate-info-table'; +export * as VaultTable from './stores/vault-table'; +export * as VaultPnlTicksView from './stores/vault-pnl-ticks-view'; export * as perpetualMarketRefresher from './loops/perpetual-market-refresher'; export * as assetRefresher from './loops/asset-refresher'; +export * as blockHeightRefresher from './loops/block-height-refresher'; export * as liquidityTierRefresher from './loops/liquidity-tier-refresher'; export * as uuid from './helpers/uuid'; export * as protocolTranslations from './lib/protocol-translations'; export * as orderTranslations from './lib/order-translations'; export * as apiTranslations from './lib/api-translations'; +export * as parentSubaccountHelpers from './lib/parent-subaccount-helpers'; export * as dbHelpers from './helpers/db-helpers'; export * as storeHelpers from './helpers/stores-helpers'; @@ -56,3 +71,4 @@ export * as testConstants from '../__tests__/helpers/constants'; export * as testConversionHelpers from '../__tests__/helpers/conversion-helpers'; export * as helpers from './db/helpers'; +export * as loopHelpers from './loops/loopHelper'; diff --git a/indexer/packages/postgres/src/lib/api-translations.ts b/indexer/packages/postgres/src/lib/api-translations.ts index c6e69e248f8..27c776b91e5 100644 --- a/indexer/packages/postgres/src/lib/api-translations.ts +++ b/indexer/packages/postgres/src/lib/api-translations.ts @@ -1,4 +1,4 @@ -import { TIME_IN_FORCE_TO_API_TIME_IN_FORCE, CHILD_SUBACCOUNT_MULTIPLIER, MAX_PARENT_SUBACCOUNTS } from '../constants'; +import { TIME_IN_FORCE_TO_API_TIME_IN_FORCE } from '../constants'; import { APITimeInForce, TimeInForce } from '../types'; /** @@ -20,30 +20,3 @@ export function isOrderTIFPostOnly(timeInForce: TimeInForce): boolean { export function 
orderTIFToAPITIF(timeInForce: TimeInForce): APITimeInForce { return TIME_IN_FORCE_TO_API_TIME_IN_FORCE[timeInForce]; } - -/** - * Gets a list of all possible child subaccount numbers for a parent subaccount number - * Child subaccounts = [128*0+parentSubaccount, 128*1+parentSubaccount ... 128*999+parentSubaccount] - * @param parentSubaccount - * @returns - */ -export function getChildSubaccountNums(parentSubaccountNum: number): number[] { - if (parentSubaccountNum >= MAX_PARENT_SUBACCOUNTS) { - throw new Error(`Parent subaccount number must be less than ${MAX_PARENT_SUBACCOUNTS}`); - } - return Array.from({ length: CHILD_SUBACCOUNT_MULTIPLIER }, - (_, i) => MAX_PARENT_SUBACCOUNTS * i + parentSubaccountNum); -} - -/** - * Gets the parent subaccount number from a child subaccount number - * Parent subaccount = childSubaccount % 128 - * @param childSubaccountNum - * @returns - */ -export function getParentSubaccountNum(childSubaccountNum: number): number { - if (childSubaccountNum > MAX_PARENT_SUBACCOUNTS * CHILD_SUBACCOUNT_MULTIPLIER) { - throw new Error(`Child subaccount number must be less than ${MAX_PARENT_SUBACCOUNTS * CHILD_SUBACCOUNT_MULTIPLIER}`); - } - return childSubaccountNum % MAX_PARENT_SUBACCOUNTS; -} diff --git a/indexer/packages/postgres/src/lib/helpers.ts b/indexer/packages/postgres/src/lib/helpers.ts index 5e3ec1fdf79..9746c61ca66 100644 --- a/indexer/packages/postgres/src/lib/helpers.ts +++ b/indexer/packages/postgres/src/lib/helpers.ts @@ -2,8 +2,13 @@ import { DateTime } from 'luxon'; import { IsoString } from '../types'; +import vaultAddresses from './vault-addresses.json'; export function blockTimeFromIsoString(isoString: IsoString): number { const dateTime: DateTime = DateTime.fromISO(isoString, { zone: 'utc' }); return Math.floor(dateTime.toMillis() / 1000); } + +export function getVaultAddresses(): string[] { + return vaultAddresses; +} diff --git a/indexer/packages/postgres/src/lib/parent-subaccount-helpers.ts 
b/indexer/packages/postgres/src/lib/parent-subaccount-helpers.ts new file mode 100644 index 00000000000..b58ebee12c0 --- /dev/null +++ b/indexer/packages/postgres/src/lib/parent-subaccount-helpers.ts @@ -0,0 +1,11 @@ +import { + CHILD_SUBACCOUNT_MULTIPLIER, + MAX_PARENT_SUBACCOUNTS, +} from '../constants'; + +export function getParentSubaccountNum(childSubaccountNum: number): number { + if (childSubaccountNum > MAX_PARENT_SUBACCOUNTS * CHILD_SUBACCOUNT_MULTIPLIER) { + throw new Error(`Child subaccount number must be less than or equal to ${MAX_PARENT_SUBACCOUNTS * CHILD_SUBACCOUNT_MULTIPLIER}`); + } + return childSubaccountNum % MAX_PARENT_SUBACCOUNTS; +} diff --git a/indexer/packages/postgres/src/lib/protocol-translations.ts b/indexer/packages/postgres/src/lib/protocol-translations.ts index c1e894214d4..ba98992faf6 100644 --- a/indexer/packages/postgres/src/lib/protocol-translations.ts +++ b/indexer/packages/postgres/src/lib/protocol-translations.ts @@ -62,10 +62,7 @@ const ORDER_TYPE_TO_CONDITION_TYPE_MAP: Record { + await startUpdateLoop( + updateBlockHeight, + config.BLOCK_HEIGHT_REFRESHER_INTERVAL_MS, + 'updateBlockHeight', + ); +} + +/** + * Updates in-memory latest block height. + */ +export async function updateBlockHeight(options?: Options): Promise { + const startTime: number = Date.now(); + try { + const latestBlock: BlockFromDatabase = await BlockTable.getLatest( + options || { readReplica: true }, + ); + latestBlockHeight = latestBlock.blockHeight; + stats.timing(`${config.SERVICE_NAME}.loops.update_block_height`, Date.now() - startTime); + // eslint-disable-next-line no-empty + } catch (error) { } +} + +/** + * Gets the latest block height. 
+ */ +export function getLatestBlockHeight(): string { + if (!latestBlockHeight) { + const message: string = 'Unable to find latest block height'; + logger.error({ + at: 'block-height-refresher#getLatestBlockHeight', + message, + }); + throw new Error(message); + } + return latestBlockHeight; +} + +export function clear(): void { + if (config.NODE_ENV !== NodeEnv.TEST) { + throw new Error('clear cannot be used in non-test env'); + } + + latestBlockHeight = ''; +} diff --git a/indexer/packages/postgres/src/models/affiliate-info-model.ts b/indexer/packages/postgres/src/models/affiliate-info-model.ts new file mode 100644 index 00000000000..49dd0dfdbdc --- /dev/null +++ b/indexer/packages/postgres/src/models/affiliate-info-model.ts @@ -0,0 +1,86 @@ +import { NonNegativeNumericPattern, NumericPattern } from '../lib/validators'; +import UpsertQueryBuilder from '../query-builders/upsert'; +import BaseModel from './base-model'; + +export default class AffiliateInfoModel extends BaseModel { + static get tableName() { + return 'affiliate_info'; + } + + static get idColumn() { + return 'address'; + } + + static get jsonSchema() { + return { + type: 'object', + required: [ + 'address', + 'affiliateEarnings', + 'referredMakerTrades', + 'referredTakerTrades', + 'totalReferredMakerFees', + 'totalReferredTakerFees', + 'totalReferredMakerRebates', + 'totalReferredUsers', + 'firstReferralBlockHeight', + 'referredTotalVolume', + ], + properties: { + address: { type: 'string' }, + affiliateEarnings: { type: 'string', pattern: NonNegativeNumericPattern }, + referredMakerTrades: { type: 'int' }, + referredTakerTrades: { type: 'int' }, + totalReferredMakerFees: { type: 'string', pattern: NonNegativeNumericPattern }, + totalReferredTakerFees: { type: 'string', pattern: NonNegativeNumericPattern }, + totalReferredMakerRebates: { type: 'string', pattern: NumericPattern }, + totalReferredUsers: { type: 'int' }, + firstReferralBlockHeight: { type: 'string', pattern: NonNegativeNumericPattern 
}, + referredTotalVolume: { type: 'string', pattern: NonNegativeNumericPattern }, + }, + }; + } + + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + address: 'string', + affiliateEarnings: 'string', + referredMakerTrades: 'int', + referredTakerTrades: 'int', + totalReferredMakerFees: 'string', + totalReferredTakerFees: 'string', + totalReferredMakerRebates: 'string', + totalReferredUsers: 'int', + firstReferralBlockHeight: 'string', + referredTotalVolume: 'string', + }; + } + + QueryBuilderType!: UpsertQueryBuilder; + + address!: string; + + affiliateEarnings!: string; + + referredMakerTrades!: number; + + referredTakerTrades!: number; + + totalReferredMakerFees!: string; + + totalReferredTakerFees!: string; + + totalReferredMakerRebates!: string; + + totalReferredUsers!: number; + + firstReferralBlockHeight!: string; + + referredTotalVolume!: string; +} diff --git a/indexer/packages/postgres/src/models/affiliate-referred-users-model.ts b/indexer/packages/postgres/src/models/affiliate-referred-users-model.ts new file mode 100644 index 00000000000..559e499ecea --- /dev/null +++ b/indexer/packages/postgres/src/models/affiliate-referred-users-model.ts @@ -0,0 +1,48 @@ +import { NonNegativeNumericPattern } from '../lib/validators'; +import BaseModel from './base-model'; + +export default class AffiliateReferredUsersModel extends BaseModel { + static get tableName() { + return 'affiliate_referred_users'; + } + + static get idColumn() { + return 'refereeAddress'; + } + + static get jsonSchema() { + return { + type: 'object', + required: [ + 'affiliateAddress', + 'refereeAddress', + 'referredAtBlock', + ], + properties: { + affiliateAddress: { type: 'string' }, + refereeAddress: { type: 'string' }, + referredAtBlock: { type: 'string', pattern: 
NonNegativeNumericPattern }, + }, + }; + } + + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + affiliateAddress: 'string', + refereeAddress: 'string', + referredAtBlock: 'string', + }; + } + + affiliateAddress!: string; + + refereeAddress!: string; + + referredAtBlock!: string; +} diff --git a/indexer/packages/postgres/src/models/candle-model.ts b/indexer/packages/postgres/src/models/candle-model.ts index ec256318be1..a17cd2f4e89 100644 --- a/indexer/packages/postgres/src/models/candle-model.ts +++ b/indexer/packages/postgres/src/models/candle-model.ts @@ -50,6 +50,8 @@ export default class CandleModel extends Model { usdVolume: { type: 'string', pattern: NonNegativeNumericPattern }, trades: { type: 'integer' }, startingOpenInterest: { type: 'string', pattern: NonNegativeNumericPattern }, + orderbookMidPriceOpen: { type: ['string', 'null'], pattern: NonNegativeNumericPattern }, + orderbookMidPriceClose: { type: ['string', 'null'], pattern: NonNegativeNumericPattern }, }, }; } @@ -77,4 +79,8 @@ export default class CandleModel extends Model { trades!: number; startingOpenInterest!: string; + + orderbookMidPriceOpen?: string; + + orderbookMidPriceClose?: string; } diff --git a/indexer/packages/postgres/src/models/compliance-data-model.ts b/indexer/packages/postgres/src/models/compliance-data-model.ts index 1ba8fbd5e1f..eee0fb71d3b 100644 --- a/indexer/packages/postgres/src/models/compliance-data-model.ts +++ b/indexer/packages/postgres/src/models/compliance-data-model.ts @@ -1,10 +1,9 @@ -import { Model } from 'objection'; - import { NumericPattern } from '../lib/validators'; import UpsertQueryBuilder from '../query-builders/upsert'; import { ComplianceProvider, IsoString } from '../types'; +import BaseModel from './base-model'; -export 
default class ComplianceDataModel extends Model { +export default class ComplianceDataModel extends BaseModel { static get tableName() { return 'compliance_data'; } diff --git a/indexer/packages/postgres/src/models/fill-model.ts b/indexer/packages/postgres/src/models/fill-model.ts index 30927a6d57c..afc6dc3bddb 100644 --- a/indexer/packages/postgres/src/models/fill-model.ts +++ b/indexer/packages/postgres/src/models/fill-model.ts @@ -69,6 +69,7 @@ export default class FillModel extends Model { 'createdAt', 'createdAtHeight', 'fee', + 'affiliateRevShare', ], properties: { id: { type: 'string', format: 'uuid' }, @@ -86,6 +87,7 @@ export default class FillModel extends Model { createdAtHeight: { type: 'string', pattern: IntegerPattern }, clientMetadata: { type: ['string', 'null'], pattern: IntegerPattern }, fee: { type: 'string', pattern: NumericPattern }, + affiliateRevShare: { type: 'string', pattern: NonNegativeNumericPattern }, }, }; } @@ -114,6 +116,7 @@ export default class FillModel extends Model { createdAtHeight: 'string', clientMetadata: 'string', fee: 'string', + affiliateRevShare: 'string', }; } @@ -148,4 +151,6 @@ export default class FillModel extends Model { clientMetadata!: string; fee!: string; + + affiliateRevShare!: string; } diff --git a/indexer/packages/postgres/src/models/firebase-notification-token-model.ts b/indexer/packages/postgres/src/models/firebase-notification-token-model.ts new file mode 100644 index 00000000000..a8807aba8bd --- /dev/null +++ b/indexer/packages/postgres/src/models/firebase-notification-token-model.ts @@ -0,0 +1,47 @@ +import { Model } from 'objection'; + +import { IsoString } from '../types'; +import WalletModel from './wallet-model'; + +class FirebaseNotificationTokenModel extends Model { + static get tableName() { + return 'firebase_notification_tokens'; + } + + static get idColumn() { + return 'id'; + } + + static get jsonSchema() { + return { + }; + } + + static get sqlToJsonConversions() { + return { + }; + } + + 
static relationMappings = { + wallet: { + relation: Model.BelongsToOneRelation, + modelClass: WalletModel, + join: { + from: 'firebase_notification_tokens.address', + to: 'wallets.address', + }, + }, + }; + + id!: number; + + token!: string; + + address!: string; + + updatedAt!: IsoString; + + language!: string; +} + +export default FirebaseNotificationTokenModel; diff --git a/indexer/packages/postgres/src/models/leaderboard-pnl-model.ts b/indexer/packages/postgres/src/models/leaderboard-pnl-model.ts new file mode 100644 index 00000000000..60e54aaa02a --- /dev/null +++ b/indexer/packages/postgres/src/models/leaderboard-pnl-model.ts @@ -0,0 +1,61 @@ +import path from 'path'; + +import { Model } from 'objection'; + +import { NumericPattern } from '../lib/validators'; +import UpsertQueryBuilder from '../query-builders/upsert'; +import BaseModel from './base-model'; + +export default class LeaderboardPnlModel extends BaseModel { + + static get tableName() { + return 'leaderboard_pnl'; + } + + static get idColumn() { + return ['address', 'timeSpan']; + } + + static relationMappings = { + wallets: { + relation: Model.BelongsToOneRelation, + modelClass: path.join(__dirname, 'wallet-model'), + join: { + from: 'leaderboard_pnl.address', + to: 'wallets.address', + }, + }, + }; + + static get jsonSchema() { + return { + type: 'object', + required: [ + 'address', + 'timeSpan', + 'pnl', + 'currentEquity', + 'rank', + ], + properties: { + address: { type: 'string' }, + timeSpan: { type: 'string' }, + pnl: { type: 'string', pattern: NumericPattern }, + currentEquity: { type: 'string', pattern: NumericPattern }, + rank: { type: 'integer' }, + }, + }; + } + + address!: string; + + timeSpan!: string; + + QueryBuilderType!: UpsertQueryBuilder; + + pnl!: string; + + currentEquity!: string; + + rank!: number; +} diff --git a/indexer/packages/postgres/src/models/liquidity-tiers-model.ts b/indexer/packages/postgres/src/models/liquidity-tiers-model.ts index 5bb40d66f81..fac287056ff 100644 
--- a/indexer/packages/postgres/src/models/liquidity-tiers-model.ts +++ b/indexer/packages/postgres/src/models/liquidity-tiers-model.ts @@ -1,4 +1,4 @@ -import { IntegerPattern } from '../lib/validators'; +import { IntegerPattern, NumericPattern } from '../lib/validators'; import UpsertQueryBuilder from '../query-builders/upsert'; import BaseModel from './base-model'; @@ -27,6 +27,10 @@ export default class LiquidityTiersModel extends BaseModel { name: { type: 'string' }, initialMarginPpm: { type: 'string', pattern: IntegerPattern }, maintenanceFractionPpm: { type: 'string', pattern: IntegerPattern }, + // Uppper cap for open interest in human readable format(USDC) + openInterestLowerCap: { type: ['string', 'null'], pattern: NumericPattern }, + // Lower cap for open interest in human readable format(USDC) + openInterestUpperCap: { type: ['string', 'null'], pattern: NumericPattern }, }, }; } @@ -43,6 +47,8 @@ export default class LiquidityTiersModel extends BaseModel { name: 'string', initialMarginPpm: 'string', maintenanceFractionPpm: 'string', + openInterestLowerCap: 'string', + openInterestUpperCap: 'string', }; } @@ -55,4 +61,8 @@ export default class LiquidityTiersModel extends BaseModel { initialMarginPpm!: string; maintenanceFractionPpm!: string; + + openInterestLowerCap?: string; + + openInterestUpperCap?: string; } diff --git a/indexer/packages/postgres/src/models/perpetual-market-model.ts b/indexer/packages/postgres/src/models/perpetual-market-model.ts index 124383d11b6..972dae899d9 100644 --- a/indexer/packages/postgres/src/models/perpetual-market-model.ts +++ b/indexer/packages/postgres/src/models/perpetual-market-model.ts @@ -8,7 +8,7 @@ import { NumericPattern, } from '../lib/validators'; import { - PerpetualMarketStatus, + PerpetualMarketStatus, PerpetualMarketType, } from '../types'; export default class PerpetualMarketModel extends Model { @@ -66,6 +66,7 @@ export default class PerpetualMarketModel extends Model { 'subticksPerTick', 
'stepBaseQuantums', 'liquidityTierId', + 'marketType', ], properties: { id: { type: 'string', pattern: IntegerPattern }, @@ -83,6 +84,9 @@ export default class PerpetualMarketModel extends Model { subticksPerTick: { type: 'integer' }, stepBaseQuantums: { type: 'integer' }, liquidityTierId: { type: 'integer' }, + marketType: { type: 'string' }, + baseOpenInterest: { type: 'string', pattern: NumericPattern }, + defaultFundingRate1H: { type: ['string', 'null'], default: null, pattern: NumericPattern }, }, }; } @@ -110,6 +114,9 @@ export default class PerpetualMarketModel extends Model { subticksPerTick: 'integer', stepBaseQuantums: 'integer', liquidityTierId: 'integer', + marketType: 'string', + baseOpenInterest: 'string', + defaultFundingRate1H: 'string', }; } @@ -142,4 +149,10 @@ export default class PerpetualMarketModel extends Model { stepBaseQuantums!: number; liquidityTierId!: number; + + marketType!: PerpetualMarketType; + + baseOpenInterest!: string; + + defaultFundingRate1H?: string; } diff --git a/indexer/packages/postgres/src/models/persistent-cache-model.ts b/indexer/packages/postgres/src/models/persistent-cache-model.ts new file mode 100644 index 00000000000..cd4c3480bc1 --- /dev/null +++ b/indexer/packages/postgres/src/models/persistent-cache-model.ts @@ -0,0 +1,31 @@ +import UpsertQueryBuilder from '../query-builders/upsert'; +import BaseModel from './base-model'; + +export default class PersistentCacheModel extends BaseModel { + static get tableName() { + return 'persistent_cache'; + } + + static get idColumn() { + return 'key'; + } + + static relationMappings = {}; + + static get jsonSchema() { + return { + type: 'object', + required: ['key', 'value'], + properties: { + key: { type: 'string' }, + value: { type: 'string' }, + }, + }; + } + + QueryBuilderType!: UpsertQueryBuilder; + + key!: string; + + value!: string; +} diff --git a/indexer/packages/postgres/src/models/subaccount-usernames-model.ts 
b/indexer/packages/postgres/src/models/subaccount-usernames-model.ts new file mode 100644 index 00000000000..69cb5fc8a78 --- /dev/null +++ b/indexer/packages/postgres/src/models/subaccount-usernames-model.ts @@ -0,0 +1,42 @@ +import path from 'path'; + +import { Model } from 'objection'; + +export default class SubaccountUsernames extends Model { + + static get tableName() { + return 'subaccount_usernames'; + } + + static get idColumn() { + return 'subaccountId'; + } + + static relationMappings = { + subaccount: { + relation: Model.BelongsToOneRelation, + modelClass: path.join(__dirname, 'subaccount-model'), + join: { + from: 'subaccount_usernames.subaccountId', + to: 'subaccounts.id', + }, + }, + }; + + static get jsonSchema() { + return { + type: 'object', + required: [ + 'username', + 'subaccountId'], + properties: { + username: { type: 'string' }, + subaccountId: { type: 'string' }, + }, + }; + } + + username!: string; + + subaccountId!: string; +} diff --git a/indexer/packages/postgres/src/models/vault-model.ts b/indexer/packages/postgres/src/models/vault-model.ts new file mode 100644 index 00000000000..e2f42258543 --- /dev/null +++ b/indexer/packages/postgres/src/models/vault-model.ts @@ -0,0 +1,60 @@ +import { IntegerPattern } from '../lib/validators'; +import { IsoString, VaultStatus } from '../types'; +import BaseModel from './base-model'; + +export default class VaultModel extends BaseModel { + + static get tableName() { + return 'vaults'; + } + + static get idColumn() { + return ['address']; + } + + static get jsonSchema() { + return { + type: 'object', + required: [ + 'address', + 'clobPairId', + 'status', + 'createdAt', + 'updatedAt', + ], + properties: { + address: { type: 'string' }, + clobPairId: { type: 'string', pattern: IntegerPattern }, + status: { type: 'string' }, + createdAt: { type: 'string', format: 'date-time' }, + updatedAt: { type: 'string', format: 'date-time' }, + }, + }; + } + + /** + * A mapping from column name to JSON conversion 
expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + address: 'string', + clobPairId: 'string', + status: 'string', + createdAt: 'date-time', + updatedAt: 'date-time', + }; + } + + address!: string; + + clobPairId!: string; + + status!: VaultStatus; + + createdAt!: IsoString; + + updatedAt!: IsoString; +} diff --git a/indexer/packages/postgres/src/models/wallet-model.ts b/indexer/packages/postgres/src/models/wallet-model.ts index a588a2d275f..35197e21dc6 100644 --- a/indexer/packages/postgres/src/models/wallet-model.ts +++ b/indexer/packages/postgres/src/models/wallet-model.ts @@ -40,10 +40,12 @@ export default class WalletModel extends BaseModel { required: [ 'address', 'totalTradingRewards', + 'totalVolume', ], properties: { address: { type: 'string' }, totalTradingRewards: { type: 'string', pattern: NonNegativeNumericPattern }, + totalVolume: { type: 'string', pattern: NonNegativeNumericPattern }, }, }; } @@ -57,6 +59,8 @@ export default class WalletModel extends BaseModel { static get sqlToJsonConversions() { return { address: 'string', + totalTradingRewards: 'string', + totalVolume: 'string', }; } @@ -65,4 +69,6 @@ export default class WalletModel extends BaseModel { address!: string; totalTradingRewards!: string; + + totalVolume!: string; } diff --git a/indexer/packages/postgres/src/query-builders/upsert.ts b/indexer/packages/postgres/src/query-builders/upsert.ts index 3b7dd2bfce1..e0350dac386 100644 --- a/indexer/packages/postgres/src/query-builders/upsert.ts +++ b/indexer/packages/postgres/src/query-builders/upsert.ts @@ -14,9 +14,8 @@ export default class UpsertQueryBuilder extends QueryB upsert(object: any) { const modelClass = this.modelClass(); - const idColumn: string = modelClass.idColumn as string; - - const tableDefinedId = `${modelClass.tableName}.${idColumn}`; + const idColumn 
= modelClass.idColumn; + const idColumns: string[] = Array.isArray(idColumn) ? idColumn : [idColumn]; const knex = modelClass.knex(); @@ -26,11 +25,16 @@ export default class UpsertQueryBuilder extends QueryB const colBindings = cols.map(() => '??').join(', '); const valBindings = cols.map(() => '?').join(', '); const setBindings = cols.map(() => '?? = ?').join(', '); + const idConditionBindings = idColumns.map(() => '?? = ?').join(' AND '); const setValues: string[] = []; for (let i = 0; i < cols.length; ++i) { setValues.push(cols[i], values[i]); } + const idValues: string[] = []; + for (let i = 0; i < idColumns.length; ++i) { + idValues.push(`${modelClass.tableName}.${idColumns[i]}`, object[idColumns[i]]); + } // eslint-disable-next-line @typescript-eslint/no-explicit-any return this.onBuildKnex((query: any) => { @@ -40,15 +44,14 @@ export default class UpsertQueryBuilder extends QueryB `(${colBindings}) VALUES (${valBindings})`, 'ON CONFLICT (??) DO', `UPDATE SET ${setBindings}`, - 'WHERE ?? 
= ?', + `WHERE ${idConditionBindings}`, ].join(' '), [ ...cols, ...values, modelClass.idColumn, ...setValues, - tableDefinedId, - object[idColumn], + ...idValues, ], ), ); diff --git a/indexer/packages/postgres/src/stores/affiliate-info-table.ts b/indexer/packages/postgres/src/stores/affiliate-info-table.ts new file mode 100644 index 00000000000..5ea785bc700 --- /dev/null +++ b/indexer/packages/postgres/src/stores/affiliate-info-table.ts @@ -0,0 +1,316 @@ +import Knex from 'knex'; +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexPrimary } from '../helpers/knex'; +import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import AffiliateInfoModel from '../models/affiliate-info-model'; +import { + Options, + Ordering, + QueryableField, + QueryConfig, + AffiliateInfoColumns, + AffiliateInfoCreateObject, + AffiliateInfoFromDatabase, + AffiliateInfoQueryConfig, + Liquidity, + FillType, +} from '../types'; + +export async function findAll( + { + address, + limit, + }: AffiliateInfoQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + address, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + AffiliateInfoModel, + options, + ); + + if (address) { + baseQuery = baseQuery.where(AffiliateInfoColumns.address, address); + } + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + AffiliateInfoColumns.address, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + AffiliateInfoToCreate: AffiliateInfoCreateObject, + options: Options = { txId: undefined 
}, +): Promise { + return AffiliateInfoModel.query( + Transaction.get(options.txId), + ).insert(AffiliateInfoToCreate).returning('*'); +} + +export async function upsert( + AffiliateInfoToUpsert: AffiliateInfoCreateObject, + options: Options = { txId: undefined }, +): Promise { + const AffiliateInfos: AffiliateInfoModel[] = await AffiliateInfoModel.query( + Transaction.get(options.txId), + ).upsert(AffiliateInfoToUpsert).returning('*'); + // should only ever be one AffiliateInfo + return AffiliateInfos[0]; +} + +export async function findById( + address: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + const baseQuery: QueryBuilder = setupBaseQuery( + AffiliateInfoModel, + options, + ); + return baseQuery + .findById(address) + .returning('*'); +} + +/** + * Updates affiliate information in the database based on the provided time window. + * + * This function aggregates affiliate-related metadata and fill statistics + * from various tables. Then it upserts the aggregated data into the `affiliate_info` table. + * + * @async + * @function updateInfo + * @param {string} windowStartTs - The exclusive start timestamp for filtering fills. + * @param {string} windowEndTs - The inclusive end timestamp for filtering fill. + * @param {number} [txId] - Optional transaction ID. 
+ * @returns {Promise} + */ +export async function updateInfo( + windowStartTs: string, // exclusive + windowEndTs: string, // inclusive + txId: number | undefined = undefined, +) : Promise { + const transaction: Knex.Transaction | undefined = Transaction.get(txId); + + const query = ` +-- Get metadata for all affiliates +-- STEP 1: Aggregate affiliate_referred_users +WITH affiliate_metadata AS ( + SELECT + "affiliateAddress", + COUNT(*) AS "totalReferredUsers", + MIN("referredAtBlock") AS "firstReferralBlockHeight" + FROM + affiliate_referred_users + GROUP BY + "affiliateAddress" +), + +-- Calculate fill related stats for affiliates +-- Step 2a: Inner join affiliate_referred_users with subaccounts to get subaccounts referred by the affiliate +affiliate_referred_subaccounts AS ( + SELECT + affiliate_referred_users."affiliateAddress", + affiliate_referred_users."referredAtBlock", + subaccounts."id" + FROM + affiliate_referred_users + INNER JOIN + subaccounts + ON + affiliate_referred_users."refereeAddress" = subaccounts."address" +), + +-- Step 2b: Filter fills by time window +filtered_fills AS ( + SELECT + fills."subaccountId", + fills."liquidity", + fills."createdAt", + CAST(fills."fee" AS decimal) AS "fee", + fills."affiliateRevShare", + fills."createdAtHeight", + fills."price", + fills."size", + fills."type" + FROM + fills + WHERE + fills."createdAt" > '${windowStartTs}' + AND fills."createdAt" <= '${windowEndTs}' +), + +-- Step 2c: Inner join filtered_fills with affiliate_referred_subaccounts and filter +affiliate_fills AS ( + SELECT + filtered_fills."subaccountId", + filtered_fills."liquidity", + filtered_fills."createdAt", + filtered_fills."fee", + filtered_fills."affiliateRevShare", + filtered_fills."price", + filtered_fills."size", + filtered_fills."type", + affiliate_referred_subaccounts."affiliateAddress", + affiliate_referred_subaccounts."referredAtBlock" + FROM + filtered_fills + INNER JOIN + affiliate_referred_subaccounts + ON + 
filtered_fills."subaccountId" = affiliate_referred_subaccounts."id" + WHERE + filtered_fills."createdAtHeight" >= affiliate_referred_subaccounts."referredAtBlock" +), + +-- Step 2d: Groupby to get affiliate level stats +affiliate_stats AS ( + SELECT + affiliate_fills."affiliateAddress", + SUM(affiliate_fills."fee") AS "totalReferredFees", + SUM(affiliate_fills."affiliateRevShare") AS "affiliateEarnings", + SUM(CASE WHEN affiliate_fills."liquidity" = '${Liquidity.MAKER}' AND affiliate_fills."fee" > 0 THEN affiliate_fills."fee" ELSE 0 END) AS "totalReferredMakerFees", + SUM(CASE WHEN affiliate_fills."liquidity" = '${Liquidity.TAKER}' AND affiliate_fills."type" = '${FillType.LIMIT}' THEN affiliate_fills."fee" ELSE 0 END) AS "totalReferredTakerFees", + SUM(CASE WHEN affiliate_fills."liquidity" = '${Liquidity.MAKER}' AND affiliate_fills."fee" < 0 THEN affiliate_fills."fee" ELSE 0 END) AS "totalReferredMakerRebates", + COUNT(CASE WHEN affiliate_fills."liquidity" = '${Liquidity.MAKER}' THEN 1 END) AS "referredMakerTrades", + COUNT(CASE WHEN affiliate_fills."liquidity" = '${Liquidity.TAKER}' THEN 1 END) AS "referredTakerTrades", + SUM(affiliate_fills."price" * affiliate_fills."size") AS "referredTotalVolume" + FROM + affiliate_fills + GROUP BY + affiliate_fills."affiliateAddress" +), + +-- Prepare to update affiliate_info +-- STEP 3a: Left join affiliate_stats onto affiliate_metadata. 
affiliate_stats only has values for +-- addresses with fills in the time window +affiliate_info_update AS ( + SELECT + affiliate_metadata."affiliateAddress", + affiliate_metadata."totalReferredUsers", + affiliate_metadata."firstReferralBlockHeight", + COALESCE(affiliate_stats."totalReferredMakerFees", 0) AS "totalReferredMakerFees", + COALESCE(affiliate_stats."totalReferredTakerFees", 0) AS "totalReferredTakerFees", + COALESCE(affiliate_stats."totalReferredMakerRebates", 0) AS "totalReferredMakerRebates", + COALESCE(affiliate_stats."affiliateEarnings", 0) AS "affiliateEarnings", + COALESCE(affiliate_stats."referredMakerTrades", 0) AS "referredMakerTrades", + COALESCE(affiliate_stats."referredTakerTrades", 0) AS "referredTakerTrades", + COALESCE(affiliate_stats."referredTotalVolume", 0) AS "referredTotalVolume" + FROM + affiliate_metadata + LEFT JOIN + affiliate_stats + ON affiliate_metadata."affiliateAddress" = affiliate_stats."affiliateAddress" +) + +-- Step 3b: Update/upsert the affiliate info table with the new stats +INSERT INTO affiliate_info ( + "address", + "totalReferredUsers", + "firstReferralBlockHeight", + "affiliateEarnings", + "referredMakerTrades", + "referredTakerTrades", + "totalReferredMakerFees", + "totalReferredTakerFees", + "totalReferredMakerRebates", + "referredTotalVolume" +) +SELECT + "affiliateAddress", + "totalReferredUsers", + "firstReferralBlockHeight", + "affiliateEarnings", + "referredMakerTrades", + "referredTakerTrades", + "totalReferredMakerFees", + "totalReferredTakerFees", + "totalReferredMakerRebates", + "referredTotalVolume" +FROM + affiliate_info_update +ON CONFLICT ("address") +DO UPDATE SET + "totalReferredUsers" = EXCLUDED."totalReferredUsers", + "firstReferralBlockHeight" = EXCLUDED."firstReferralBlockHeight", + "affiliateEarnings" = affiliate_info."affiliateEarnings" + EXCLUDED."affiliateEarnings", + "referredMakerTrades" = affiliate_info."referredMakerTrades" + EXCLUDED."referredMakerTrades", + "referredTakerTrades" = 
affiliate_info."referredTakerTrades" + EXCLUDED."referredTakerTrades", + "totalReferredMakerFees" = affiliate_info."totalReferredMakerFees" + EXCLUDED."totalReferredMakerFees", + "totalReferredTakerFees" = affiliate_info."totalReferredTakerFees" + EXCLUDED."totalReferredTakerFees", + "totalReferredMakerRebates" = affiliate_info."totalReferredMakerRebates" + EXCLUDED."totalReferredMakerRebates", + "referredTotalVolume" = affiliate_info."referredTotalVolume" + EXCLUDED."referredTotalVolume"; + `; + + return transaction + ? knexPrimary.raw(query).transacting(transaction) + : knexPrimary.raw(query); +} + +/** + * Finds affiliate information from the database with optional address filtering, sorting, + * and offset based pagination. + * + * @async + * @function paginatedFindWithAddressFilter + * @param {string[]} addressFilter - An array of affiliate addresses to filter by. + * @param {number} offset - The offset for pagination. + * @param {number} limit - The maximum number of records to return. + * @param {boolean} sortByAffiliateEarning - Sort the results by affiliate earnings in desc order. + * @param {Options} [options=DEFAULT_POSTGRES_OPTIONS] - Optional config for database interaction. 
+ * @returns {Promise} + */ +export async function paginatedFindWithAddressFilter( + addressFilter: string[], + offset: number, + limit: number, + sortByAffiliateEarning: boolean, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + let baseQuery: QueryBuilder = setupBaseQuery( + AffiliateInfoModel, + options, + ); + + // Apply address filter if provided + if (addressFilter.length > 0) { + baseQuery = baseQuery.whereIn(AffiliateInfoColumns.address, addressFilter); + } + + // Sorting by affiliate earnings or default sorting by address + if (sortByAffiliateEarning || offset !== 0) { + baseQuery = baseQuery.orderBy(AffiliateInfoColumns.affiliateEarnings, Ordering.DESC) + .orderBy(AffiliateInfoColumns.address, Ordering.ASC); + } + + // Apply pagination using offset and limit + baseQuery = baseQuery.offset(offset).limit(limit); + + return baseQuery.returning('*'); +} diff --git a/indexer/packages/postgres/src/stores/affiliate-referred-users-table.ts b/indexer/packages/postgres/src/stores/affiliate-referred-users-table.ts new file mode 100644 index 00000000000..fb50cc47e4e --- /dev/null +++ b/indexer/packages/postgres/src/stores/affiliate-referred-users-table.ts @@ -0,0 +1,110 @@ +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import AffiliateReferredUsersModel from '../models/affiliate-referred-users-model'; +import { + Options, + Ordering, + QueryableField, + QueryConfig, + AffiliateReferredUsersColumns, + AffiliateReferredUsersCreateObject, + AffiliateReferredUserFromDatabase, + AffiliateReferredUsersQueryConfig, +} from '../types'; + +export async function findAll( + { + affiliateAddress, + refereeAddress, + limit, + }: AffiliateReferredUsersQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + 
verifyAllRequiredFields( + { + affiliateAddress, + refereeAddress, + limit, + } as QueryConfig, + requiredFields, + ); + + // splitting the line after = does not work because it is reformatted to one line by eslint + // eslint-disable-next-line max-len + let baseQuery: QueryBuilder = setupBaseQuery( + AffiliateReferredUsersModel, + options, + ); + + if (affiliateAddress) { + baseQuery = baseQuery.where(AffiliateReferredUsersColumns.affiliateAddress, affiliateAddress); + } + + if (refereeAddress) { + baseQuery = baseQuery.where(AffiliateReferredUsersColumns.refereeAddress, refereeAddress); + } + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + AffiliateReferredUsersColumns.referredAtBlock, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + entryToCreate: AffiliateReferredUsersCreateObject, + options: Options = { txId: undefined }, +): Promise { + return AffiliateReferredUsersModel.query( + Transaction.get(options.txId), + ).insert(entryToCreate).returning('*'); +} + +export async function findByAffiliateAddress( + address: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + // splitting the line after = does not work because it is reformatted to one line by eslint + // eslint-disable-next-line max-len + const baseQuery: QueryBuilder = setupBaseQuery( + AffiliateReferredUsersModel, + options, + ); + return baseQuery + .where('affiliateAddress', address) + .returning('*'); +} + +export async function findByRefereeAddress( + address: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + // splitting the line after = does not work because it is reformatted to one line by eslint + // eslint-disable-next-line max-len + const baseQuery: QueryBuilder = setupBaseQuery( + 
AffiliateReferredUsersModel, + options, + ); + return baseQuery + .where('refereeAddress', address) + .returning('*') + .first(); // should only be one since refereeAddress is primary key +} diff --git a/indexer/packages/postgres/src/stores/candle-table.ts b/indexer/packages/postgres/src/stores/candle-table.ts index 5eae4ee242e..af6192e3877 100644 --- a/indexer/packages/postgres/src/stores/candle-table.ts +++ b/indexer/packages/postgres/src/stores/candle-table.ts @@ -1,7 +1,7 @@ -import _ from 'lodash'; import { PartialModelObject, QueryBuilder } from 'objection'; import { BUFFER_ENCODING_UTF_8, DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexReadReplica } from '../helpers/knex'; import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; import Transaction from '../helpers/transaction'; import { getUuid } from '../helpers/uuid'; @@ -174,36 +174,66 @@ export async function findLatest( export async function findCandlesMap( tickers: string[], - resolutions: CandleResolution[], - options: Options = DEFAULT_POSTGRES_OPTIONS, ): Promise { + if (tickers.length === 0) { + return {}; + } + const candlesMap: CandlesMap = {}; + for (const ticker of tickers) { + candlesMap[ticker] = {}; + } - await Promise.all( - _.map( - tickers, - async (ticker: string) => { - candlesMap[ticker] = {}; - const findLatestCandles: Promise[] = resolutions.map( - (resolution: CandleResolution) => findLatest( - ticker, - resolution, - options, - ), - ); - - // Map each resolution to its respective candle - const allLatestCandles: (CandleFromDatabase | undefined)[] = await Promise.all( - findLatestCandles, - ); - _.forEach(allLatestCandles, (candle: CandleFromDatabase | undefined) => { - if (candle !== undefined) { - candlesMap[ticker][candle.resolution] = candle; - } - }); - }, - ), - ); + const minuteCandlesResult: { + rows: CandleFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT DISTINCT ON ( + ticker, + resolution + ) candles.* 
FROM + candles + WHERE + "ticker" IN (${tickers.map((ticker) => { return `'${ticker}'`; }).join(',')}) AND + "startedAt" > NOW() - INTERVAL '3 hours' AND + resolution IN ('1MIN', '5MINS', '15MINS', '30MINS', '1HOUR') + ORDER BY + ticker, + resolution, + "startedAt" DESC; + `, + ) as unknown as { + rows: CandleFromDatabase[], + }; + const hourDayCandlesResult: { + rows: CandleFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT DISTINCT ON ( + ticker, + resolution + ) candles.* FROM + candles + WHERE + "ticker" IN (${tickers.map((ticker) => { return `'${ticker}'`; }).join(',')}) AND + "startedAt" > NOW() - INTERVAL '2 days' AND + resolution IN ('4HOURS', '1DAY') + ORDER BY + ticker, + resolution, + "startedAt" DESC; + `, + ) as unknown as { + rows: CandleFromDatabase[], + }; + const latestCandles: CandleFromDatabase[] = minuteCandlesResult.rows + .concat(hourDayCandlesResult.rows); + for (const candle of latestCandles) { + if (candlesMap[candle.ticker] === undefined) { + candlesMap[candle.ticker] = {}; + } + candlesMap[candle.ticker][candle.resolution] = candle; + } return candlesMap; } diff --git a/indexer/packages/postgres/src/stores/compliance-status-table.ts b/indexer/packages/postgres/src/stores/compliance-status-table.ts index 2a8a4a1989a..4b01750962d 100644 --- a/indexer/packages/postgres/src/stores/compliance-status-table.ts +++ b/indexer/packages/postgres/src/stores/compliance-status-table.ts @@ -59,7 +59,7 @@ export async function findAll( } if (status !== undefined) { - baseQuery = baseQuery.where(ComplianceStatusColumns.status, status); + baseQuery = baseQuery.whereIn(ComplianceStatusColumns.status, status); } if (reason !== undefined) { diff --git a/indexer/packages/postgres/src/stores/compliance-table.ts b/indexer/packages/postgres/src/stores/compliance-table.ts index 6c1f8103e83..d3d53c9bb33 100644 --- a/indexer/packages/postgres/src/stores/compliance-table.ts +++ b/indexer/packages/postgres/src/stores/compliance-table.ts @@ 
-13,6 +13,7 @@ import { } from '../helpers/stores-helpers'; import Transaction from '../helpers/transaction'; import ComplianceDataModel from '../models/compliance-data-model'; +import WalletModel from '../models/wallet-model'; import { ComplianceDataFromDatabase, ComplianceDataQueryConfig, @@ -34,6 +35,7 @@ export async function findAll( provider, blocked, limit, + addressInWalletsTable, }: ComplianceDataQueryConfig, requiredFields: QueryableField[], options: Options = DEFAULT_POSTGRES_OPTIONS, @@ -45,6 +47,7 @@ export async function findAll( provider, blocked, limit, + addressInWalletsTable, } as QueryConfig, requiredFields, ); @@ -70,6 +73,14 @@ export async function findAll( baseQuery = baseQuery.where(ComplianceDataColumns.blocked, blocked); } + if (addressInWalletsTable === true) { + baseQuery = baseQuery.innerJoin( + WalletModel.tableName, + `${ComplianceDataModel.tableName}.${ComplianceDataColumns.address}`, + '=', + `${WalletModel.tableName}.${WalletModel.idColumn}`); + } + if (options.orderBy !== undefined) { for (const [column, order] of options.orderBy) { baseQuery = baseQuery.orderBy( @@ -123,25 +134,11 @@ export async function upsert( complianceDataToUpsert: ComplianceDataCreateObject, options: Options = { txId: undefined }, ): Promise { - const complianceData: ComplianceDataFromDatabase | undefined = await findByAddressAndProvider( - complianceDataToUpsert.address, - complianceDataToUpsert.provider, - ); - if (complianceData === undefined) { - return create({ - ...complianceDataToUpsert, - }, options); - } - - const updatedComplianceData: ComplianceDataFromDatabase | undefined = await update({ - ...complianceDataToUpsert, - }, options); - - if (updatedComplianceData === undefined) { - throw Error('order must exist after update'); - } + const updatedComplianceData: ComplianceDataModel[] = await ComplianceDataModel.query( + Transaction.get(options.txId), + ).upsert(complianceDataToUpsert).returning('*'); - return updatedComplianceData; + return 
updatedComplianceData[0]; } export async function findByAddressAndProvider( diff --git a/indexer/packages/postgres/src/stores/fill-table.ts b/indexer/packages/postgres/src/stores/fill-table.ts index 186f645b6ae..0ece3920457 100644 --- a/indexer/packages/postgres/src/stores/fill-table.ts +++ b/indexer/packages/postgres/src/stores/fill-table.ts @@ -25,6 +25,7 @@ import { CostOfFills, QueryableField, QueryConfig, + PaginationFromDatabase, } from '../types'; export function uuid(eventId: Buffer, liquidity: Liquidity): string { @@ -49,10 +50,11 @@ export async function findAll( createdOnOrAfter, clientMetadata, fee, + page, }: FillQueryConfig, requiredFields: QueryableField[], options: Options = DEFAULT_POSTGRES_OPTIONS, -): Promise { +): Promise> { verifyAllRequiredFields( { limit, @@ -156,11 +158,41 @@ export async function findAll( Ordering.DESC, ); - if (limit !== undefined) { + if (limit !== undefined && page === undefined) { baseQuery = baseQuery.limit(limit); } - return baseQuery.returning('*'); + /** + * If a query is made using a page number, then the limit property is used as 'page limit' + * TODO: Improve pagination by adding a required eventId for orderBy clause + */ + if (page !== undefined && limit !== undefined) { + /** + * We make sure that the page number is always >= 1 + */ + const currentPage: number = Math.max(1, page); + const offset: number = (currentPage - 1) * limit; + + /** + * Ensure sorting is applied to maintain consistent pagination results. + * Also a casting of the ts type is required since the infer of the type + * obtained from the count is not performed. + */ + const count: { count?: string } = await baseQuery.clone().clearOrder().count({ count: '*' }).first() as unknown as { count?: string }; + + baseQuery = baseQuery.offset(offset).limit(limit); + + return { + results: await baseQuery.returning('*'), + limit, + offset, + total: parseInt(count.count ?? 
'0', 10), + }; + } + + return { + results: await baseQuery.returning('*'), + }; } export async function create( diff --git a/indexer/packages/postgres/src/stores/firebase-notification-token-table.ts b/indexer/packages/postgres/src/stores/firebase-notification-token-table.ts new file mode 100644 index 00000000000..b767230b0b0 --- /dev/null +++ b/indexer/packages/postgres/src/stores/firebase-notification-token-table.ts @@ -0,0 +1,152 @@ +import { DateTime } from 'luxon'; +import { PartialModelObject, QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import TokenModel from '../models/firebase-notification-token-model'; +import { + Options, + Ordering, + QueryableField, + QueryConfig, + FirebaseNotificationTokenColumns, + FirebaseNotificationTokenCreateObject, + FirebaseNotificationTokenFromDatabase, + FirebaseNotificationTokenQueryConfig, + FirebaseNotificationTokenUpdateObject, +} from '../types'; + +export async function findAll( + { + address, + limit, + updatedBeforeOrAt, + }: FirebaseNotificationTokenQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + address, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + TokenModel, + options, + ); + + if (address) { + baseQuery = baseQuery.where(FirebaseNotificationTokenColumns.address, address); + } + + if (updatedBeforeOrAt) { + baseQuery = baseQuery.where(FirebaseNotificationTokenColumns.updatedAt, '<=', updatedBeforeOrAt); + } + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + FirebaseNotificationTokenColumns.updatedAt, + Ordering.ASC, + ); + } + + if 
(limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + tokenToCreate: FirebaseNotificationTokenCreateObject, + options: Options = { txId: undefined }, +): Promise { + return TokenModel.query( + Transaction.get(options.txId), + ).insert(tokenToCreate).returning('*'); +} + +export async function update( + { + token, + ...fields + }: FirebaseNotificationTokenUpdateObject, + options: Options = { txId: undefined }, +): Promise { + const existingToken = await TokenModel.query( + Transaction.get(options.txId), + ).findOne({ token }); + const updatedToken = await existingToken.$query().patch(fields as PartialModelObject).returning('*'); + return updatedToken as unknown as FirebaseNotificationTokenFromDatabase; +} + +export async function upsert( + tokenToUpsert: FirebaseNotificationTokenCreateObject, + options: Options = { txId: undefined }, +): Promise { + const existingToken = await TokenModel.query( + Transaction.get(options.txId), + ).findOne({ token: tokenToUpsert.token }); + + if (existingToken) { + return update(tokenToUpsert, options); + } else { + return create(tokenToUpsert, options); + } +} + +export async function deleteMany( + tokens: string[], + options: Options = { txId: undefined }, +): Promise { + const baseQuery: QueryBuilder = setupBaseQuery( + TokenModel, + options, + ); + + const result = await baseQuery + .delete() + .whereIn('token', tokens); + return result; +} + +export async function findByToken( + token: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + const baseQuery: QueryBuilder = setupBaseQuery( + TokenModel, + options, + ); + return baseQuery + .findOne({ token }) + .returning('*'); +} + +export async function registerToken( + token: string, + address: string, + language: string, + options: Options = { txId: undefined }, +): Promise { + return upsert( + { + token, + address, + updatedAt: DateTime.now().toISO(), + language, + }, + options, + ); +} 
diff --git a/indexer/packages/postgres/src/stores/funding-index-updates-table.ts b/indexer/packages/postgres/src/stores/funding-index-updates-table.ts index dce9a470287..785431ff93b 100644 --- a/indexer/packages/postgres/src/stores/funding-index-updates-table.ts +++ b/indexer/packages/postgres/src/stores/funding-index-updates-table.ts @@ -3,6 +3,7 @@ import _ from 'lodash'; import { QueryBuilder } from 'objection'; import { BUFFER_ENCODING_UTF_8, DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexReadReplica } from '../helpers/knex'; import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; import Transaction from '../helpers/transaction'; import { getUuid } from '../helpers/uuid'; @@ -21,6 +22,14 @@ import { } from '../types'; import * as PerpetualMarketTable from './perpetual-market-table'; +// Assuming block time of 1 second, this should be 4 hours of blocks +const FOUR_HOUR_OF_BLOCKS = Big(3600).times(4); +// Type used for querying for funding index maps for multiple effective heights. +interface FundingIndexUpdatesFromDatabaseWithSearchHeight extends FundingIndexUpdatesFromDatabase { + // max effective height being queried for + searchHeight: string, +} + export function uuid( blockHeight: string, eventId: Buffer, @@ -193,8 +202,6 @@ export async function findFundingIndexMap( options, ); - // Assuming block time of 1 second, this should be 4 hours of blocks - const FOUR_HOUR_OF_BLOCKS = Big(3600).times(4); const fundingIndexUpdates: FundingIndexUpdatesFromDatabase[] = await baseQuery .distinctOn(FundingIndexUpdatesColumns.perpetualId) .where(FundingIndexUpdatesColumns.effectiveAtHeight, '<=', effectiveBeforeOrAtHeight) @@ -216,3 +223,69 @@ export async function findFundingIndexMap( initialFundingIndexMap, ); } + +/** + * Finds funding index maps for multiple effective before or at heights. 
Uses a SQL query unnesting + * an array of effective before or at heights and cross-joining with the funding index updates table + * to find the closest funding index update per effective before or at height. + * @param effectiveBeforeOrAtHeights Heights to get funding index maps for. + * @param options + * @returns Object mapping block heights to the respective funding index maps. + */ +export async function findFundingIndexMaps( + effectiveBeforeOrAtHeights: string[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise<{[blockHeight: string]: FundingIndexMap}> { + const heightNumbers: number[] = effectiveBeforeOrAtHeights + .map((height: string):number => parseInt(height, 10)) + .filter((parsedHeight: number): boolean => { return !Number.isNaN(parsedHeight); }) + .sort(); + // Get the min height to limit the search to blocks 4 hours or before the min height. + const minHeight: number = heightNumbers[0]; + const maxheight: number = heightNumbers[heightNumbers.length - 1]; + + const result: { + rows: FundingIndexUpdatesFromDatabaseWithSearchHeight[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT + DISTINCT ON ("perpetualId", "searchHeight") "perpetualId", "searchHeight", + "funding_index_updates".* + FROM + "funding_index_updates", + unnest(ARRAY[${heightNumbers.join(',')}]) AS "searchHeight" + WHERE + "effectiveAtHeight" > ${Big(minHeight).minus(FOUR_HOUR_OF_BLOCKS).toFixed()} AND + "effectiveAtHeight" <= ${Big(maxheight)} AND + "effectiveAtHeight" <= "searchHeight" + ORDER BY + "perpetualId", + "searchHeight", + "effectiveAtHeight" DESC + `, + ) as unknown as { + rows: FundingIndexUpdatesFromDatabaseWithSearchHeight[], + }; + + const perpetualMarkets: PerpetualMarketFromDatabase[] = await PerpetualMarketTable.findAll( + {}, + [], + options, + ); + + const fundingIndexMaps:{[blockHeight: string]: FundingIndexMap} = {}; + for (const height of effectiveBeforeOrAtHeights) { + fundingIndexMaps[height] = _.reduce(perpetualMarkets, + (acc: 
FundingIndexMap, perpetualMarket: PerpetualMarketFromDatabase): FundingIndexMap => { + acc[perpetualMarket.id] = Big(0); + return acc; + }, + {}, + ); + } + for (const funding of result.rows) { + fundingIndexMaps[funding.searchHeight][funding.perpetualId] = Big(funding.fundingIndex); + } + + return fundingIndexMaps; +} diff --git a/indexer/packages/postgres/src/stores/leaderboard-pnl-table.ts b/indexer/packages/postgres/src/stores/leaderboard-pnl-table.ts new file mode 100644 index 00000000000..15de08f02cd --- /dev/null +++ b/indexer/packages/postgres/src/stores/leaderboard-pnl-table.ts @@ -0,0 +1,145 @@ +import Knex from 'knex'; +import _ from 'lodash'; +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexPrimary } from '../helpers/knex'; +import { + verifyAllRequiredFields, + setupBaseQuery, + verifyAllInjectableVariables, + setBulkRowsForUpdate, + generateBulkUpsertString, +} from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import LeaderboardPnlModel from '../models/leaderboard-pnl-model'; +import { + QueryConfig, + LeaderboardPnlCreateObject, + LeaderboardPnlFromDatabase, + LeaderboardPnlColumns, + LeaderboardPnlQueryConfig, + Options, + Ordering, + QueryableField, +} from '../types'; + +export async function findAll( + { + address, + timeSpan, + rank, + limit, + }: LeaderboardPnlQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + address, + timeSpan, + rank, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + LeaderboardPnlModel, + options, + ); + + if (address) { + baseQuery = baseQuery.whereIn(LeaderboardPnlColumns.address, address); + } + + if (timeSpan) { + baseQuery = baseQuery.whereIn(LeaderboardPnlColumns.timeSpan, timeSpan); + } + + if (rank) { + baseQuery = baseQuery.whereIn(LeaderboardPnlColumns.rank, rank); + 
} + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + LeaderboardPnlColumns.rank, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + leaderboardPnlToCreate: LeaderboardPnlCreateObject, + options: Options = { txId: undefined }, +): Promise { + return LeaderboardPnlModel.query( + Transaction.get(options.txId), + ).insert({ + ...leaderboardPnlToCreate, + }).returning('*'); +} + +export async function upsert( + LeaderboardPnlToUpsert: LeaderboardPnlCreateObject, + options: Options = { txId: undefined }, +): Promise { + const leaderboardPnls: LeaderboardPnlModel[] = await LeaderboardPnlModel.query( + Transaction.get(options.txId), + ).upsert({ + ...LeaderboardPnlToUpsert, + }).returning('*'); + return leaderboardPnls[0]; +} + +export async function bulkUpsert( + leaderboardPnlObjects: LeaderboardPnlCreateObject[], + options: Options = { txId: undefined }, +): Promise { + leaderboardPnlObjects.forEach( + (leaderboardPnlObject: LeaderboardPnlCreateObject) => verifyAllInjectableVariables( + Object.values(leaderboardPnlObject), + ), + ); + + const columns: LeaderboardPnlColumns[] = _.keys( + leaderboardPnlObjects[0]) as LeaderboardPnlColumns[]; + const rows: string[] = setBulkRowsForUpdate({ + objectArray: leaderboardPnlObjects, + columns, + numericColumns: [ + LeaderboardPnlColumns.rank, + ], + stringColumns: [ + LeaderboardPnlColumns.address, + LeaderboardPnlColumns.timeSpan, + LeaderboardPnlColumns.currentEquity, + LeaderboardPnlColumns.pnl, + ], + }); + + const query: string = generateBulkUpsertString({ + table: LeaderboardPnlModel.tableName, + objectRows: rows, + columns, + uniqueIdentifiers: [LeaderboardPnlColumns.address, LeaderboardPnlColumns.timeSpan], + }); + + const transaction: Knex.Transaction | undefined = 
Transaction.get(options.txId); + return transaction + ? knexPrimary.raw(query).transacting(transaction) + : knexPrimary.raw(query); +} diff --git a/indexer/packages/postgres/src/stores/oracle-price-table.ts b/indexer/packages/postgres/src/stores/oracle-price-table.ts index 000230ce6cd..41d85ffcea9 100644 --- a/indexer/packages/postgres/src/stores/oracle-price-table.ts +++ b/indexer/packages/postgres/src/stores/oracle-price-table.ts @@ -198,7 +198,7 @@ async function findLatestPricesByDateTime( .groupBy('marketId'); const oraclePrices: OraclePriceFromDatabase[] = await baseQuery - .innerJoin(subQuery.as('sub'), function () { + .innerJoin(subQuery.as('sub'), function joinConditions() { this .on('oracle_prices.marketId', '=', 'sub.marketId') .andOn('oracle_prices.effectiveAt', '=', 'sub.maxEffectiveAt'); diff --git a/indexer/packages/postgres/src/stores/order-table.ts b/indexer/packages/postgres/src/stores/order-table.ts index df59c4e27ca..a197dee1a9f 100644 --- a/indexer/packages/postgres/src/stores/order-table.ts +++ b/indexer/packages/postgres/src/stores/order-table.ts @@ -15,6 +15,7 @@ import { OrderQueryConfig, OrderStatus, OrderUpdateObject, + PaginationFromDatabase, QueryableField, QueryConfig, } from '../types'; @@ -67,10 +68,11 @@ export async function findAll( goodTilBlockTimeBeforeOrAt, clientMetadata, triggerPrice, + page, }: OrderQueryConfig, requiredFields: QueryableField[], options: Options = DEFAULT_POSTGRES_OPTIONS, -): Promise { +): Promise> { verifyAllRequiredFields( { limit, @@ -185,11 +187,41 @@ export async function findAll( } } - if (limit !== undefined) { + if (limit !== undefined && page === undefined) { baseQuery = baseQuery.limit(limit); } - return baseQuery.returning('*'); + /** + * If a query is made using a page number, then the limit property is used as 'page limit' + * TODO: Improve pagination by adding a required eventId for orderBy clause + */ + if (page !== undefined && limit !== undefined) { + /** + * We make sure that the page 
number is always >= 1 + */ + const currentPage: number = Math.max(1, page); + const offset: number = (currentPage - 1) * limit; + + /** + * Ensure sorting is applied to maintain consistent pagination results. + * Also a casting of the ts type is required since the infer of the type + * obtained from the count is not performed. + */ + const count: { count?: string } = await baseQuery.clone().clearOrder().count({ count: '*' }).first() as unknown as { count?: string }; + + baseQuery = baseQuery.offset(offset).limit(limit); + + return { + results: await baseQuery.returning('*'), + limit, + offset, + total: parseInt(count.count ?? '0', 10), + }; + } + + return { + results: await baseQuery.returning('*'), + }; } export async function create( diff --git a/indexer/packages/postgres/src/stores/persistent-cache-table.ts b/indexer/packages/postgres/src/stores/persistent-cache-table.ts new file mode 100644 index 00000000000..524e2ed05d0 --- /dev/null +++ b/indexer/packages/postgres/src/stores/persistent-cache-table.ts @@ -0,0 +1,95 @@ +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import PersistentCacheModel from '../models/persistent-cache-model'; +import { + Options, + Ordering, + QueryableField, + QueryConfig, + PersistentCacheColumns, + PersistentCacheCreateObject, + PersistentCacheFromDatabase, + PersistentCacheQueryConfig, +} from '../types'; + +export async function findAll( + { + key, + limit, + }: PersistentCacheQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + key, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + PersistentCacheModel, + options, + ); + + if (key) { + baseQuery = baseQuery.where(PersistentCacheColumns.key, key); + } 
+ + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + PersistentCacheColumns.key, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + kvToCreate: PersistentCacheCreateObject, + options: Options = { txId: undefined }, +): Promise { + return PersistentCacheModel.query( + Transaction.get(options.txId), + ).insert(kvToCreate).returning('*'); +} + +export async function upsert( + kvToUpsert: PersistentCacheCreateObject, + options: Options = { txId: undefined }, +): Promise { + const kvs: PersistentCacheModel[] = await PersistentCacheModel.query( + Transaction.get(options.txId), + ).upsert(kvToUpsert).returning('*'); + // should only ever be one key value pair + return kvs[0]; +} + +export async function findById( + kv: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + const baseQuery: QueryBuilder = setupBaseQuery( + PersistentCacheModel, + options, + ); + return baseQuery + .findById(kv) + .returning('*'); +} diff --git a/indexer/packages/postgres/src/stores/pnl-ticks-table.ts b/indexer/packages/postgres/src/stores/pnl-ticks-table.ts index 9c78d44eaf8..99cffb05c34 100644 --- a/indexer/packages/postgres/src/stores/pnl-ticks-table.ts +++ b/indexer/packages/postgres/src/stores/pnl-ticks-table.ts @@ -1,11 +1,15 @@ import _ from 'lodash'; +import { DateTime } from 'luxon'; import { QueryBuilder } from 'objection'; -import { BUFFER_ENCODING_UTF_8, DEFAULT_POSTGRES_OPTIONS, ZERO_TIME_ISO_8601 } from '../constants'; +import { + BUFFER_ENCODING_UTF_8, DEFAULT_POSTGRES_OPTIONS, ZERO_TIME_ISO_8601, +} from '../constants'; import { knexReadReplica } from '../helpers/knex'; import { setupBaseQuery, verifyAllInjectableVariables, verifyAllRequiredFields } from '../helpers/stores-helpers'; import Transaction from 
'../helpers/transaction'; import { getUuid } from '../helpers/uuid'; +import { getVaultAddresses } from '../lib/helpers'; import PnlTicksModel from '../models/pnl-ticks-model'; import { Options, @@ -16,6 +20,10 @@ import { PnlTicksQueryConfig, QueryableField, QueryConfig, + PaginationFromDatabase, + LeaderboardPnlCreateObject, + LeaderboardPnlTimeSpan, + PnlTickInterval, } from '../types'; export function uuid( @@ -42,10 +50,11 @@ export async function findAll( createdBeforeOrAtBlockHeight, createdOnOrAfter, createdOnOrAfterBlockHeight, + page, }: PnlTicksQueryConfig, requiredFields: QueryableField[], options: Options = DEFAULT_POSTGRES_OPTIONS, -): Promise { +): Promise> { verifyAllRequiredFields( { limit, @@ -128,11 +137,40 @@ export async function findAll( ); } - if (limit !== undefined) { + if (limit !== undefined && page === undefined) { baseQuery = baseQuery.limit(limit); } - return baseQuery.returning('*'); + /** + * If a query is made using a page number, then the limit property is used as 'page limit' + */ + if (page !== undefined && limit !== undefined) { + /** + * We make sure that the page number is always >= 1 + */ + const currentPage: number = Math.max(1, page); + const offset: number = (currentPage - 1) * limit; + + /** + * Ensure sorting is applied to maintain consistent pagination results. + * Also a casting of the ts type is required since the infer of the type + * obtained from the count is not performed. + */ + const count: { count?: string } = await baseQuery.clone().clearOrder().count({ count: '*' }).first() as unknown as { count?: string }; + + baseQuery = baseQuery.offset(offset).limit(limit); + + return { + results: await baseQuery.returning('*'), + limit, + offset, + total: parseInt(count.count ?? 
'0', 10), + }; + } + + return { + results: await baseQuery.returning('*'), + }; } export async function create( @@ -181,28 +219,48 @@ function convertPnlTicksFromDatabaseToPnlTicksCreateObject( return _.omit(pnlTicksFromDatabase, PnlTicksColumns.id); } -export async function findLatestProcessedBlocktime(): Promise { +export async function findLatestProcessedBlocktimeAndCount(): Promise<{ + maxBlockTime: string, + count: number, +}> { const result: { - rows: [{ max: string }] + rows: [{ max: string, count: number }], } = await knexReadReplica.getConnection().raw( ` - SELECT MAX("blockTime") - FROM "pnl_ticks" - ` - , - ) as unknown as { rows: [{ max: string }] }; - return result.rows[0].max || ZERO_TIME_ISO_8601; + WITH maxBlockTime AS ( + SELECT MAX("blockTime") as "maxBlockTime" + FROM "pnl_ticks" + ) + SELECT + maxBlockTime."maxBlockTime" as max, + COUNT(*) as count + FROM + "pnl_ticks", + maxBlockTime + WHERE + "pnl_ticks"."blockTime" = maxBlockTime."maxBlockTime" + GROUP BY 1 + `, + ) as unknown as { rows: [{ max: string, count: number }] }; + + const maxBlockTime = result.rows[0]?.max || ZERO_TIME_ISO_8601; + const count = Number(result.rows[0]?.count) || 0; + + return { + maxBlockTime, + count, + }; } export async function findMostRecentPnlTickForEachAccount( createdOnOrAfterHeight: string, ): Promise<{ - [subaccountId: string]: PnlTicksCreateObject + [subaccountId: string]: PnlTicksCreateObject, }> { verifyAllInjectableVariables([createdOnOrAfterHeight]); const result: { - rows: PnlTicksFromDatabase[] + rows: PnlTicksFromDatabase[], } = await knexReadReplica.getConnection().raw( ` SELECT DISTINCT ON ("subaccountId") * @@ -217,3 +275,277 @@ export async function findMostRecentPnlTickForEachAccount( 'subaccountId', ); } + +export async function getRankedPnlTicks( + timeSpan: string, +): Promise { + if (timeSpan === 'ALL_TIME') { + return getAllTimeRankedPnlTicks(); + } + return getRankedPnlTicksForTimeSpan(timeSpan); +} + +function 
convertTimespanToSQL(timeSpan: string): string { + const timeSpanEnum: LeaderboardPnlTimeSpan = LeaderboardPnlTimeSpan[ + timeSpan as keyof typeof LeaderboardPnlTimeSpan]; + switch (timeSpanEnum) { + case LeaderboardPnlTimeSpan.ONE_DAY: + return '1 days'; + case LeaderboardPnlTimeSpan.SEVEN_DAYS: + return '7 days'; + case LeaderboardPnlTimeSpan.THIRTY_DAYS: + return '30 days'; + case LeaderboardPnlTimeSpan.ONE_YEAR: + return '365 days'; + default: + throw new Error(`Invalid time span: ${timeSpan}`); + } +} + +/** + * Constructs a complex SQL query to calculate the Pnl difference and current equity + * of subaccounts over a specified time span, ranking them by their PnL. + * + * This has 5 main parts + * 1. latest_subaccount_pnl_x_days_ago: Identifies the most recent PnL tick for each subaccount + * before the specified time span. It filters out subaccounts which are not parent + * subaccounts or associated child subaccounts. It also excludes any addresses + * that are vault addresses. + * + * 2. latest_pnl: Finds the latest PnL tick for each subaccount as of the current date, + * applying the same filters as latest_subaccount_pnl_x_days_ago. + * + * 3. subaccount_pnl_difference: Calculates the difference in PnL between the + * current date and the start of the specified time span for each subaccount. + * + * 4. aggregated_results: Aggregates the PnL differences and current equity for + * all subaccounts, grouping by address. + * + * 5. The final SELECT statement then ranks the addresses based on their total PnL + * in descending order, providing a snapshot of subaccount performance over the + * specified time span. 
+ * +*/ +async function getRankedPnlTicksForTimeSpan( + timeSpan: string, +): Promise { + const vaultAddresses: string[] = getVaultAddresses(); + const vaultAddressesString: string = vaultAddresses.map((address) => `'${address}'`).join(','); + const intervalSqlString: string = convertTimespanToSQL(timeSpan); + const result: { + rows: LeaderboardPnlCreateObject[], + } = await knexReadReplica.getConnection().raw( + ` + WITH latest_subaccount_pnl_x_days_ago AS ( + SELECT DISTINCT ON (a."subaccountId") + a."subaccountId", + a."totalPnl", + b."address" + FROM + pnl_ticks a + LEFT JOIN + subaccounts b ON a."subaccountId" = b."id" + WHERE + a."createdAt"::date <= (CURRENT_DATE - INTERVAL '${intervalSqlString}') + AND (b."subaccountNumber" % 128) = 0 + AND b."address" NOT IN (${vaultAddressesString}) + ORDER BY a."subaccountId", a."blockHeight" DESC + ), + latest_pnl as ( + SELECT DISTINCT ON (a."subaccountId") + "subaccountId", + "totalPnl", + "equity" as "currentEquity", + "address" + FROM + pnl_ticks a left join subaccounts b ON a."subaccountId"=b."id" + WHERE + "createdAt"::date = CURRENT_DATE + AND (b."subaccountNumber" % 128) = 0 + AND b."address" NOT IN (${vaultAddressesString}) + ORDER BY a."subaccountId", "blockHeight" DESC + ), + subaccount_pnl_difference as( + SELECT + a."address", + a."totalPnl" - COALESCE(b."totalPnl", 0) as "pnlDifference", + a."currentEquity" as "currentEquity" + FROM latest_pnl a left join latest_subaccount_pnl_x_days_ago b + ON a."subaccountId"=b."subaccountId" + ), aggregated_results as( + SELECT + "address", + sum(subaccount_pnl_difference."pnlDifference") as "totalPnl", + sum(subaccount_pnl_difference."currentEquity") as "currentEquity" + FROM + subaccount_pnl_difference + GROUP BY address + ) + SELECT + "address", + "totalPnl" as "pnl", + '${timeSpan}' as "timeSpan", + "currentEquity", + ROW_NUMBER() over (order by aggregated_results."totalPnl" desc) as rank + FROM + aggregated_results; + `, + ) as { rows: LeaderboardPnlCreateObject[] 
}; + + return result.rows; +} + +/** + * Constructs a query to calculate and rank the Profit and Loss (PnL) and current equity of + * subaccounts for the current day. This query is divided into 3 main parts: + * 1. latest_pnl: This selects the most recent PnL tick for each Parent subaccount + * and associated child subaccounts. It filters subaccounts based on the current date. + * Additionally, it excludes any addresses that are vault addresses. + * + * 2. aggregated_results: This CTE aggregates the results from latest_pnl by address. + * It sums up the total PnL and current equity for each address. + * + * 3. The final SELECT statement calculates a rank for each address based on the total PnL in + * descending order along with associated fields + */ +async function getAllTimeRankedPnlTicks(): Promise { + const vaultAddresses: string[] = getVaultAddresses(); + const vaultAddressesString: string = vaultAddresses.map((address) => `'${address}'`).join(','); + const result: { + rows: LeaderboardPnlCreateObject[], + } = await knexReadReplica.getConnection().raw( + ` + WITH latest_pnl as ( + SELECT DISTINCT ON (a."subaccountId") + "subaccountId", + "totalPnl", + "equity" as "currentEquity", + "address" + FROM + pnl_ticks a left join subaccounts b ON a."subaccountId"=b."id" + WHERE + "createdAt"::date = CURRENT_DATE + AND (b."subaccountNumber" % 128) = 0 + AND b."address" NOT IN (${vaultAddressesString}) + ORDER BY a."subaccountId", "blockHeight" DESC + ), aggregated_results as( + SELECT + "address", + sum(latest_pnl."totalPnl") as "totalPnl", + sum(latest_pnl."currentEquity") as "currentEquity" + FROM + latest_pnl + GROUP BY address + ) + SELECT + "address", + "totalPnl" as "pnl", + 'ALL_TIME' as "timeSpan", + "currentEquity", + ROW_NUMBER() over (order by aggregated_results."totalPnl" desc) as rank + FROM + aggregated_results; + `, + ) as { rows: LeaderboardPnlCreateObject[] }; + + return result.rows; +} + +/** + * Constructs a query to get pnl ticks at a specific 
interval for a set of subaccounts + * within a time range. + * Uses a windowing function in the raw query to get the first row of each window of the specific + * interval time. + * Currently only supports hourly / daily as the interval. + * @param interval 'day' or 'hour'. + * @param timeWindowSeconds Window of time to get pnl ticks for at the specified interval. + * @param subaccountIds Set of subaccounts to get pnl ticks for. + * @returns + */ +export async function getPnlTicksAtIntervals( + interval: PnlTickInterval, + timeWindowSeconds: number, + subaccountIds: string[], + earliestDate: DateTime, +): Promise { + if (subaccountIds.length === 0) { + return []; + } + const result: { + rows: PnlTicksFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT + "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" + FROM ( + SELECT + pnl_ticks.*, + ROW_NUMBER() OVER ( + PARTITION BY + "subaccountId", + DATE_TRUNC( + '${interval}', + "blockTime" + ) ORDER BY "blockTime" + ) AS r + FROM pnl_ticks + WHERE + "subaccountId" IN (${subaccountIds.map((id: string) => { return `'${id}'`; }).join(',')}) AND + "blockTime" >= '${earliestDate.toUTC().toISO()}'::timestamp AND + "blockTime" > NOW() - INTERVAL '${timeWindowSeconds} second' + ) AS pnl_intervals + WHERE + r = 1 + ORDER BY "subaccountId"; + `, + ) as unknown as { + rows: PnlTicksFromDatabase[], + }; + + return result.rows; +} + +export async function getLatestPnlTick( + subaccountIds: string[], + beforeOrAt: DateTime, +): Promise { + if (subaccountIds.length === 0) { + return []; + } + const result: { + rows: PnlTicksFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT + DISTINCT ON ("subaccountId") + "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" + FROM + pnl_ticks + WHERE + "subaccountId" in (${subaccountIds.map((id: string) => { return `'${id}'`; 
}).join(',')}) AND + "blockTime" <= '${beforeOrAt.toUTC().toISO()}'::timestamp AND + "blockTime" >= '${beforeOrAt.toUTC().minus({ hours: 4 }).toISO()}'::timestamp + ORDER BY + "subaccountId", + "blockTime" DESC + `, + ) as unknown as { + rows: PnlTicksFromDatabase[], + }; + + return result.rows; +} diff --git a/indexer/packages/postgres/src/stores/subaccount-table.ts b/indexer/packages/postgres/src/stores/subaccount-table.ts index 18b861e0dee..5c3ea231fdd 100644 --- a/indexer/packages/postgres/src/stores/subaccount-table.ts +++ b/indexer/packages/postgres/src/stores/subaccount-table.ts @@ -1,6 +1,7 @@ import { IndexerSubaccountId } from '@dydxprotocol-indexer/v4-protos'; import { PartialModelObject, QueryBuilder } from 'objection'; +import config from '../config'; import { BUFFER_ENCODING_UTF_8, DEFAULT_POSTGRES_OPTIONS } from '../constants'; import { verifyAllRequiredFields, @@ -58,7 +59,6 @@ export async function findAll( SubaccountModel, options, ); - if (id) { baseQuery = baseQuery.whereIn(SubaccountColumns.id, id); } @@ -67,7 +67,7 @@ export async function findAll( baseQuery = baseQuery.where(SubaccountColumns.address, address); } - if (subaccountNumber) { + if (subaccountNumber !== undefined) { baseQuery = baseQuery.where(SubaccountColumns.subaccountNumber, subaccountNumber); } @@ -188,3 +188,16 @@ export async function update( // The objection types mistakenly think the query returns an array of Subaccounts. 
return updatedSubaccount as unknown as (SubaccountFromDatabase | undefined); } + +export async function deleteById( + id: string, + options: Options = { txId: undefined }, +): Promise { + if (config.NODE_ENV !== 'test') { + throw new Error('Subaccount deletion is not allowed in non-test environments'); + } + + await SubaccountModel.query( + Transaction.get(options.txId), + ).deleteById(id); +} diff --git a/indexer/packages/postgres/src/stores/subaccount-usernames-table.ts b/indexer/packages/postgres/src/stores/subaccount-usernames-table.ts new file mode 100644 index 00000000000..d03e91afbd6 --- /dev/null +++ b/indexer/packages/postgres/src/stores/subaccount-usernames-table.ts @@ -0,0 +1,147 @@ +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexReadReplica } from '../helpers/knex'; +import { + verifyAllRequiredFields, + setupBaseQuery, + rawQuery, +} from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import SubaccountUsernamesModel from '../models/subaccount-usernames-model'; +import { + QueryConfig, + SubaccountUsernamesFromDatabase, + SubaccountUsernamesQueryConfig, + SubaccountUsernamesColumns, + SubaccountUsernamesCreateObject, + SubaccountsWithoutUsernamesResult, + Options, + Ordering, + QueryableField, + AddressUsername, +} from '../types'; + +export async function findAll( + { + username, + subaccountId, + limit, + }: SubaccountUsernamesQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + username, + subaccountId, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + SubaccountUsernamesModel, + options, + ); + + if (username) { + baseQuery = baseQuery.whereIn(SubaccountUsernamesColumns.username, username); + } + + if (subaccountId) { + baseQuery = baseQuery.whereIn(SubaccountUsernamesColumns.subaccountId, subaccountId); + 
} + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + SubaccountUsernamesColumns.username, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + subaccountUsernameToCreate: SubaccountUsernamesCreateObject, + options: Options = { txId: undefined }, +): Promise { + return SubaccountUsernamesModel.query( + Transaction.get(options.txId), + ).insert({ + ...subaccountUsernameToCreate, + }).returning('*'); +} + +export async function findByUsername( + username: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + const baseQuery: + QueryBuilder = setupBaseQuery( + SubaccountUsernamesModel, + options, + ); + return (await baseQuery).find((subaccountUsername) => subaccountUsername.username === username); +} + +export async function getSubaccountZerosWithoutUsernames( + limit: number, + options: Options = DEFAULT_POSTGRES_OPTIONS): + Promise { + const queryString: string = ` + SELECT id as "subaccountId", address + FROM subaccounts + WHERE subaccounts."subaccountNumber" = 0 + AND id NOT IN ( + SELECT "subaccountId" FROM subaccount_usernames + ) + ORDER BY address + LIMIT ? + `; + + const result: { + rows: SubaccountsWithoutUsernamesResult[], + } = await rawQuery(queryString, { ...options, bindings: [limit] }); + + return result.rows; +} + +export async function findByAddress( + addresses: string[], +): Promise { + if (addresses.length === 0) { + return []; + } + + const result: { rows: AddressUsername[] } = await knexReadReplica + .getConnection() + .raw( + ` + WITH subaccountIds AS ( + SELECT "id", "address" + FROM subaccounts + WHERE "address" = ANY(?) 
+ AND "subaccountNumber" = 0 + ) + SELECT s."address", u."username" + FROM subaccountIds s + LEFT JOIN subaccount_usernames u ON u."subaccountId" = s."id" + `, + [addresses], + ); + + return result.rows; +} diff --git a/indexer/packages/postgres/src/stores/transfer-table.ts b/indexer/packages/postgres/src/stores/transfer-table.ts index 1a14bb174b5..c625dacaebb 100644 --- a/indexer/packages/postgres/src/stores/transfer-table.ts +++ b/indexer/packages/postgres/src/stores/transfer-table.ts @@ -21,6 +21,7 @@ import { QueryableField, ToAndFromSubaccountTransferQueryConfig, SubaccountAssetNetTransferMap, + PaginationFromDatabase, } from '../types'; export function uuid( @@ -40,9 +41,9 @@ export function uuid( } interface SubaccountAssetNetTransfer { - subaccountId: string; - assetId: string; - totalSize: string; + subaccountId: string, + assetId: string, + totalSize: string, } export async function findAll( @@ -194,10 +195,11 @@ export async function findAllToOrFromSubaccountId( createdBeforeOrAt, createdAfterHeight, createdAfter, + page, }: ToAndFromSubaccountTransferQueryConfig, requiredFields: QueryableField[], options: Options = DEFAULT_POSTGRES_OPTIONS, -): Promise { +): Promise> { verifyAllRequiredFields( { limit, @@ -291,11 +293,41 @@ export async function findAllToOrFromSubaccountId( } } - if (limit !== undefined) { + if (limit !== undefined && page === undefined) { baseQuery = baseQuery.limit(limit); } - return baseQuery.returning('*'); + /** + * If a query is made using a page number, then the limit property is used as 'page limit' + * TODO: Improve pagination by adding a required eventId for orderBy clause + */ + if (page !== undefined && limit !== undefined) { + /** + * We make sure that the page number is always >= 1 + */ + const currentPage: number = Math.max(1, page); + const offset: number = (currentPage - 1) * limit; + + /** + * Ensure sorting is applied to maintain consistent pagination results. 
+ * Also a casting of the ts type is required since the infer of the type + * obtained from the count is not performed. + */ + const count: { count?: string } = await baseQuery.clone().clearOrder().count({ count: '*' }).first() as unknown as { count?: string }; + + baseQuery = baseQuery.offset(offset).limit(limit); + + return { + results: await baseQuery.returning('*'), + limit, + offset, + total: parseInt(count.count ?? '0', 10), + }; + } + + return { + results: await baseQuery.returning('*'), + }; } function convertToSubaccountAssetMap( @@ -313,7 +345,7 @@ function convertToSubaccountAssetMap( } export interface AssetTransferMap { - [assetId: string]: Big; + [assetId: string]: Big, } export async function getNetTransfersBetweenBlockHeightsForSubaccount( @@ -346,8 +378,8 @@ export async function getNetTransfersBetweenBlockHeightsForSubaccount( const result: { rows: { - assetId: string; - totalSize: string; + assetId: string, + totalSize: string, }[], } = await rawQuery(queryString, options); return _.mapValues(_.keyBy(result.rows, 'assetId'), (row: { assetId: string, totalSize: string }) => { @@ -399,6 +431,44 @@ export async function getNetTransfersPerSubaccount( return convertToSubaccountAssetMap(assetsPerSubaccount); } +export async function getNetTransfersBetweenSubaccountIds( + sourceSubaccountId: string, + recipientSubaccountId: string, + assetId: string, + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + const queryString: string = ` + SELECT + COALESCE(SUM(sub."size"), '0') AS "totalSize" + FROM ( + SELECT DISTINCT + "size" AS "size", + "id" + FROM + "transfers" + WHERE "transfers"."assetId" = '${assetId}' + AND "transfers"."senderSubaccountId" = '${sourceSubaccountId}' + AND "transfers"."recipientSubaccountId" = '${recipientSubaccountId}' + UNION + SELECT DISTINCT + -"size" AS "size", + "id" + FROM + "transfers" + WHERE "transfers"."assetId" = '${assetId}' + AND "transfers"."senderSubaccountId" = '${recipientSubaccountId}' + AND 
"transfers"."recipientSubaccountId" = '${sourceSubaccountId}' + ) AS sub + `; + + const result: { + rows: { totalSize: string }[], + } = await rawQuery(queryString, options); + + // Should only ever return a single row + return result.rows[0].totalSize; +} + export async function create( transferToCreate: TransferCreateObject, options: Options = { txId: undefined }, @@ -430,3 +500,53 @@ export async function findById( .findById(id) .returning('*'); } + +export async function getLastTransferTimeForSubaccounts( + subaccountIds: string[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise<{ [subaccountId: string]: string }> { + if (!subaccountIds.length) { + return {}; + } + + let baseQuery: QueryBuilder = setupBaseQuery( + TransferModel, + options, + ); + + baseQuery = baseQuery + .select('senderSubaccountId', 'recipientSubaccountId', 'createdAt') + .where((queryBuilder) => { + // eslint-disable-next-line no-void + void queryBuilder.whereIn('senderSubaccountId', subaccountIds) + .orWhereIn('recipientSubaccountId', subaccountIds); + }) + .orderBy('createdAt', 'desc'); + + const result: TransferFromDatabase[] = await baseQuery; + + const mapping: { [subaccountId: string]: string } = {}; + + result.forEach((row) => { + if ( + row.senderSubaccountId !== undefined && + subaccountIds.includes(row.senderSubaccountId) + ) { + if (!mapping[row.senderSubaccountId] || row.createdAt > mapping[row.senderSubaccountId]) { + mapping[row.senderSubaccountId] = row.createdAt; + } + } + if ( + row.recipientSubaccountId !== undefined && + subaccountIds.includes(row.recipientSubaccountId) + ) { + if ( + !mapping[row.recipientSubaccountId] || + row.createdAt > mapping[row.recipientSubaccountId]) { + mapping[row.recipientSubaccountId] = row.createdAt; + } + } + }); + + return mapping; +} diff --git a/indexer/packages/postgres/src/stores/vault-pnl-ticks-view.ts b/indexer/packages/postgres/src/stores/vault-pnl-ticks-view.ts new file mode 100644 index 00000000000..1cfa51e46f8 --- 
/dev/null +++ b/indexer/packages/postgres/src/stores/vault-pnl-ticks-view.ts @@ -0,0 +1,88 @@ +import { DateTime } from 'luxon'; + +import { knexReadReplica } from '../helpers/knex'; +import { rawQuery } from '../helpers/stores-helpers'; +import { + PnlTickInterval, + PnlTicksFromDatabase, +} from '../types'; + +const VAULT_HOURLY_PNL_VIEW: string = 'vaults_hourly_pnl'; +const VAULT_DAILY_PNL_VIEW: string = 'vaults_daily_pnl'; + +export async function refreshHourlyView(): Promise { + await rawQuery( + `REFRESH MATERIALIZED VIEW CONCURRENTLY ${VAULT_HOURLY_PNL_VIEW}`, + { + readReplica: false, + }, + ); +} + +export async function refreshDailyView(): Promise { + await rawQuery( + `REFRESH MATERIALIZED VIEW CONCURRENTLY ${VAULT_DAILY_PNL_VIEW}`, + { + readReplica: false, + }, + ); +} + +export async function getVaultsPnl( + interval: PnlTickInterval, + timeWindowSeconds: number, + earliestDate: DateTime, +): Promise { + let viewName: string = VAULT_DAILY_PNL_VIEW; + if (interval === PnlTickInterval.hour) { + viewName = VAULT_HOURLY_PNL_VIEW; + } + const result: { + rows: PnlTicksFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT + "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" + FROM ${viewName} + WHERE + "blockTime" >= '${earliestDate.toUTC().toISO()}'::timestamp AND + "blockTime" > NOW() - INTERVAL '${timeWindowSeconds} second' + ORDER BY "subaccountId", "blockTime"; + `, + ) as unknown as { + rows: PnlTicksFromDatabase[], + }; + + return result.rows; +} + +export async function getLatestVaultPnl(): Promise { + const result: { + rows: PnlTicksFromDatabase[], + } = await knexReadReplica.getConnection().raw( + ` + SELECT DISTINCT ON ("subaccountId") + "id", + "subaccountId", + "equity", + "totalPnl", + "netTransfers", + "createdAt", + "blockHeight", + "blockTime" + FROM ${VAULT_HOURLY_PNL_VIEW} + ORDER BY "subaccountId", "blockTime" DESC; + `, + ) as unknown as { + rows: 
PnlTicksFromDatabase[], + }; + + return result.rows; +} diff --git a/indexer/packages/postgres/src/stores/vault-table.ts b/indexer/packages/postgres/src/stores/vault-table.ts new file mode 100644 index 00000000000..4cfe9b6ca8b --- /dev/null +++ b/indexer/packages/postgres/src/stores/vault-table.ts @@ -0,0 +1,100 @@ +import { QueryBuilder } from 'objection'; + +import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { + verifyAllRequiredFields, + setupBaseQuery, +} from '../helpers/stores-helpers'; +import Transaction from '../helpers/transaction'; +import VaultModel from '../models/vault-model'; +import { + QueryConfig, + VaultQueryConfig, + VaultColumns, + Options, + Ordering, + QueryableField, + VaultFromDatabase, + VaultCreateObject, +} from '../types'; + +export async function findAll( + { + address, + clobPairId, + status, + limit, + }: VaultQueryConfig, + requiredFields: QueryableField[], + options: Options = DEFAULT_POSTGRES_OPTIONS, +): Promise { + verifyAllRequiredFields( + { + address, + clobPairId, + status, + limit, + } as QueryConfig, + requiredFields, + ); + + let baseQuery: QueryBuilder = setupBaseQuery( + VaultModel, + options, + ); + + if (address) { + baseQuery = baseQuery.whereIn(VaultColumns.address, address); + } + + if (clobPairId) { + baseQuery = baseQuery.whereIn(VaultColumns.clobPairId, clobPairId); + } + + if (status) { + baseQuery = baseQuery.whereIn(VaultColumns.status, status); + } + + if (options.orderBy !== undefined) { + for (const [column, order] of options.orderBy) { + baseQuery = baseQuery.orderBy( + column, + order, + ); + } + } else { + baseQuery = baseQuery.orderBy( + VaultColumns.clobPairId, + Ordering.ASC, + ); + } + + if (limit) { + baseQuery = baseQuery.limit(limit); + } + + return baseQuery.returning('*'); +} + +export async function create( + vaultToCreate: VaultCreateObject, + options: Options = { txId: undefined }, +): Promise { + return VaultModel.query( + Transaction.get(options.txId), + ).insert({ + 
...vaultToCreate, + }); +} + +export async function upsert( + vaultToUpsert: VaultCreateObject, + options: Options = { txId: undefined }, +): Promise { + const vaults: VaultModel[] = await VaultModel.query( + Transaction.get(options.txId), + ).upsert({ + ...vaultToUpsert, + }).returning('*'); + return vaults[0]; +} diff --git a/indexer/packages/postgres/src/stores/wallet-table.ts b/indexer/packages/postgres/src/stores/wallet-table.ts index fd04bb560d2..ce5e04a122b 100644 --- a/indexer/packages/postgres/src/stores/wallet-table.ts +++ b/indexer/packages/postgres/src/stores/wallet-table.ts @@ -1,6 +1,8 @@ +import Knex from 'knex'; import { PartialModelObject, QueryBuilder } from 'objection'; import { DEFAULT_POSTGRES_OPTIONS } from '../constants'; +import { knexPrimary } from '../helpers/knex'; import { setupBaseQuery, verifyAllRequiredFields } from '../helpers/stores-helpers'; import Transaction from '../helpers/transaction'; import WalletModel from '../models/wallet-model'; @@ -96,6 +98,7 @@ export async function upsert( // should only ever be one wallet return wallets[0]; } + export async function findById( address: string, options: Options = DEFAULT_POSTGRES_OPTIONS, @@ -108,3 +111,54 @@ export async function findById( .findById(address) .returning('*'); } + +/** + * Calculates the total volume in a given time window for each address and adds the values to the + * existing totalVolume values. + * + * @async + * @function updateTotalVolume + * @param {string} windowStartTs - The exclusive start timestamp for filtering fills. + * @param {string} windowEndTs - The inclusive end timestamp for filtering fill. + * @param {number} [txId] - Optional transaction ID. 
+ * @returns {Promise} + */ +export async function updateTotalVolume( + windowStartTs: string, + windowEndTs: string, + txId: number | undefined = undefined, +) : Promise { + const transaction: Knex.Transaction | undefined = Transaction.get(txId); + + const query = ` + WITH fills_total AS ( + -- Step 1: Calculate total volume for each subaccountId + SELECT "subaccountId", SUM("price" * "size") AS "totalVolume" + FROM fills + WHERE "createdAt" > '${windowStartTs}' AND "createdAt" <= '${windowEndTs}' + GROUP BY "subaccountId" + ), + subaccount_volume AS ( + -- Step 2: Merge with subaccounts table to get the address + SELECT s."address", f."totalVolume" + FROM fills_total f + JOIN subaccounts s + ON f."subaccountId" = s."id" + ), + address_volume AS ( + -- Step 3: Group by address and sum the totalVolume + SELECT "address", SUM("totalVolume") AS "totalVolume" + FROM subaccount_volume + GROUP BY "address" + ) + -- Step 4: Left join the result with the wallets table and update the total volume + UPDATE wallets + SET "totalVolume" = COALESCE(wallets."totalVolume", 0) + av."totalVolume" + FROM address_volume av + WHERE wallets."address" = av."address"; + `; + + return transaction + ? 
knexPrimary.raw(query).transacting(transaction) + : knexPrimary.raw(query); +} diff --git a/indexer/packages/postgres/src/types/affiliate-info-types.ts b/indexer/packages/postgres/src/types/affiliate-info-types.ts new file mode 100644 index 00000000000..a1dcc61be69 --- /dev/null +++ b/indexer/packages/postgres/src/types/affiliate-info-types.ts @@ -0,0 +1,25 @@ +export interface AffiliateInfoCreateObject { + address: string, + affiliateEarnings: string, + referredMakerTrades: number, + referredTakerTrades: number, + totalReferredMakerFees: string, + totalReferredTakerFees: string, + totalReferredMakerRebates: string, + totalReferredUsers: number, + firstReferralBlockHeight: string, + referredTotalVolume: string, +} + +export enum AffiliateInfoColumns { + address = 'address', + affiliateEarnings = 'affiliateEarnings', + referredMakerTrades = 'referredMakerTrades', + referredTakerTrades = 'referredTakerTrades', + totalReferredMakerFees = 'totalReferredMakerFees', + totalReferredTakerFees = 'totalReferredTakerFees', + totalReferredMakerRebates = 'totalReferredMakerRebates', + totalReferredUsers = 'totalReferredUsers', + firstReferralBlockHeight = 'firstReferralBlockHeight', + referredTotalVolume = 'referredTotalVolume', +} diff --git a/indexer/packages/postgres/src/types/affiliate-referred-users-types.ts b/indexer/packages/postgres/src/types/affiliate-referred-users-types.ts new file mode 100644 index 00000000000..4ba76de49ea --- /dev/null +++ b/indexer/packages/postgres/src/types/affiliate-referred-users-types.ts @@ -0,0 +1,11 @@ +export interface AffiliateReferredUsersCreateObject { + affiliateAddress: string, + refereeAddress: string, + referredAtBlock: string, +} + +export enum AffiliateReferredUsersColumns { + affiliateAddress = 'affiliateAddress', + refereeAddress = 'refereeAddress', + referredAtBlock = 'referredAtBlock', +} diff --git a/indexer/packages/postgres/src/types/block-types.ts b/indexer/packages/postgres/src/types/block-types.ts index 
d2aea5f90e4..e94c3fbc808 100644 --- a/indexer/packages/postgres/src/types/block-types.ts +++ b/indexer/packages/postgres/src/types/block-types.ts @@ -3,8 +3,8 @@ type IsoString = string; export interface BlockCreateObject { - blockHeight: string; - time: IsoString; + blockHeight: string, + time: IsoString, } export enum BlockColumns { diff --git a/indexer/packages/postgres/src/types/candle-types.ts b/indexer/packages/postgres/src/types/candle-types.ts index 87f30dbf320..db7b3f66f76 100644 --- a/indexer/packages/postgres/src/types/candle-types.ts +++ b/indexer/packages/postgres/src/types/candle-types.ts @@ -1,29 +1,33 @@ import { IsoString } from './utility-types'; export interface CandleCreateObject { - startedAt: IsoString; - ticker: string; - resolution: CandleResolution; - low: string; - high: string; - open: string; - close: string; - baseTokenVolume: string; - usdVolume: string; - trades: number; - startingOpenInterest: string; + startedAt: IsoString, + ticker: string, + resolution: CandleResolution, + low: string, + high: string, + open: string, + close: string, + baseTokenVolume: string, + usdVolume: string, + trades: number, + startingOpenInterest: string, + orderbookMidPriceOpen: string | undefined, + orderbookMidPriceClose: string | undefined, } export interface CandleUpdateObject { - id: string; - low?: string; - high?: string; - open?: string; - close?: string; - baseTokenVolume?: string; - usdVolume?: string; - trades?: number; - startingOpenInterest?: string; + id: string, + low?: string, + high?: string, + open?: string, + close?: string, + baseTokenVolume?: string, + usdVolume?: string, + trades?: number, + startingOpenInterest?: string, + orderbookMidPriceOpen?: string, + orderbookMidPriceClose?: string, } export enum CandleResolution { diff --git a/indexer/packages/postgres/src/types/compliance-data-types.ts b/indexer/packages/postgres/src/types/compliance-data-types.ts index 4a018eb1e66..2657bc09145 100644 --- 
a/indexer/packages/postgres/src/types/compliance-data-types.ts +++ b/indexer/packages/postgres/src/types/compliance-data-types.ts @@ -7,21 +7,21 @@ export enum ComplianceProvider { } export interface ComplianceDataCreateObject { - address: string; - provider: string; - chain?: string; - blocked: boolean; - riskScore?: string; - updatedAt?: IsoString; + address: string, + provider: string, + chain?: string, + blocked: boolean, + riskScore?: string, + updatedAt?: IsoString, } export interface ComplianceDataUpdateObject { - address: string; - provider: string; - chain?: string; - blocked?: boolean; - riskScore?: string; - updatedAt?: IsoString; + address: string, + provider: string, + chain?: string, + blocked?: boolean, + riskScore?: string, + updatedAt?: IsoString, } export enum ComplianceDataColumns { diff --git a/indexer/packages/postgres/src/types/compliance-status-types.ts b/indexer/packages/postgres/src/types/compliance-status-types.ts index 2c501ed9ea8..2f0d99ac733 100644 --- a/indexer/packages/postgres/src/types/compliance-status-types.ts +++ b/indexer/packages/postgres/src/types/compliance-status-types.ts @@ -6,38 +6,40 @@ export enum ComplianceReason { MANUAL = 'MANUAL', US_GEO = 'US_GEO', CA_GEO = 'CA_GEO', + GB_GEO = 'GB_GEO', SANCTIONED_GEO = 'SANCTIONED_GEO', COMPLIANCE_PROVIDER = 'COMPLIANCE_PROVIDER', } export enum ComplianceStatus { COMPLIANT = 'COMPLIANT', + FIRST_STRIKE_CLOSE_ONLY = 'FIRST_STRIKE_CLOSE_ONLY', FIRST_STRIKE = 'FIRST_STRIKE', CLOSE_ONLY = 'CLOSE_ONLY', BLOCKED = 'BLOCKED', } export interface ComplianceStatusCreateObject { - address: string; - status: ComplianceStatus; - reason?: ComplianceReason; - createdAt?: IsoString; - updatedAt?: IsoString; + address: string, + status: ComplianceStatus, + reason?: ComplianceReason, + createdAt?: IsoString, + updatedAt?: IsoString, } export interface ComplianceStatusUpsertObject { - address: string; - status: ComplianceStatus; - reason?: ComplianceReason; - updatedAt: IsoString; + address: string, 
+ status: ComplianceStatus, + reason?: ComplianceReason, + updatedAt: IsoString, } export interface ComplianceStatusUpdateObject { - address: string; - status?: ComplianceStatus; - reason?: ComplianceReason | null; - createdAt?: IsoString; - updatedAt?: IsoString; + address: string, + status?: ComplianceStatus, + reason?: ComplianceReason | null, + createdAt?: IsoString, + updatedAt?: IsoString, } export enum ComplianceStatusColumns { diff --git a/indexer/packages/postgres/src/types/db-model-types.ts b/indexer/packages/postgres/src/types/db-model-types.ts index f6e9a40c52d..ad0d1a802e0 100644 --- a/indexer/packages/postgres/src/types/db-model-types.ts +++ b/indexer/packages/postgres/src/types/db-model-types.ts @@ -7,15 +7,16 @@ import { FillType, Liquidity } from './fill-types'; import { OrderSide, OrderStatus, OrderType, TimeInForce, } from './order-types'; -import { PerpetualMarketStatus } from './perpetual-market-types'; +import { PerpetualMarketStatus, PerpetualMarketType } from './perpetual-market-types'; import { PerpetualPositionStatus } from './perpetual-position-types'; import { PositionSide } from './position-types'; import { TradingRewardAggregationPeriod } from './trading-reward-aggregation-types'; +import { VaultStatus } from './vault-types'; type IsoString = string; export interface IdBasedModelFromDatabase { - id: string; + id: string, } export interface SubaccountFromDatabase extends IdBasedModelFromDatabase { @@ -28,100 +29,105 @@ export interface SubaccountFromDatabase extends IdBasedModelFromDatabase { export interface WalletFromDatabase { address: string, totalTradingRewards: string, + totalVolume: string, } export interface PerpetualPositionFromDatabase extends IdBasedModelFromDatabase { - id: string; - subaccountId: string; - perpetualId: string; - side: PositionSide; - status: PerpetualPositionStatus; - size: string; // The size of the position. Positive for long, negative for short. 
- maxSize: string; - entryPrice: string; - exitPrice?: string; - sumOpen: string; - sumClose: string; - createdAt: IsoString; - closedAt?: IsoString; - createdAtHeight: string; - closedAtHeight?: string; - openEventId: Buffer; - closeEventId?: Buffer; - lastEventId: Buffer; - settledFunding: string; + id: string, + subaccountId: string, + perpetualId: string, + side: PositionSide, + status: PerpetualPositionStatus, + size: string, // The size of the position. Positive for long, negative for short. + maxSize: string, + entryPrice: string, + exitPrice?: string, + sumOpen: string, + sumClose: string, + createdAt: IsoString, + closedAt?: IsoString, + createdAtHeight: string, + closedAtHeight?: string, + openEventId: Buffer, + closeEventId?: Buffer, + lastEventId: Buffer, + settledFunding: string, } export interface OrderFromDatabase extends IdBasedModelFromDatabase { - subaccountId: string; - clientId: string; - clobPairId: string; - side: OrderSide; - size: string; - totalFilled: string; - price: string; - type: OrderType; - status: OrderStatus; - timeInForce: TimeInForce; - reduceOnly: boolean; - orderFlags: string; - updatedAt: IsoString; - updatedAtHeight: string; - goodTilBlock?: string; - goodTilBlockTime?: string; + subaccountId: string, + clientId: string, + clobPairId: string, + side: OrderSide, + size: string, + totalFilled: string, + price: string, + type: OrderType, + status: OrderStatus, + timeInForce: TimeInForce, + reduceOnly: boolean, + orderFlags: string, + updatedAt: IsoString, + updatedAtHeight: string, + goodTilBlock?: string, + goodTilBlockTime?: string, // createdAtHeight is optional because short term orders do not have a createdAtHeight. 
- createdAtHeight?: string; - clientMetadata: string; - triggerPrice?: string; + createdAtHeight?: string, + clientMetadata: string, + triggerPrice?: string, } export interface PerpetualMarketFromDatabase { - id: string; - clobPairId: string; - ticker: string; - marketId: number; - status: PerpetualMarketStatus; - priceChange24H: string; - volume24H: string; - trades24H: number; - nextFundingRate: string; - openInterest: string; - quantumConversionExponent: number; - atomicResolution: number; - subticksPerTick: number; - stepBaseQuantums: number; - liquidityTierId: number; + id: string, + clobPairId: string, + ticker: string, + marketId: number, + status: PerpetualMarketStatus, + priceChange24H: string, + volume24H: string, + trades24H: number, + nextFundingRate: string, + openInterest: string, + quantumConversionExponent: number, + atomicResolution: number, + subticksPerTick: number, + stepBaseQuantums: number, + liquidityTierId: number, + marketType: PerpetualMarketType, + baseOpenInterest: string, + defaultFundingRate1H?: string, } export interface FillFromDatabase { - id: string; - subaccountId: string; - side: OrderSide; - liquidity: Liquidity; - type: FillType; - clobPairId: string; - size: string; - price: string; - quoteAmount: string; - eventId: Buffer; - transactionHash: string; - createdAt: IsoString; - createdAtHeight: string; - orderId?: string; - clientMetadata?: string; - fee: string; + id: string, + subaccountId: string, + side: OrderSide, + liquidity: Liquidity, + type: FillType, + clobPairId: string, + size: string, + price: string, + quoteAmount: string, + eventId: Buffer, + transactionHash: string, + createdAt: IsoString, + createdAtHeight: string, + orderId?: string, + clientMetadata?: string, + fee: string, + affiliateRevShare: string, } export interface BlockFromDatabase { - blockHeight: string; - time: IsoString; + blockHeight: string, + time: IsoString, } export interface TendermintEventFromDatabase { - id: Buffer; - blockHeight: string; - 
transactionIndex: number; - eventIndex: number; + id: Buffer, + blockHeight: string, + transactionIndex: number, + eventIndex: number, } export interface TransactionFromDatabase extends IdBasedModelFromDatabase { @@ -132,129 +138,190 @@ export interface TransactionFromDatabase extends IdBasedModelFromDatabase { } export interface AssetFromDatabase { - id: string; - symbol: string; - atomicResolution: number; - hasMarket: boolean; - marketId?: number; + id: string, + symbol: string, + atomicResolution: number, + hasMarket: boolean, + marketId?: number, } export interface AssetPositionFromDatabase { - id: string; - assetId: string; - subaccountId: string; - size: string; - isLong: boolean; + id: string, + assetId: string, + subaccountId: string, + size: string, + isLong: boolean, } export interface TransferFromDatabase extends IdBasedModelFromDatabase { - senderSubaccountId?: string; - recipientSubaccountId?: string; - senderWalletAddress?: string; - recipientWalletAddress?: string; - assetId: string; - size: string; - eventId: Buffer; - transactionHash: string; - createdAt: IsoString; - createdAtHeight: string; + senderSubaccountId?: string, + recipientSubaccountId?: string, + senderWalletAddress?: string, + recipientWalletAddress?: string, + assetId: string, + size: string, + eventId: Buffer, + transactionHash: string, + createdAt: IsoString, + createdAtHeight: string, } export interface MarketFromDatabase { - id: number; - pair: string; - exponent: number; - minPriceChangePpm: number; - oraclePrice?: string; + id: number, + pair: string, + exponent: number, + minPriceChangePpm: number, + oraclePrice?: string, } export interface OraclePriceFromDatabase extends IdBasedModelFromDatabase { - marketId: number; - price: string; - effectiveAt: IsoString; - effectiveAtHeight: string; + marketId: number, + price: string, + effectiveAt: IsoString, + effectiveAtHeight: string, } export interface LiquidityTiersFromDatabase { - id: number; - name: string; - initialMarginPpm: 
string; - maintenanceFractionPpm: string; + id: number, + name: string, + initialMarginPpm: string, + maintenanceFractionPpm: string, + openInterestLowerCap?: string, + openInterestUpperCap?: string, } export interface CandleFromDatabase extends IdBasedModelFromDatabase { - startedAt: IsoString; - ticker: string; - resolution: CandleResolution; - low: string; - high: string; - open: string; - close: string; - baseTokenVolume: string; - usdVolume: string; - trades: number; - startingOpenInterest: string; + startedAt: IsoString, + ticker: string, + resolution: CandleResolution, + low: string, + high: string, + open: string, + close: string, + baseTokenVolume: string, + usdVolume: string, + trades: number, + startingOpenInterest: string, + orderbookMidPriceOpen?: string | null, + orderbookMidPriceClose?: string | null, } export interface PnlTicksFromDatabase extends IdBasedModelFromDatabase { - subaccountId: string; - equity: string; - totalPnl: string; - netTransfers: string; - createdAt: IsoString; - blockHeight: string; - blockTime: IsoString; + subaccountId: string, + equity: string, + totalPnl: string, + netTransfers: string, + createdAt: IsoString, + blockHeight: string, + blockTime: IsoString, } export interface FundingIndexUpdatesFromDatabase extends IdBasedModelFromDatabase { - perpetualId: string; - eventId: Buffer; - rate: string; - oraclePrice: string; - fundingIndex: string; - effectiveAt: string; - effectiveAtHeight: string; + perpetualId: string, + eventId: Buffer, + rate: string, + oraclePrice: string, + fundingIndex: string, + effectiveAt: string, + effectiveAtHeight: string, } export interface ComplianceDataFromDatabase { - address: string; - chain?: string; - blocked: boolean; - riskScore?: string; - updatedAt: string; + address: string, + chain?: string, + blocked: boolean, + riskScore?: string, + updatedAt: string, } export interface ComplianceStatusFromDatabase { - address: string; - status: ComplianceStatus; - reason?: ComplianceReason; - 
createdAt: IsoString; - updatedAt: IsoString; + address: string, + status: ComplianceStatus, + reason?: ComplianceReason, + createdAt: IsoString, + updatedAt: IsoString, } export interface TradingRewardFromDatabase { - id: string; - address: string; - blockTime: IsoString; - blockHeight: string; - amount: string; + id: string, + address: string, + blockTime: IsoString, + blockHeight: string, + amount: string, } export interface TradingRewardAggregationFromDatabase { - id: string; - address: string; - startedAt: IsoString; - startedAtHeight: string; - endedAt?: IsoString; - endedAtHeight?: string; - period: TradingRewardAggregationPeriod; - amount: string; + id: string, + address: string, + startedAt: IsoString, + startedAtHeight: string, + endedAt?: IsoString, + endedAtHeight?: string, + period: TradingRewardAggregationPeriod, + amount: string, +} + +export interface SubaccountUsernamesFromDatabase { + username: string, + subaccountId: string, +} + +export interface AddressUsername { + address: string, + username: string, +} + +export interface LeaderboardPnlFromDatabase { + address: string, + timeSpan: string, + pnl: string, + currentEquity: string, + rank: number, +} + +export interface PersistentCacheFromDatabase { + key: string, + value: string, +} + +export interface AffiliateInfoFromDatabase { + address: string, + affiliateEarnings: string, + referredMakerTrades: number, + referredTakerTrades: number, + totalReferredMakerFees: string, + totalReferredTakerFees: string, + totalReferredMakerRebates: string, + totalReferredUsers: number, + firstReferralBlockHeight: string, + referredTotalVolume: string, +} + +export interface AffiliateReferredUserFromDatabase { + affiliateAddress: string, + refereeAddress: string, + referredAtBlock: string, +} + +export interface FirebaseNotificationTokenFromDatabase { + address: string, + token: string, + updatedAt: IsoString, + language: string, +} + +export interface VaultFromDatabase { + address: string, + clobPairId: string, 
+ status: VaultStatus, + createdAt: IsoString, + updatedAt: IsoString, } export type SubaccountAssetNetTransferMap = { [subaccountId: string]: -{ [assetId: string]: string } }; +{ [assetId: string]: string }, }; export type SubaccountToPerpetualPositionsMap = { [subaccountId: string]: -{ [perpetualId: string]: PerpetualPositionFromDatabase } }; +{ [perpetualId: string]: PerpetualPositionFromDatabase }, }; export type PerpetualPositionsMap = { [perpetualMarketId: string]: PerpetualPositionFromDatabase }; export type PerpetualMarketsMap = { [perpetualMarketId: string]: PerpetualMarketFromDatabase }; export type AssetsMap = { [assetId: string]: AssetFromDatabase }; diff --git a/indexer/packages/postgres/src/types/fill-types.ts b/indexer/packages/postgres/src/types/fill-types.ts index 9055c8e09d0..38c01cd0ff9 100644 --- a/indexer/packages/postgres/src/types/fill-types.ts +++ b/indexer/packages/postgres/src/types/fill-types.ts @@ -30,32 +30,33 @@ export enum FillType { } export interface FillCreateObject { - subaccountId: string; - side: OrderSide; - liquidity: Liquidity; - type: FillType; - clobPairId: string; - orderId?: string; - size: string; - price: string; - quoteAmount: string; - eventId: Buffer; - transactionHash: string; - createdAt: string; - createdAtHeight: string; - clientMetadata?: string; - fee: string; + subaccountId: string, + side: OrderSide, + liquidity: Liquidity, + type: FillType, + clobPairId: string, + orderId?: string, + size: string, + price: string, + quoteAmount: string, + eventId: Buffer, + transactionHash: string, + createdAt: string, + createdAtHeight: string, + clientMetadata?: string, + fee: string, + affiliateRevShare: string, } export interface FillUpdateObject { - id: string; - side?: OrderSide; - type?: FillType; - clobPairId?: string; - orderId?: string | null; - size?: string; - price?: string; - quoteAmount?: string; + id: string, + side?: OrderSide, + type?: FillType, + clobPairId?: string, + orderId?: string | null, + size?: 
string, + price?: string, + quoteAmount?: string, } export enum FillColumns { @@ -75,30 +76,31 @@ export enum FillColumns { createdAtHeight = 'createdAtHeight', clientMetadata = 'clientMetadata', fee = 'fee', + affiliateRevShare = 'affiliateRevShare', } export type CostOfFills = { - cost: number; + cost: number, }; export interface OrderedFillsWithFundingIndices { - id: string; - subaccountId: string; - side: OrderSide; - size: string; - createdAtHeight: string; - fundingIndex: string; - lastFillId: string; - lastFillSide: OrderSide; - lastFillSize: string; - lastFillCreatedAtHeight: string; - lastFillFundingIndex: string; + id: string, + subaccountId: string, + side: OrderSide, + size: string, + createdAtHeight: string, + fundingIndex: string, + lastFillId: string, + lastFillSide: OrderSide, + lastFillSize: string, + lastFillCreatedAtHeight: string, + lastFillFundingIndex: string, } export interface OpenSizeWithFundingIndex { - clobPairId: string; - openSize: string; - lastFillHeight: string; - fundingIndex: string; - fundingIndexHeight: string; + clobPairId: string, + openSize: string, + lastFillHeight: string, + fundingIndex: string, + fundingIndexHeight: string, } diff --git a/indexer/packages/postgres/src/types/firebase-notification-token-types.ts b/indexer/packages/postgres/src/types/firebase-notification-token-types.ts new file mode 100644 index 00000000000..c25f52fc743 --- /dev/null +++ b/indexer/packages/postgres/src/types/firebase-notification-token-types.ts @@ -0,0 +1,24 @@ +/* ------- TOKEN TYPES ------- */ + +type IsoString = string; + +export interface FirebaseNotificationTokenCreateObject { + token: string, + address: string, + language: string, + updatedAt: IsoString, +} + +export interface FirebaseNotificationTokenUpdateObject { + token?: string, + address?: string, + language?: string, + updatedAt?: IsoString, +} + +export enum FirebaseNotificationTokenColumns { + token = 'token', + address = 'address', + language = 'language', + updatedAt = 
'updatedAt', +} diff --git a/indexer/packages/postgres/src/types/index.ts b/indexer/packages/postgres/src/types/index.ts index 455c67d7ff7..16c2c1ed9fb 100644 --- a/indexer/packages/postgres/src/types/index.ts +++ b/indexer/packages/postgres/src/types/index.ts @@ -25,4 +25,12 @@ export * from './compliance-data-types'; export * from './compliance-status-types'; export * from './trading-reward-types'; export * from './trading-reward-aggregation-types'; +export * from './pagination-types'; +export * from './subaccount-usernames-types'; +export * from './leaderboard-pnl-types'; +export * from './affiliate-referred-users-types'; +export * from './persistent-cache-types'; +export * from './affiliate-info-types'; +export * from './firebase-notification-token-types'; +export * from './vault-types'; export { PositionSide } from './position-types'; diff --git a/indexer/packages/postgres/src/types/leaderboard-pnl-types.ts b/indexer/packages/postgres/src/types/leaderboard-pnl-types.ts new file mode 100644 index 00000000000..4488bf826c5 --- /dev/null +++ b/indexer/packages/postgres/src/types/leaderboard-pnl-types.ts @@ -0,0 +1,25 @@ +/* ------- LEADERBOARD PNL TYPES ------- */ + +export interface LeaderboardPnlCreateObject { + address: string, + pnl: string, + timeSpan: string, + currentEquity: string, + rank: number, +} + +export enum LeaderboardPnlColumns { + address = 'address', + timeSpan = 'timeSpan', + pnl = 'pnl', + currentEquity = 'currentEquity', + rank = 'rank', +} + +export enum LeaderboardPnlTimeSpan { + ONE_DAY = 'ONE_DAY', + SEVEN_DAYS = 'SEVEN_DAYS', + THIRTY_DAYS = 'THIRTY_DAYS', + ONE_YEAR = 'ONE_YEAR', + ALL_TIME = 'ALL_TIME', +} diff --git a/indexer/packages/postgres/src/types/liquidity-tiers-types.ts b/indexer/packages/postgres/src/types/liquidity-tiers-types.ts index 784dd5f7ff6..da0317e9ac0 100644 --- a/indexer/packages/postgres/src/types/liquidity-tiers-types.ts +++ b/indexer/packages/postgres/src/types/liquidity-tiers-types.ts @@ -5,6 +5,8 @@ export 
interface LiquidityTiersCreateObject { name: string, initialMarginPpm: string, maintenanceFractionPpm: string, + openInterestLowerCap?: string, + openInterestUpperCap?: string, } export interface LiquidityTiersUpdateObject { @@ -12,6 +14,8 @@ export interface LiquidityTiersUpdateObject { name?: string, initialMarginPpm?: string, maintenanceFractionPpm?: string, + openInterestLowerCap?: string, + openInterestUpperCap?: string, } export enum LiquidityTiersColumns { @@ -19,4 +23,6 @@ export enum LiquidityTiersColumns { name = 'name', initialMarginPpm = 'initialMarginPpm', maintenanceFractionPpm = 'maintenanceFractionPpm', + openInterestLowerCap = 'openInterestLowerCap', + openInterestUpperCap = 'openInterestUpperCap', } diff --git a/indexer/packages/postgres/src/types/market-types.ts b/indexer/packages/postgres/src/types/market-types.ts index 39bf16476f7..3a29bd7134c 100644 --- a/indexer/packages/postgres/src/types/market-types.ts +++ b/indexer/packages/postgres/src/types/market-types.ts @@ -12,7 +12,7 @@ export interface MarketUpdateObject { id: number, pair?: string, minPriceChangePpm?: number, - oraclePrice?: string; + oraclePrice?: string, } export enum MarketColumns { diff --git a/indexer/packages/postgres/src/types/order-types.ts b/indexer/packages/postgres/src/types/order-types.ts index 373ea210140..dfee69d60c7 100644 --- a/indexer/packages/postgres/src/types/order-types.ts +++ b/indexer/packages/postgres/src/types/order-types.ts @@ -23,9 +23,6 @@ export enum OrderType { TRAILING_STOP = 'TRAILING_STOP', TAKE_PROFIT = 'TAKE_PROFIT', TAKE_PROFIT_MARKET = 'TAKE_PROFIT_MARKET', - HARD_TRADE = 'HARD_TRADE', - FAILED_HARD_TRADE = 'FAILED_HARD_TRADE', - TRANSFER_PLACEHOLDER = 'TRANSFER_PLACEHOLDER', } export enum TimeInForce { @@ -46,46 +43,46 @@ export enum TimeInForce { } export interface OrderCreateObject { - subaccountId: string; - clientId: string; - clobPairId: string; - side: OrderSide; - size: string; - totalFilled: string; - price: string; - type: OrderType; 
- status: OrderStatus; - timeInForce: TimeInForce; - reduceOnly: boolean; - orderFlags: string; - updatedAt: IsoString; - updatedAtHeight: string; - goodTilBlock?: string; - goodTilBlockTime?: string; + subaccountId: string, + clientId: string, + clobPairId: string, + side: OrderSide, + size: string, + totalFilled: string, + price: string, + type: OrderType, + status: OrderStatus, + timeInForce: TimeInForce, + reduceOnly: boolean, + orderFlags: string, + updatedAt: IsoString, + updatedAtHeight: string, + goodTilBlock?: string, + goodTilBlockTime?: string, // createdAtHeight is optional because short term orders do not have a createdAtHeight. - createdAtHeight?: string; - clientMetadata: string; + createdAtHeight?: string, + clientMetadata: string, triggerPrice?: string, } export interface OrderUpdateObject { - id: string; - clobPairId?: string; - side?: OrderSide; - size?: string; - totalFilled?: string; - price?: string; - type?: OrderType; - status?: OrderStatus; - timeInForce?: TimeInForce; - reduceOnly?: boolean; - orderFlags?: string; - updatedAt?: IsoString; - updatedAtHeight?: string; - goodTilBlock?: string | null; - goodTilBlockTime?: string | null; - clientMetadata?: string; - triggerPrice?: string; + id: string, + clobPairId?: string, + side?: OrderSide, + size?: string, + totalFilled?: string, + price?: string, + type?: OrderType, + status?: OrderStatus, + timeInForce?: TimeInForce, + reduceOnly?: boolean, + orderFlags?: string, + updatedAt?: IsoString, + updatedAtHeight?: string, + goodTilBlock?: string | null, + goodTilBlockTime?: string | null, + clientMetadata?: string, + triggerPrice?: string, } export enum OrderColumns { diff --git a/indexer/packages/postgres/src/types/pagination-types.ts b/indexer/packages/postgres/src/types/pagination-types.ts new file mode 100644 index 00000000000..04454616782 --- /dev/null +++ b/indexer/packages/postgres/src/types/pagination-types.ts @@ -0,0 +1,6 @@ +export interface PaginationFromDatabase { + results: T[], + 
total?: number, + offset?: number, + limit?: number, +} diff --git a/indexer/packages/postgres/src/types/perpetual-market-types.ts b/indexer/packages/postgres/src/types/perpetual-market-types.ts index 13318b3c685..9f75a625d09 100644 --- a/indexer/packages/postgres/src/types/perpetual-market-types.ts +++ b/indexer/packages/postgres/src/types/perpetual-market-types.ts @@ -1,39 +1,42 @@ /* ------- PERPETUAL MARKET TYPES ------- */ export interface PerpetualMarketCreateObject { - id: string; - clobPairId: string; - ticker: string; - marketId: number; - status: PerpetualMarketStatus; - priceChange24H: string; - volume24H: string; - trades24H: number; - nextFundingRate: string; - openInterest: string; - quantumConversionExponent: number; - atomicResolution: number; - subticksPerTick: number; - stepBaseQuantums: number; - liquidityTierId: number; + id: string, + clobPairId: string, + ticker: string, + marketId: number, + status: PerpetualMarketStatus, + priceChange24H: string, + volume24H: string, + trades24H: number, + nextFundingRate: string, + openInterest: string, + quantumConversionExponent: number, + atomicResolution: number, + subticksPerTick: number, + stepBaseQuantums: number, + liquidityTierId: number, + marketType: PerpetualMarketType, + baseOpenInterest: string, + defaultFundingRate1H: string, } export interface PerpetualMarketUpdateObject { - id?: string; - clobPairId?: string; - ticker?: string; - marketId?: number; - status?: PerpetualMarketStatus; - priceChange24H?: string; - volume24H?: string; - trades24H?: number; - nextFundingRate?: string; - openInterest?: string; - quantumConversionExponent?: number; - atomicResolution?: number; - subticksPerTick?: number; - stepBaseQuantums?: number; - liquidityTierId?: number; + id?: string, + clobPairId?: string, + ticker?: string, + marketId?: number, + status?: PerpetualMarketStatus, + priceChange24H?: string, + volume24H?: string, + trades24H?: number, + nextFundingRate?: string, + openInterest?: string, + 
quantumConversionExponent?: number, + atomicResolution?: number, + subticksPerTick?: number, + stepBaseQuantums?: number, + liquidityTierId?: number, } export enum PerpetualMarketColumns { @@ -52,6 +55,7 @@ export enum PerpetualMarketColumns { subticksPerTick = 'subticksPerTick', stepBaseQuantums = 'stepBaseQuantums', liquidityTierId = 'liquidityTierId', + defaultFundingRate1H = 'defaultFundingRate1H', } export enum PerpetualMarketStatus { @@ -62,3 +66,8 @@ export enum PerpetualMarketStatus { INITIALIZING = 'INITIALIZING', FINAL_SETTLEMENT = 'FINAL_SETTLEMENT', } + +export enum PerpetualMarketType { + CROSS = 'CROSS', + ISOLATED = 'ISOLATED', +} diff --git a/indexer/packages/postgres/src/types/perpetual-position-types.ts b/indexer/packages/postgres/src/types/perpetual-position-types.ts index 43369f7d023..b0e2a9b22e0 100644 --- a/indexer/packages/postgres/src/types/perpetual-position-types.ts +++ b/indexer/packages/postgres/src/types/perpetual-position-types.ts @@ -16,43 +16,43 @@ export enum PerpetualPositionStatus { } export interface PerpetualPositionCreateObject { - subaccountId: string; - perpetualId: string; - side: PositionSide; - status: PerpetualPositionStatus; - size: string; - maxSize: string; - sumOpen?: string; - sumClose?: string; - entryPrice?: string; - createdAt: IsoString; - createdAtHeight: string; - openEventId: Buffer; - lastEventId: Buffer; - settledFunding: string; - closedAt?: IsoString; - closedAtHeight?: string; - closeEventId?: Buffer; - exitPrice?: string; + subaccountId: string, + perpetualId: string, + side: PositionSide, + status: PerpetualPositionStatus, + size: string, + maxSize: string, + sumOpen?: string, + sumClose?: string, + entryPrice?: string, + createdAt: IsoString, + createdAtHeight: string, + openEventId: Buffer, + lastEventId: Buffer, + settledFunding: string, + closedAt?: IsoString, + closedAtHeight?: string, + closeEventId?: Buffer, + exitPrice?: string, } export interface PerpetualPositionUpdateObject { - id: string; - 
side?: PositionSide; - status?: PerpetualPositionStatus; - size?: string; - maxSize?: string; - entryPrice?: string; - exitPrice?: string | null; - sumOpen?: string; - sumClose?: string; - createdAt?: IsoString; - closedAt?: IsoString | null; - createdAtHeight?: string; - closedAtHeight?: string | null; - closeEventId?: Buffer | null; - lastEventId?: Buffer; - settledFunding?: string; + id: string, + side?: PositionSide, + status?: PerpetualPositionStatus, + size?: string, + maxSize?: string, + entryPrice?: string, + exitPrice?: string | null, + sumOpen?: string, + sumClose?: string, + createdAt?: IsoString, + closedAt?: IsoString | null, + createdAtHeight?: string, + closedAtHeight?: string | null, + closeEventId?: Buffer | null, + lastEventId?: Buffer, + settledFunding?: string, } // Object used to update a subaccount's perpetual position in the SubaccountUpdateHandler @@ -60,9 +60,9 @@ export interface PerpetualPositionSubaccountUpdateObject { id: string, closedAt?: IsoString | null, closedAtHeight?: string | null, - closeEventId?: Buffer | null; - lastEventId: Buffer; - settledFunding: string; + closeEventId?: Buffer | null, + lastEventId: Buffer, + settledFunding: string, status: PerpetualPositionStatus, size: string, } @@ -76,21 +76,21 @@ This is all of the fields in PerpetualPositionFromDatabase with the exception of closedAt, closedAtHeight, and closeEventId are nullable. 
*/ export interface UpdatedPerpetualPositionSubaccountKafkaObject { - id: string; - perpetualId: string; - side: PositionSide; - status: PerpetualPositionStatus; - size: string; - maxSize: string; - entryPrice: string; - exitPrice?: string; - sumOpen: string; - sumClose: string; - closedAt?: IsoString | null; - closedAtHeight?: string | null; - lastEventId: Buffer; - closeEventId?: Buffer | null; - settledFunding: string; + id: string, + perpetualId: string, + side: PositionSide, + status: PerpetualPositionStatus, + size: string, + maxSize: string, + entryPrice: string, + exitPrice?: string, + sumOpen: string, + sumClose: string, + closedAt?: IsoString | null, + closedAtHeight?: string | null, + lastEventId: Buffer, + closeEventId?: Buffer | null, + settledFunding: string, realizedPnl?: string, unrealizedPnl?: string, } diff --git a/indexer/packages/postgres/src/types/persistent-cache-types.ts b/indexer/packages/postgres/src/types/persistent-cache-types.ts new file mode 100644 index 00000000000..0ae79ae7e1c --- /dev/null +++ b/indexer/packages/postgres/src/types/persistent-cache-types.ts @@ -0,0 +1,14 @@ +export interface PersistentCacheCreateObject { + key: string, + value: string, +} + +export enum PersistentCacheColumns { + key = 'key', + value = 'value', +} + +export enum PersistentCacheKeys { + TOTAL_VOLUME_UPDATE_TIME = 'totalVolumeUpdateTime', + AFFILIATE_INFO_UPDATE_TIME = 'affiliateInfoUpdateTime', +} diff --git a/indexer/packages/postgres/src/types/pnl-ticks-types.ts b/indexer/packages/postgres/src/types/pnl-ticks-types.ts index 6b04c655651..87c206b890b 100644 --- a/indexer/packages/postgres/src/types/pnl-ticks-types.ts +++ b/indexer/packages/postgres/src/types/pnl-ticks-types.ts @@ -9,7 +9,7 @@ export interface PnlTicksCreateObject { netTransfers: string, createdAt: string, blockHeight: string, - blockTime: IsoString; + blockTime: IsoString, } export enum PnlTicksColumns { @@ -22,3 +22,8 @@ export enum PnlTicksColumns { blockHeight = 'blockHeight', 
blockTime = 'blockTime', } + +export enum PnlTickInterval { + hour = 'hour', + day = 'day', +} diff --git a/indexer/packages/postgres/src/types/query-types.ts b/indexer/packages/postgres/src/types/query-types.ts index b4e25410590..3b5cd025e2d 100644 --- a/indexer/packages/postgres/src/types/query-types.ts +++ b/indexer/packages/postgres/src/types/query-types.ts @@ -10,6 +10,7 @@ import { IsoString } from './utility-types'; export enum QueryableField { LIMIT = 'limit', + PAGE = 'page', ID = 'id', ADDRESS = 'address', SUBACCOUNT_NUMBER = 'subaccountNumber', @@ -84,224 +85,270 @@ export enum QueryableField { STARTED_AT_BEFORE_OR_AT = 'startedAtBeforeOrAt', STARTED_AT_HEIGHT_BEFORE_OR_AT = 'startedAtHeightBeforeOrAt', REASON = 'reason', + USERNAME = 'username', + TIMESPAN = 'timeSpan', + RANK = 'rank', + AFFILIATE_ADDRESS = 'affiliateAddress', + REFEREE_ADDRESS = 'refereeAddress', + KEY = 'key', + TOKEN = 'token', + ADDRESS_IN_WALLETS_TABLE = 'addressInWalletsTable', } export interface QueryConfig { - [QueryableField.LIMIT]?: number; + [QueryableField.LIMIT]?: number, + [QueryableField.PAGE]?: number, } export interface SubaccountQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.ADDRESS]?: string; - [QueryableField.SUBACCOUNT_NUMBER]?: number; - [QueryableField.UPDATED_BEFORE_OR_AT]?: string; - [QueryableField.UPDATED_ON_OR_AFTER]?: string; + [QueryableField.ID]?: string[], + [QueryableField.ADDRESS]?: string, + [QueryableField.SUBACCOUNT_NUMBER]?: number, + [QueryableField.UPDATED_BEFORE_OR_AT]?: string, + [QueryableField.UPDATED_ON_OR_AFTER]?: string, +} + +export interface SubaccountUsernamesQueryConfig extends QueryConfig { + [QueryableField.USERNAME]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], } export interface WalletQueryConfig extends QueryConfig { - [QueryableField.ADDRESS]?: string; + [QueryableField.ADDRESS]?: string, } export interface PerpetualPositionQueryConfig extends QueryConfig { - [QueryableField.ID]?: 
string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.PERPETUAL_ID]?: string[]; - [QueryableField.SIDE]?: PositionSide; - [QueryableField.STATUS]?: PerpetualPositionStatus[]; - [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string; - [QueryableField.CREATED_BEFORE_OR_AT]?: string; + [QueryableField.ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.PERPETUAL_ID]?: string[], + [QueryableField.SIDE]?: PositionSide, + [QueryableField.STATUS]?: PerpetualPositionStatus[], + [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string, + [QueryableField.CREATED_BEFORE_OR_AT]?: string, } export interface OrderQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.CLIENT_ID]?: string; - [QueryableField.CLOB_PAIR_ID]?: string; - [QueryableField.SIDE]?: OrderSide; - [QueryableField.SIZE]?: string; - [QueryableField.TOTAL_FILLED]?: string; - [QueryableField.PRICE]?: string; - [QueryableField.TYPE]?: OrderType; - [QueryableField.STATUSES]?: OrderStatus[]; - [QueryableField.POST_ONLY]?: boolean; - [QueryableField.REDUCE_ONLY]?: boolean; - [QueryableField.GOOD_TIL_BLOCK_BEFORE_OR_AT]?: string; - [QueryableField.GOOD_TIL_BLOCK_TIME_BEFORE_OR_AT]?: string; - [QueryableField.ORDER_FLAGS]?: string; - [QueryableField.CLIENT_METADATA]?: string; - [QueryableField.TRIGGER_PRICE]?: string; + [QueryableField.ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.CLIENT_ID]?: string, + [QueryableField.CLOB_PAIR_ID]?: string, + [QueryableField.SIDE]?: OrderSide, + [QueryableField.SIZE]?: string, + [QueryableField.TOTAL_FILLED]?: string, + [QueryableField.PRICE]?: string, + [QueryableField.TYPE]?: OrderType, + [QueryableField.STATUSES]?: OrderStatus[], + [QueryableField.POST_ONLY]?: boolean, + [QueryableField.REDUCE_ONLY]?: boolean, + [QueryableField.GOOD_TIL_BLOCK_BEFORE_OR_AT]?: string, + [QueryableField.GOOD_TIL_BLOCK_TIME_BEFORE_OR_AT]?: string, + 
[QueryableField.ORDER_FLAGS]?: string, + [QueryableField.CLIENT_METADATA]?: string, + [QueryableField.TRIGGER_PRICE]?: string, } export interface PerpetualMarketQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.MARKET_ID]?: number[]; - [QueryableField.LIQUIDITY_TIER_ID]?: number[]; + [QueryableField.ID]?: string[], + [QueryableField.MARKET_ID]?: number[], + [QueryableField.LIQUIDITY_TIER_ID]?: number[], } export interface FillQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.SIDE]?: OrderSide; - [QueryableField.LIQUIDITY]?: Liquidity; - [QueryableField.TYPE]?: OrderType; - [QueryableField.CLOB_PAIR_ID]?: string; - [QueryableField.EVENT_ID]?: Buffer; - [QueryableField.TRANSACTION_HASH]?: string; - [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string; - [QueryableField.CREATED_BEFORE_OR_AT]?: string; - [QueryableField.CREATED_ON_OR_AFTER_HEIGHT]?: string; - [QueryableField.CREATED_ON_OR_AFTER]?: string; - [QueryableField.CLIENT_METADATA]?: string; - [QueryableField.FEE]?: string; + [QueryableField.ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.SIDE]?: OrderSide, + [QueryableField.LIQUIDITY]?: Liquidity, + [QueryableField.TYPE]?: OrderType, + [QueryableField.CLOB_PAIR_ID]?: string, + [QueryableField.EVENT_ID]?: Buffer, + [QueryableField.TRANSACTION_HASH]?: string, + [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string, + [QueryableField.CREATED_BEFORE_OR_AT]?: string, + [QueryableField.CREATED_ON_OR_AFTER_HEIGHT]?: string, + [QueryableField.CREATED_ON_OR_AFTER]?: string, + [QueryableField.CLIENT_METADATA]?: string, + [QueryableField.FEE]?: string, } export interface BlockQueryConfig extends QueryConfig { - [QueryableField.BLOCK_HEIGHT]?: string[]; - [QueryableField.CREATED_ON_OR_AFTER]?: string; + [QueryableField.BLOCK_HEIGHT]?: string[], + [QueryableField.CREATED_ON_OR_AFTER]?: string, } export interface 
TendermintEventQueryConfig extends QueryConfig { - [QueryableField.ID]?: Buffer[]; - [QueryableField.BLOCK_HEIGHT]?: string[]; - [QueryableField.TRANSACTION_INDEX]?: number[]; - [QueryableField.EVENT_INDEX]?: number[]; + [QueryableField.ID]?: Buffer[], + [QueryableField.BLOCK_HEIGHT]?: string[], + [QueryableField.TRANSACTION_INDEX]?: number[], + [QueryableField.EVENT_INDEX]?: number[], } export interface TransactionQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.BLOCK_HEIGHT]?: string[]; - [QueryableField.TRANSACTION_INDEX]?: number[]; - [QueryableField.TRANSACTION_HASH]?: string[]; + [QueryableField.ID]?: string[], + [QueryableField.BLOCK_HEIGHT]?: string[], + [QueryableField.TRANSACTION_INDEX]?: number[], + [QueryableField.TRANSACTION_HASH]?: string[], } export interface AssetQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SYMBOL]?: string; - [QueryableField.ATOMIC_RESOLUTION]?: number; - [QueryableField.HAS_MARKET]?: boolean; - [QueryableField.MARKET_ID]?: number; + [QueryableField.ID]?: string[], + [QueryableField.SYMBOL]?: string, + [QueryableField.ATOMIC_RESOLUTION]?: number, + [QueryableField.HAS_MARKET]?: boolean, + [QueryableField.MARKET_ID]?: number, } export interface AssetPositionQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.ASSET_ID]?: string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.SIZE]?: string; - [QueryableField.IS_LONG]?: boolean; + [QueryableField.ID]?: string[], + [QueryableField.ASSET_ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.SIZE]?: string, + [QueryableField.IS_LONG]?: boolean, } export interface TransferQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SENDER_SUBACCOUNT_ID]?: string[]; - [QueryableField.RECIPIENT_SUBACCOUNT_ID]?: string[]; - [QueryableField.SENDER_WALLET_ADDRESS]?: string[]; - 
[QueryableField.RECIPIENT_WALLET_ADDRESS]?: string[]; - [QueryableField.ASSET_ID]?: string[]; - [QueryableField.SIZE]?: string; - [QueryableField.EVENT_ID]?: Buffer[]; - [QueryableField.TRANSACTION_HASH]?: string[]; - [QueryableField.CREATED_AT]?: string; - [QueryableField.CREATED_AT_HEIGHT]?: string[]; - [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string; - [QueryableField.CREATED_BEFORE_OR_AT]?: string; - [QueryableField.CREATED_AFTER]?: string; - [QueryableField.CREATED_AFTER_HEIGHT]?: string; + [QueryableField.ID]?: string[], + [QueryableField.SENDER_SUBACCOUNT_ID]?: string[], + [QueryableField.RECIPIENT_SUBACCOUNT_ID]?: string[], + [QueryableField.SENDER_WALLET_ADDRESS]?: string[], + [QueryableField.RECIPIENT_WALLET_ADDRESS]?: string[], + [QueryableField.ASSET_ID]?: string[], + [QueryableField.SIZE]?: string, + [QueryableField.EVENT_ID]?: Buffer[], + [QueryableField.TRANSACTION_HASH]?: string[], + [QueryableField.CREATED_AT]?: string, + [QueryableField.CREATED_AT_HEIGHT]?: string[], + [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string, + [QueryableField.CREATED_BEFORE_OR_AT]?: string, + [QueryableField.CREATED_AFTER]?: string, + [QueryableField.CREATED_AFTER_HEIGHT]?: string, } export interface ToAndFromSubaccountTransferQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.ASSET_ID]?: string[]; - [QueryableField.SIZE]?: string; - [QueryableField.EVENT_ID]?: Buffer[]; - [QueryableField.TRANSACTION_HASH]?: string[]; - [QueryableField.CREATED_AT]?: string; - [QueryableField.CREATED_AT_HEIGHT]?: string[]; - [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string | undefined; - [QueryableField.CREATED_BEFORE_OR_AT]?: string | undefined; - [QueryableField.CREATED_AFTER_HEIGHT]?: string | undefined; - [QueryableField.CREATED_AFTER]?: string | undefined; + [QueryableField.ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.ASSET_ID]?: string[], + 
[QueryableField.SIZE]?: string, + [QueryableField.EVENT_ID]?: Buffer[], + [QueryableField.TRANSACTION_HASH]?: string[], + [QueryableField.CREATED_AT]?: string, + [QueryableField.CREATED_AT_HEIGHT]?: string[], + [QueryableField.CREATED_BEFORE_OR_AT_HEIGHT]?: string | undefined, + [QueryableField.CREATED_BEFORE_OR_AT]?: string | undefined, + [QueryableField.CREATED_AFTER_HEIGHT]?: string | undefined, + [QueryableField.CREATED_AFTER]?: string | undefined, } export interface OraclePriceQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.MARKET_ID]?: number[]; - [QueryableField.PRICE]?: string[]; - [QueryableField.EFFECTIVE_AT]?: string; - [QueryableField.EFFECTIVE_AT_HEIGHT]?: string; - [QueryableField.EFFECTIVE_BEFORE_OR_AT]?: string; - [QueryableField.EFFECTIVE_BEFORE_OR_AT_HEIGHT]?: string; + [QueryableField.ID]?: string[], + [QueryableField.MARKET_ID]?: number[], + [QueryableField.PRICE]?: string[], + [QueryableField.EFFECTIVE_AT]?: string, + [QueryableField.EFFECTIVE_AT_HEIGHT]?: string, + [QueryableField.EFFECTIVE_BEFORE_OR_AT]?: string, + [QueryableField.EFFECTIVE_BEFORE_OR_AT_HEIGHT]?: string, } export interface MarketQueryConfig extends QueryConfig { - [QueryableField.ID]?: number[]; - [QueryableField.PAIR]?: string[]; + [QueryableField.ID]?: number[], + [QueryableField.PAIR]?: string[], } export interface CandleQueryConfig extends QueryConfig { - [QueryableField.ID]?: number[]; - [QueryableField.TICKER]?: string[]; - [QueryableField.RESOLUTION]?: CandleResolution; - [QueryableField.FROM_ISO]?: IsoString; - [QueryableField.TO_ISO]?: IsoString; + [QueryableField.ID]?: number[], + [QueryableField.TICKER]?: string[], + [QueryableField.RESOLUTION]?: CandleResolution, + [QueryableField.FROM_ISO]?: IsoString, + [QueryableField.TO_ISO]?: IsoString, } export interface PnlTicksQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.SUBACCOUNT_ID]?: string[]; - [QueryableField.CREATED_AT]?: string; - 
[QueryableField.BLOCK_HEIGHT]?: string; - [QueryableField.BLOCK_TIME]?: string; - [QueryableField.CREATED_BEFORE_OR_AT]?: string; - [QueryableField.CREATED_BEFORE_OR_AT_BLOCK_HEIGHT]?: string; - [QueryableField.CREATED_ON_OR_AFTER]?: string; - [QueryableField.CREATED_ON_OR_AFTER_BLOCK_HEIGHT]?: string; + [QueryableField.ID]?: string[], + [QueryableField.SUBACCOUNT_ID]?: string[], + [QueryableField.CREATED_AT]?: string, + [QueryableField.BLOCK_HEIGHT]?: string, + [QueryableField.BLOCK_TIME]?: string, + [QueryableField.CREATED_BEFORE_OR_AT]?: string, + [QueryableField.CREATED_BEFORE_OR_AT_BLOCK_HEIGHT]?: string, + [QueryableField.CREATED_ON_OR_AFTER]?: string, + [QueryableField.CREATED_ON_OR_AFTER_BLOCK_HEIGHT]?: string, } export interface FundingIndexUpdatesQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; - [QueryableField.PERPETUAL_ID]?: string[]; - [QueryableField.EVENT_ID]?: Buffer; - [QueryableField.EFFECTIVE_AT]?: string; - [QueryableField.EFFECTIVE_AT_HEIGHT]?: string; - [QueryableField.EFFECTIVE_BEFORE_OR_AT]?: string; - [QueryableField.EFFECTIVE_BEFORE_OR_AT_HEIGHT]?: string; + [QueryableField.ID]?: string[], + [QueryableField.PERPETUAL_ID]?: string[], + [QueryableField.EVENT_ID]?: Buffer, + [QueryableField.EFFECTIVE_AT]?: string, + [QueryableField.EFFECTIVE_AT_HEIGHT]?: string, + [QueryableField.EFFECTIVE_BEFORE_OR_AT]?: string, + [QueryableField.EFFECTIVE_BEFORE_OR_AT_HEIGHT]?: string, } export interface LiquidityTiersQueryConfig extends QueryConfig { - [QueryableField.ID]?: string[]; + [QueryableField.ID]?: string[], } export interface ComplianceDataQueryConfig extends QueryConfig { - [QueryableField.ADDRESS]?: string[]; - [QueryableField.UPDATED_BEFORE_OR_AT]?: string; - [QueryableField.PROVIDER]?: string; - [QueryableField.BLOCKED]?: boolean; + [QueryableField.ADDRESS]?: string[], + [QueryableField.UPDATED_BEFORE_OR_AT]?: string, + [QueryableField.PROVIDER]?: string, + [QueryableField.BLOCKED]?: boolean, + 
[QueryableField.ADDRESS_IN_WALLETS_TABLE]?: boolean, } export interface ComplianceStatusQueryConfig extends QueryConfig { - [QueryableField.ADDRESS]?: string[]; - [QueryableField.STATUS]?: string; - [QueryableField.CREATED_BEFORE_OR_AT]?: string; - [QueryableField.UPDATED_BEFORE_OR_AT]?: string; - [QueryableField.REASON]?: string; + [QueryableField.ADDRESS]?: string[], + [QueryableField.STATUS]?: string[], + [QueryableField.CREATED_BEFORE_OR_AT]?: string, + [QueryableField.UPDATED_BEFORE_OR_AT]?: string, + [QueryableField.REASON]?: string, } export interface TradingRewardQueryConfig extends QueryConfig { - [QueryableField.ADDRESS]?: string; - [QueryableField.BLOCK_HEIGHT]?: string; - [QueryableField.BLOCK_TIME_BEFORE_OR_AT]?: IsoString; - [QueryableField.BLOCK_TIME_AFTER_OR_AT]?: IsoString; - [QueryableField.BLOCK_TIME_BEFORE]?: IsoString; - [QueryableField.BLOCK_HEIGHT_BEFORE_OR_AT]?: IsoString; + [QueryableField.ADDRESS]?: string, + [QueryableField.BLOCK_HEIGHT]?: string, + [QueryableField.BLOCK_TIME_BEFORE_OR_AT]?: IsoString, + [QueryableField.BLOCK_TIME_AFTER_OR_AT]?: IsoString, + [QueryableField.BLOCK_TIME_BEFORE]?: IsoString, + [QueryableField.BLOCK_HEIGHT_BEFORE_OR_AT]?: IsoString, } export interface TradingRewardAggregationQueryConfig extends QueryConfig { - [QueryableField.ADDRESS]?: string; - [QueryableField.ADDRESSES]?: string[]; - [QueryableField.STARTED_AT_HEIGHT]?: string; - [QueryableField.STARTED_AT_HEIGHT_OR_AFTER]?: string; - [QueryableField.PERIOD]?: TradingRewardAggregationPeriod; - [QueryableField.STARTED_AT_BEFORE_OR_AT]?: IsoString; - [QueryableField.STARTED_AT_HEIGHT_BEFORE_OR_AT]?: string; + [QueryableField.ADDRESS]?: string, + [QueryableField.ADDRESSES]?: string[], + [QueryableField.STARTED_AT_HEIGHT]?: string, + [QueryableField.STARTED_AT_HEIGHT_OR_AFTER]?: string, + [QueryableField.PERIOD]?: TradingRewardAggregationPeriod, + [QueryableField.STARTED_AT_BEFORE_OR_AT]?: IsoString, + [QueryableField.STARTED_AT_HEIGHT_BEFORE_OR_AT]?: string, 
+} + +export interface AffiliateReferredUsersQueryConfig extends QueryConfig { + [QueryableField.AFFILIATE_ADDRESS]?: string[], + [QueryableField.REFEREE_ADDRESS]?: string[], +} + +export interface LeaderboardPnlQueryConfig extends QueryConfig { + [QueryableField.ADDRESS]?: string[], + [QueryableField.TIMESPAN]?: string[], + [QueryableField.RANK]?: number[], +} + +export interface PersistentCacheQueryConfig extends QueryConfig { + [QueryableField.KEY]?: string, +} + +export interface AffiliateInfoQueryConfig extends QueryConfig { + [QueryableField.ADDRESS]?: string, +} + +export interface FirebaseNotificationTokenQueryConfig extends QueryConfig { + [QueryableField.ADDRESS]?: string, + [QueryableField.TOKEN]?: string, + [QueryableField.UPDATED_BEFORE_OR_AT]?: IsoString, +} + +export interface VaultQueryConfig extends QueryConfig { + [QueryableField.ADDRESS]?: string[], + [QueryableField.CLOB_PAIR_ID]?: string[], + [QueryableField.STATUS]?: string[], } diff --git a/indexer/packages/postgres/src/types/subaccount-usernames-types.ts b/indexer/packages/postgres/src/types/subaccount-usernames-types.ts new file mode 100644 index 00000000000..c0b0eb25f08 --- /dev/null +++ b/indexer/packages/postgres/src/types/subaccount-usernames-types.ts @@ -0,0 +1,16 @@ +/* ------- SUBACCOUNT USERNAME TYPES ------- */ + +export interface SubaccountUsernamesCreateObject { + username: string, + subaccountId: string, +} + +export enum SubaccountUsernamesColumns { + username = 'username', + subaccountId = 'subaccountId', +} + +export interface SubaccountsWithoutUsernamesResult { + subaccountId: string, + address: string, +} diff --git a/indexer/packages/postgres/src/types/tendermint-event-types.ts b/indexer/packages/postgres/src/types/tendermint-event-types.ts index 99916473ecf..b66ea348bb8 100644 --- a/indexer/packages/postgres/src/types/tendermint-event-types.ts +++ b/indexer/packages/postgres/src/types/tendermint-event-types.ts @@ -1,9 +1,9 @@ /* ------- TENDERMINT EVENT TYPES ------- */ 
export interface TendermintEventCreateObject { - blockHeight: string; - transactionIndex: number; - eventIndex: number; + blockHeight: string, + transactionIndex: number, + eventIndex: number, } export enum TendermintEventColumns { diff --git a/indexer/packages/postgres/src/types/trading-reward-aggregation-types.ts b/indexer/packages/postgres/src/types/trading-reward-aggregation-types.ts index c0244bed98e..60d6a9060d9 100644 --- a/indexer/packages/postgres/src/types/trading-reward-aggregation-types.ts +++ b/indexer/packages/postgres/src/types/trading-reward-aggregation-types.ts @@ -7,20 +7,20 @@ export enum TradingRewardAggregationPeriod { } export interface TradingRewardAggregationCreateObject { - address: string; - startedAt: IsoString; - startedAtHeight: string; - endedAt?: IsoString; - endedAtHeight?: string; - period: TradingRewardAggregationPeriod; - amount: string; + address: string, + startedAt: IsoString, + startedAtHeight: string, + endedAt?: IsoString, + endedAtHeight?: string, + period: TradingRewardAggregationPeriod, + amount: string, } export interface TradingRewardAggregationUpdateObject { - id: string; - endedAt?: IsoString; - endedAtHeight?: string; - amount?: string; + id: string, + endedAt?: IsoString, + endedAtHeight?: string, + amount?: string, } export enum TradingRewardAggregationColumns { diff --git a/indexer/packages/postgres/src/types/trading-reward-types.ts b/indexer/packages/postgres/src/types/trading-reward-types.ts index da1dc816761..b075cfe55e1 100644 --- a/indexer/packages/postgres/src/types/trading-reward-types.ts +++ b/indexer/packages/postgres/src/types/trading-reward-types.ts @@ -1,10 +1,10 @@ import { IsoString } from './utility-types'; export interface TradingRewardCreateObject { - address: string; - blockTime: IsoString; - blockHeight: string; - amount: string; + address: string, + blockTime: IsoString, + blockHeight: string, + amount: string, } export enum TradingRewardColumns { diff --git 
a/indexer/packages/postgres/src/types/utility-types.ts b/indexer/packages/postgres/src/types/utility-types.ts index f0eecb43977..69cb68a4958 100644 --- a/indexer/packages/postgres/src/types/utility-types.ts +++ b/indexer/packages/postgres/src/types/utility-types.ts @@ -13,15 +13,15 @@ export enum IsolationLevel { } export interface Options { - txId?: number; - forUpdate?: boolean; - noWait?: boolean; - orderBy?: [string, Ordering][]; + txId?: number, + forUpdate?: boolean, + noWait?: boolean, + orderBy?: [string, Ordering][], readReplica?: boolean, - random?: boolean; - bindings?: readonly RawBinding[]; + random?: boolean, + bindings?: readonly RawBinding[], // eslint-disable-next-line @typescript-eslint/no-explicit-any - sqlOptions?: Readonly<{ [key: string]: any }>; + sqlOptions?: Readonly<{ [key: string]: any }>, } export enum Ordering { diff --git a/indexer/packages/postgres/src/types/vault-types.ts b/indexer/packages/postgres/src/types/vault-types.ts new file mode 100644 index 00000000000..4f1fe3f7a4a --- /dev/null +++ b/indexer/packages/postgres/src/types/vault-types.ts @@ -0,0 +1,24 @@ +import { IsoString } from './utility-types'; + +export interface VaultCreateObject { + address: string, + clobPairId: string, + status: VaultStatus, + createdAt: IsoString, + updatedAt: IsoString, +} + +export enum VaultStatus { + DEACTIVATED = 'DEACTIVATED', + STAND_BY = 'STAND_BY', + QUOTING = 'QUOTING', + CLOSE_ONLY = 'CLOSE_ONLY', +} + +export enum VaultColumns { + address = 'address', + clobPairId = 'clobPairId', + status = 'status', + createdAt = 'createdAt', + updatedAt = 'updatedAt', +} diff --git a/indexer/packages/postgres/src/types/wallet-types.ts b/indexer/packages/postgres/src/types/wallet-types.ts index 48ad8307afa..750b8b33a88 100644 --- a/indexer/packages/postgres/src/types/wallet-types.ts +++ b/indexer/packages/postgres/src/types/wallet-types.ts @@ -3,14 +3,17 @@ export interface WalletCreateObject { address: string, totalTradingRewards: string, + 
totalVolume: string, } export interface WalletUpdateObject { address: string, totalTradingRewards: string, + totalVolume: string, } export enum WalletColumns { address = 'address', totalTradingRewards = 'totalTradingRewards', + totalVolume = 'totalVolume', } diff --git a/indexer/packages/postgres/src/types/websocket-message-types.ts b/indexer/packages/postgres/src/types/websocket-message-types.ts index 8e68b8f3eb6..eb2e907099b 100644 --- a/indexer/packages/postgres/src/types/websocket-message-types.ts +++ b/indexer/packages/postgres/src/types/websocket-message-types.ts @@ -5,7 +5,7 @@ import { OrderStatus, OrderType, } from './order-types'; -import { PerpetualMarketStatus } from './perpetual-market-types'; +import { PerpetualMarketStatus, PerpetualMarketType } from './perpetual-market-types'; import { PerpetualPositionStatus } from './perpetual-position-types'; import { PositionSide } from './position-types'; import { TradeType } from './trade-types'; @@ -37,6 +37,7 @@ export interface SubaccountMessageContents { fills?: FillSubaccountMessageContents[], transfers?: TransferSubaccountMessageContents, tradingReward?: TradingRewardSubaccountMessageContents, + blockHeight?: string, } export interface PerpetualPositionSubaccountMessageContents { @@ -98,53 +99,53 @@ export const APIOrderStatusEnum = { }; export interface OrderSubaccountMessageContents { - id: string; - subaccountId: string; - clientId: string; - clobPairId: string; - side: OrderSide; - size: string; - ticker: string, - price: string; - type: OrderType; - timeInForce: APITimeInForce; - postOnly: boolean; - reduceOnly: boolean; - status: APIOrderStatus; - orderFlags: string; - - totalFilled?: string; - totalOptimisticFilled?: string; - goodTilBlock?: string; - goodTilBlockTime?: string; - triggerPrice?: string; - updatedAt?: IsoString; - updatedAtHeight?: string; + id: string, + subaccountId: string, + clientId: string, + clobPairId?: string, + side?: OrderSide, + size?: string, + ticker?: string, + 
price?: string, + type?: OrderType, + timeInForce?: APITimeInForce, + postOnly?: boolean, + reduceOnly?: boolean, + status: APIOrderStatus, + orderFlags: string, + + totalFilled?: string, + totalOptimisticFilled?: string, + goodTilBlock?: string, + goodTilBlockTime?: string, + triggerPrice?: string, + updatedAt?: IsoString, + updatedAtHeight?: string, // This will only be filled if the order was removed - removalReason?: string; + removalReason?: string, // This will only be set for stateful orders - createdAtHeight?: string; - clientMetadata: string; + createdAtHeight?: string, + clientMetadata?: string, } export interface FillSubaccountMessageContents { - id: string; - subaccountId: string; - side: OrderSide; - liquidity: Liquidity; - type: FillType; - clobPairId: string; - size: string; - price: string; - quoteAmount: string; + id: string, + subaccountId: string, + side: OrderSide, + liquidity: Liquidity, + type: FillType, + clobPairId: string, + size: string, + price: string, + quoteAmount: string, eventId: string, - transactionHash: string; - createdAt: IsoString; - createdAtHeight: string; - orderId?: string; - ticker: string; - clientMetadata?: string; + transactionHash: string, + createdAt: IsoString, + createdAtHeight: string, + orderId?: string, + ticker: string, + clientMetadata?: string, } export interface TransferSubaccountMessageContents { @@ -160,14 +161,14 @@ export interface TransferSubaccountMessageContents { size: string, type: TransferType, transactionHash: string, - createdAt: IsoString; - createdAtHeight: string; + createdAt: IsoString, + createdAtHeight: string, } export interface TradingRewardSubaccountMessageContents { - tradingReward: string; - createdAtHeight: string; - createdAt: string; + tradingReward: string, + createdAtHeight: string, + createdAt: string, } /* ------- TradeMessageContents ------- */ @@ -194,30 +195,39 @@ export interface MarketMessageContents { } export type TradingMarketMessageContents = { - [ticker: string]: 
TradingPerpetualMarketMessage + [ticker: string]: TradingPerpetualMarketMessage, }; // All the fields in PerpetualMarketFromDatabase, but optional export interface TradingPerpetualMarketMessage { // These fields are very unlikely to change - id?: string; - clobPairId?: string; - ticker?: string; - marketId?: number; - status?: PerpetualMarketStatus; - initialMarginFraction?: string; - maintenanceMarginFraction?: string; - openInterest?: string; - quantumConversionExponent?: number; - atomicResolution?: number; - subticksPerTick?: number; - stepBaseQuantums?: number; + id?: string, + clobPairId?: string, + ticker?: string, + marketId?: number, + status?: PerpetualMarketStatus, + initialMarginFraction?: string, + maintenanceMarginFraction?: string, + openInterest?: string, + quantumConversionExponent?: number, + atomicResolution?: number, + subticksPerTick?: number, + stepBaseQuantums?: number, + marketType?: PerpetualMarketType, + openInterestLowerCap?: string, + openInterestUpperCap?: string, + baseOpenInterest?: string, + defaultFundingRate1H?: string, // Fields that are likely to change - priceChange24H?: string; - volume24H?: string; - trades24H?: number; - nextFundingRate?: string; + priceChange24H?: string, + volume24H?: string, + trades24H?: number, + nextFundingRate?: string, + + // Derived fields + tickSize?: string, + stepSize?: string, } export type OraclePriceMarketMessageContentsMapping = { @@ -243,6 +253,6 @@ export interface CandleMessageContents { close: string, baseTokenVolume: string, trades: number, - usdVolume: string + usdVolume: string, startingOpenInterest: string, } diff --git a/indexer/packages/redis/__tests__/caches/orderbook-levels-cache.test.ts b/indexer/packages/redis/__tests__/caches/orderbook-levels-cache.test.ts index 6a7622e1153..603e332865f 100644 --- a/indexer/packages/redis/__tests__/caches/orderbook-levels-cache.test.ts +++ b/indexer/packages/redis/__tests__/caches/orderbook-levels-cache.test.ts @@ -10,10 +10,13 @@ import { 
getOrderBookLevels, getKey, deleteZeroPriceLevel, + getLastUpdatedKey, + deleteStalePriceLevel, + getOrderBookMidPrice, } from '../../src/caches/orderbook-levels-cache'; import { OrderSide } from '@dydxprotocol-indexer/postgres'; import { OrderbookLevels, PriceLevel } from '../../src/types'; -import { InvalidOptionsError, InvalidPriceLevelUpdateError } from '../../src/errors'; +import { InvalidOptionsError } from '../../src/errors'; import { logger } from '@dydxprotocol-indexer/base'; describe('orderbookLevelsCache', () => { @@ -174,11 +177,10 @@ describe('orderbookLevelsCache', () => { expect(orderbookLevels.bids).toEqual([]); }); - it('throws error if update will cause quantums to be negative', async () => { + it('sets price level to 0 if update will cause quantums to be negative', async () => { const humanPrice: string = '50000'; const quantums: string = '1000'; const invalidDelta: string = '-2000'; - const resultingQuantums: string = '-1000'; // Set existing quantums for the level await updatePriceLevel({ ticker, @@ -188,31 +190,26 @@ describe('orderbookLevelsCache', () => { client, }); - // Test that an error is thrown if the update results in a negative quantums for the price - // level - await expect(updatePriceLevel({ + await updatePriceLevel({ ticker, side: OrderSide.BUY, humanPrice, sizeDeltaInQuantums: invalidDelta, client, - })).rejects.toBeInstanceOf(InvalidPriceLevelUpdateError); + }); expect(logger.crit).toHaveBeenCalledTimes(1); - await expect(updatePriceLevel({ + + // Expect that the value in the orderbook is set to 0 + const orderbookLevels: OrderbookLevels = await getOrderBookLevels( ticker, - side: OrderSide.BUY, - humanPrice, - sizeDeltaInQuantums: invalidDelta, client, - })).rejects.toEqual(expect.objectContaining({ - message: expect.stringContaining(resultingQuantums), - })); - - // Expect that the value in the orderbook is unchanged - const orderbookLevels: OrderbookLevels = await getOrderBookLevels(ticker, client); + { + removeZeros: false, 
+ }, + ); expect(orderbookLevels.bids).toMatchObject([{ humanPrice, - quantums, + quantums: '0', }]); }); }); @@ -615,4 +612,210 @@ describe('orderbookLevelsCache', () => { expect(size).toEqual('10'); }); }); + + describe('deleteStalePriceLevel', () => { + const humanPrice: string = '45100'; + + it('deletes stale price level', async () => { + await updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice, + sizeDeltaInQuantums: '100', + client, + }); + + client.hset(getLastUpdatedKey(ticker, OrderSide.BUY), humanPrice, (Date.now() / 1000) - 20); + + let size: string | null = await hGetAsync( + { + hash: getKey(ticker, OrderSide.BUY), + key: humanPrice, + }, + client, + ); + + expect(size).toEqual('100'); + + const deleted: boolean = await deleteStalePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice, + timeThreshold: 10, + client, + }); + + size = await hGetAsync( + { + hash: getKey(ticker, OrderSide.BUY), + key: humanPrice, + }, + client, + ); + + expect(deleted).toEqual(true); + expect(size).toBeNull(); + }); + + it('does not delete recent price level', async () => { + await updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice, + sizeDeltaInQuantums: '10', + client, + }); + + const deleted: boolean = await deleteStalePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice, + timeThreshold: 10, + client, + }); + + const size: string | null = await hGetAsync( + { + hash: getKey(ticker, OrderSide.BUY), + key: humanPrice, + }, + client, + ); + + expect(deleted).toEqual(false); + expect(size).toEqual('10'); + }); + }); + + describe('getMidPrice', () => { + beforeEach(() => { + jest.restoreAllMocks(); + jest.restoreAllMocks(); + }); + afterEach(() => { + jest.restoreAllMocks(); + jest.restoreAllMocks(); + }); + + it('returns the correct mid price', async () => { + await Promise.all([ + updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice: '45200', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + 
side: OrderSide.BUY, + humanPrice: '45100', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice: '45300', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '45500', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '45400', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '45600', + sizeDeltaInQuantums: '2000', + client, + }), + ]); + + const midPrice = await getOrderBookMidPrice(ticker, client); + expect(midPrice).toEqual('45350'); + }); + }); + + it('returns the correct mid price for very small numbers', async () => { + await Promise.all([ + updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '0.000000002346', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice: '0.000000002344', + sizeDeltaInQuantums: '2000', + client, + }), + ]); + + const midPrice = await getOrderBookMidPrice(ticker, client); + expect(midPrice).toEqual('0.000000002345'); + }); + + it('returns the appropriate amount of decimal precision', async () => { + await Promise.all([ + updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '1.02', + sizeDeltaInQuantums: '2000', + client, + }), + updatePriceLevel({ + ticker, + side: OrderSide.BUY, + humanPrice: '1.01', + sizeDeltaInQuantums: '2000', + client, + }), + ]); + + const midPrice = await getOrderBookMidPrice(ticker, client); + expect(midPrice).toEqual('1.015'); + }); + + it('returns undefined if there are no bids or asks', async () => { + await updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: '45400', + sizeDeltaInQuantums: '2000', + client, + }); + + const midPrice = await getOrderBookMidPrice(ticker, client); + expect(midPrice).toBeUndefined(); + }); + + it('returns undefined if
humanPrice is NaN', async () => { + await updatePriceLevel({ + ticker, + side: OrderSide.SELL, + humanPrice: 'nan', + sizeDeltaInQuantums: '2000', + client, + }); + + const midPrice = await getOrderBookMidPrice(ticker, client); + + expect(midPrice).toBeUndefined(); + }); }); diff --git a/indexer/packages/redis/__tests__/caches/orderbook-mid-prices-cache.test.ts b/indexer/packages/redis/__tests__/caches/orderbook-mid-prices-cache.test.ts new file mode 100644 index 00000000000..4ea457764c8 --- /dev/null +++ b/indexer/packages/redis/__tests__/caches/orderbook-mid-prices-cache.test.ts @@ -0,0 +1,331 @@ +import { deleteAllAsync } from '../../src/helpers/redis'; +import { redis as client } from '../helpers/utils'; +import { + fetchAndCacheOrderbookMidPrices, + getMedianPrices, + ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX, +} from '../../src/caches/orderbook-mid-prices-cache'; +import * as OrderbookLevelsCache from '../../src/caches/orderbook-levels-cache'; + +// Mock the OrderbookLevelsCache module +jest.mock('../../src/caches/orderbook-levels-cache', () => ({ + getOrderBookMidPrice: jest.fn(), +})); + +describe('orderbook-mid-prices-cache', () => { + const defaultTicker: string = 'BTC-USD'; + + // Helper function to set a price for a given market ticker + const setPrice = (marketTicker: string, price: string) => { + const now = Date.now(); + client.zadd(`${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${marketTicker}`, now, price); + }; + + afterAll(async () => { + await deleteAllAsync(client); + }); + + beforeEach(async () => { + await deleteAllAsync(client); + jest.resetAllMocks(); + (OrderbookLevelsCache.getOrderBookMidPrice as jest.Mock).mockReset(); + }); + + describe('fetchAndCacheOrderbookMidPrices', () => { + it('sets a price for a ticker', async () => { + (OrderbookLevelsCache.getOrderBookMidPrice as jest.Mock).mockResolvedValue('50000'); + + await fetchAndCacheOrderbookMidPrices(client, [defaultTicker]); + + 
expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledTimes(1); + expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledWith( + defaultTicker, + client, + ); + + client.zrange( + `${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${defaultTicker}`, + 0, + -1, + (_: any, response: string[]) => { + expect(response[0]).toBe('50000'); + }, + ); + }); + + it('sets multiple prices for a ticker', async () => { + const mockPrices = ['49000', '50000', '51000']; + for (const price of mockPrices) { + (OrderbookLevelsCache.getOrderBookMidPrice as jest.Mock).mockResolvedValue(price); + await fetchAndCacheOrderbookMidPrices(client, [defaultTicker]); + } + + client.zrange( + `${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${defaultTicker}`, + 0, + -1, + (_: any, response: string[]) => { + expect(response).toEqual(mockPrices); + }, + ); + expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledTimes(3); + }); + + it('sets prices for multiple tickers', async () => { + const ticker2 = 'SHIB-USD'; + const ticker3 = 'SOL-USD'; + const mockPrices = { + [defaultTicker]: '49000', + [ticker2]: '50000', + [ticker3]: '51000', + }; + + // Mock the getOrderBookMidPrice function for each ticker + (OrderbookLevelsCache.getOrderBookMidPrice as jest.Mock) + .mockResolvedValueOnce(mockPrices[defaultTicker]) + .mockResolvedValueOnce(mockPrices[ticker2]) + .mockResolvedValueOnce(mockPrices[ticker3]); + + await fetchAndCacheOrderbookMidPrices(client, [defaultTicker, ticker2, ticker3]); + expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledTimes(3); + + for (const [key, price] of Object.entries(mockPrices)) { + client.zrange(`${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${key}`, + 0, + -1, + (err: Error, res: string[]) => { + expect(res).toHaveLength(1); + expect(res[0]).toEqual(price); + }); + } + }); + }); + + describe('getMedianPrice', () => { + + it('returns undefined when no prices are set', async () => { + const result: {[ticker: string]: string | undefined} = await
getMedianPrices( + client, + [defaultTicker], + ); + expect(result).toEqual({ 'BTC-USD': undefined }); + }); + + it('returns the median price for odd number of prices', async () => { + setPrice(defaultTicker, '51000'); + setPrice(defaultTicker, '50000'); + setPrice(defaultTicker, '49000'); + + const result: {[ticker: string]: string | undefined} = await getMedianPrices( + client, + [defaultTicker], + ); + expect(result).toEqual({ 'BTC-USD': '50000' }); + }); + + it('returns the median price for even number of prices', async () => { + setPrice(defaultTicker, '50000'); + setPrice(defaultTicker, '51000'); + setPrice(defaultTicker, '49000'); + setPrice(defaultTicker, '52000'); + + const result: {[ticker: string]: string | undefined} = await getMedianPrices( + client, + [defaultTicker], + ); + expect(result).toEqual({ 'BTC-USD': '50500' }); + }); + + it('returns the correct median price after 60 seconds', async () => { + jest.useFakeTimers(); + // Mock the getOrderBookMidPrice function for the ticker + const mockPrices: string[] = ['50000', '51000', '49000', '48000', '52000', '53000']; + + (OrderbookLevelsCache.getOrderBookMidPrice as jest.Mock) + .mockResolvedValueOnce(mockPrices[0]) + .mockResolvedValueOnce(mockPrices[1]) + .mockResolvedValueOnce(mockPrices[2]) + .mockResolvedValueOnce(mockPrices[3]) + .mockResolvedValueOnce(mockPrices[4]) + .mockResolvedValueOnce(mockPrices[5]); + + // Fetch and cache initial prices + await fetchAndCacheOrderbookMidPrices(client, [defaultTicker, defaultTicker]); + expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledTimes(2); + + // Advance time and fetch more prices + jest.advanceTimersByTime(61000); // Advance time by 61 seconds + await fetchAndCacheOrderbookMidPrices( + client, + [defaultTicker, defaultTicker, defaultTicker, defaultTicker], + ); + + client.zrange(`${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${defaultTicker}`, + 0, + -1, + (err: Error, res: string[]) => { + expect(res).toHaveLength(4); + }); + 
expect(OrderbookLevelsCache.getOrderBookMidPrice).toHaveBeenCalledTimes(6); + + // Check the median price + const result:{[ticker: string]: string | undefined} = await getMedianPrices( + client, + [defaultTicker], + ); + // Median of last 4 prices, as first two should have expired after moving clock forward + expect(result).toEqual({ 'BTC-USD': '50500' }); + + jest.useRealTimers(); + }); + + it('returns the correct median price for small numbers with even number of prices', async () => { + setPrice(defaultTicker, '0.00000000002345'); + setPrice(defaultTicker, '0.00000000002346'); + + const midPrice1: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [defaultTicker], + ); + expect(midPrice1).toEqual({ 'BTC-USD': '0.000000000023455' }); + }); + + it('returns the correct median price for small numbers with odd number of prices', async () => { + setPrice(defaultTicker, '0.00000000001'); + setPrice(defaultTicker, '0.00000000002'); + setPrice(defaultTicker, '0.00000000003'); + setPrice(defaultTicker, '0.00000000004'); + setPrice(defaultTicker, '0.00000000005'); + + const midPrice1: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [defaultTicker], + ); + expect(midPrice1).toEqual({ 'BTC-USD': '0.00000000003' }); + + await deleteAllAsync(client); + + setPrice(defaultTicker, '0.00000847007'); + setPrice(defaultTicker, '0.00000847006'); + setPrice(defaultTicker, '0.00000847008'); + + const midPrice2: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [defaultTicker], + ); + expect(midPrice2).toEqual({ 'BTC-USD': '0.00000847007' }); + }); + }); + + describe('getMedianPrices for multiple markets', () => { + const btcUsdTicker = 'BTC-USD'; + const ethUsdTicker = 'ETH-USD'; + const solUsdTicker = 'SOL-USD'; + + beforeEach(async () => { + await deleteAllAsync(client); + }); + + it('returns correct median prices for multiple markets with odd number of prices', async () => { + // Set prices for 
BTC-USD + setPrice(btcUsdTicker, '50000'); + setPrice(btcUsdTicker, '51000'); + setPrice(btcUsdTicker, '49000'); + + // Set prices for ETH-USD + setPrice(ethUsdTicker, '3000'); + setPrice(ethUsdTicker, '3100'); + setPrice(ethUsdTicker, '2900'); + + // Set prices for SOL-USD + setPrice(solUsdTicker, '100'); + setPrice(solUsdTicker, '102'); + setPrice(solUsdTicker, '98'); + + const result: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [btcUsdTicker, ethUsdTicker, solUsdTicker], + ); + expect(result).toEqual({ + 'BTC-USD': '50000', + 'ETH-USD': '3000', + 'SOL-USD': '100', + }); + }); + + it('returns correct median prices for multiple markets with even number of prices', async () => { + // Set prices for BTC-USD + setPrice(btcUsdTicker, '50000'); + setPrice(btcUsdTicker, '51000'); + setPrice(btcUsdTicker, '49000'); + setPrice(btcUsdTicker, '52000'); + + // Set prices for ETH-USD + setPrice(ethUsdTicker, '3000'); + setPrice(ethUsdTicker, '3100'); + setPrice(ethUsdTicker, '2900'); + setPrice(ethUsdTicker, '3200'); + + const result: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [btcUsdTicker, ethUsdTicker], + ); + expect(result).toEqual({ + 'BTC-USD': '50500', + 'ETH-USD': '3050', + }); + }); + + it('handles markets with different numbers of prices', async () => { + // Set prices for BTC-USD (odd number) + setPrice(btcUsdTicker, '50000'); + setPrice(btcUsdTicker, '51000'); + setPrice(btcUsdTicker, '49000'); + + // Set prices for ETH-USD (even number) + setPrice(ethUsdTicker, '3000'); + setPrice(ethUsdTicker, '3100'); + setPrice(ethUsdTicker, '2900'); + setPrice(ethUsdTicker, '3200'); + + // Set no prices for SOL-USD + + const result: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [btcUsdTicker, ethUsdTicker, solUsdTicker], + ); + expect(result).toEqual({ + 'BTC-USD': '50000', + 'ETH-USD': '3050', + 'SOL-USD': undefined, + }); + }); + + it('calculates correct median prices for 
markets with small and large numbers', async () => { + // Set prices for BTC-USD (large numbers) + setPrice(btcUsdTicker, '50000.12345'); + setPrice(btcUsdTicker, '50000.12346'); + + // Set prices for ETH-USD (medium numbers) + setPrice(ethUsdTicker, '3000.5'); + setPrice(ethUsdTicker, '3000.6'); + setPrice(ethUsdTicker, '3000.7'); + + // Set prices for SOL-USD (small numbers) + setPrice(solUsdTicker, '0.00000123'); + setPrice(solUsdTicker, '0.00000124'); + setPrice(solUsdTicker, '0.00000125'); + setPrice(solUsdTicker, '0.00000126'); + + const result: { [ticker: string]: string | undefined } = await getMedianPrices( + client, + [btcUsdTicker, ethUsdTicker, solUsdTicker], + ); + expect(result).toEqual({ + 'BTC-USD': '50000.123455', + 'ETH-USD': '3000.6', + 'SOL-USD': '0.000001245', + }); + }); + }); +}); diff --git a/indexer/packages/redis/__tests__/helpers/constants.ts b/indexer/packages/redis/__tests__/helpers/constants.ts index 71912a20a40..8925727f321 100644 --- a/indexer/packages/redis/__tests__/helpers/constants.ts +++ b/indexer/packages/redis/__tests__/helpers/constants.ts @@ -168,3 +168,44 @@ export const orderUpdate: OffChainUpdateOrderUpdateUpdateMessage = { totalFilledQuantums: Long.fromValue(250_500, true), }, }; + +export const isolatedSubaccountId: IndexerSubaccountId = { + owner: testConstants.isolatedSubaccount.address, + number: testConstants.isolatedSubaccount.subaccountNumber, +}; +export const isolatedMarketOrderId: IndexerOrderId = { + subaccountId: isolatedSubaccountId, + clientId: 1, + clobPairId: parseInt(testConstants.isolatedPerpetualMarket.clobPairId, 10), + orderFlags: ORDER_FLAG_SHORT_TERM, +}; +export const isolatedMarketOrder: IndexerOrder = { + orderId: isolatedMarketOrderId, + side: IndexerOrder_Side.SIDE_BUY, + quantums: Long.fromValue(1_000_000, true), + subticks: Long.fromValue(2_000_000, true), + goodTilBlock: 1150, + goodTilBlockTime: undefined, + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY, + reduceOnly: 
false, + clientMetadata: 0, + conditionType: IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED, + conditionalOrderTriggerSubticks: Long.fromValue(0, true), +}; + +export const isolatedMarketOrderUuid: string = OrderTable.orderIdToUuid(isolatedMarketOrderId); + +export const isolatedMarketRedisOrder: RedisOrder = { + id: isolatedMarketOrderUuid, + order: isolatedMarketOrder, + ticker: testConstants.isolatedPerpetualMarket.ticker, + tickerType: RedisOrder_TickerType.TICKER_TYPE_PERPETUAL, + price: protocolTranslations.subticksToPrice( + isolatedMarketOrder.subticks.toString(), + testConstants.isolatedPerpetualMarket, + ), + size: protocolTranslations.quantumsToHumanFixedString( + isolatedMarketOrder.quantums.toString(), + testConstants.isolatedPerpetualMarket.atomicResolution, + ), +}; diff --git a/indexer/packages/redis/src/caches/leaderboard-processed-cache.ts b/indexer/packages/redis/src/caches/leaderboard-processed-cache.ts new file mode 100644 index 00000000000..5b8825b66cf --- /dev/null +++ b/indexer/packages/redis/src/caches/leaderboard-processed-cache.ts @@ -0,0 +1,25 @@ +import { LeaderboardPnlTimeSpan } from '@dydxprotocol-indexer/postgres'; +import { RedisClient } from 'redis'; + +import { getAsync } from '../helpers/redis'; + +export const LEADERBOARD_PNL_TIMESPAN_PROCESSED_CACHE_KEY: string = 'v4/leaderboard_pnl_processed/'; + +function getKey(period: LeaderboardPnlTimeSpan): string { + return `${LEADERBOARD_PNL_TIMESPAN_PROCESSED_CACHE_KEY}${period}`; +} + +export async function getProcessedTime( + timespan: LeaderboardPnlTimeSpan, + client: RedisClient, +): Promise { + return getAsync(getKey(timespan), client); +} + +export async function setProcessedTime( + period: LeaderboardPnlTimeSpan, + timestamp: string, + client: RedisClient, +): Promise { + await client.set(getKey(period), timestamp); +} diff --git a/indexer/packages/redis/src/caches/orderbook-levels-cache.ts b/indexer/packages/redis/src/caches/orderbook-levels-cache.ts index 
e593bebcbcd..cb75fda5845 100644 --- a/indexer/packages/redis/src/caches/orderbook-levels-cache.ts +++ b/indexer/packages/redis/src/caches/orderbook-levels-cache.ts @@ -4,10 +4,15 @@ import Big from 'big.js'; import _ from 'lodash'; import { Callback, RedisClient } from 'redis'; -import { InvalidOptionsError, InvalidPriceLevelUpdateError } from '../errors'; +import { InvalidOptionsError } from '../errors'; import { hGetAsync } from '../helpers/redis'; import { OrderbookLevels, PriceLevel } from '../types'; -import { deleteZeroPriceLevelScript, getOrderbookSideScript, incrementOrderbookLevelScript } from './scripts'; +import { + deleteZeroPriceLevelScript, + deleteStalePriceLevelScript, + getOrderbookSideScript, + incrementOrderbookLevelScript, +} from './scripts'; // Cache of orderbook levels for each clob pair // Each side of each exchange pair is an HSET with the hash = price, and value = total size of @@ -53,32 +58,29 @@ export async function updatePriceLevel({ // NOTE: If this happens from a single price level update, it's possible for multiple subsequent // price level updates to fail with the same error due to interleaved price level updates. if (updatedQuantums < 0) { - // Undo the update. This can't be done in a Lua script as Redis runs Lua 5.1, which only - // uses doubles which support up to 53-bit integers. Race-condition where it's possible for a - // price-level to have negative quantums handled in `getOrderbookLevels` where price-levels with - // negative quantums are filtered out. Note: even though we are reverting this information, each - // call to incrementOrderbookLevel updates the lastUpdated key in the cache. + // Set the price level to 0. + // Race-condition where it's possible for a price-level to have negative quantums handled in + // `getOrderbookLevels` where price-levels with negative quantums are filtered out. 
Note: even + // though we are reverting this information, each call to incrementOrderbookLevel updates the + // lastUpdated key in the cache. await incrementOrderbookLevel( ticker, side, humanPrice, // Needs to be an integer - Big(sizeDeltaInQuantums).mul(-1).toFixed(0), + Big(updatedQuantums).mul(-1).toFixed(0), client, ); logger.crit({ at: 'orderbookLevelsCache#updatePriceLevel', - message: 'Price level updated to negative quantums', + message: 'Price level updated to negative quantums, set to zero', ticker, side, humanPrice, updatedQuantums, sizeDeltaInQuantums, }); - throw new InvalidPriceLevelUpdateError( - '#updatePriceLevel: Resulting price level has negative quantums, quantums = ' + - `${updatedQuantums}`, - ); + return 0; } return updatedQuantums; @@ -333,6 +335,71 @@ export async function deleteZeroPriceLevel({ ); } +/** + * Deletes a stale price level from the orderbook levels cache idempotently using a Lua script. + * @param param0 Ticker of the exchange pair, side, human readable price level, + * time threshold to delete. + * @returns `boolean`, true/false for whether the level was deleted. + */ +export async function deleteStalePriceLevel({ + ticker, + side, + humanPrice, + timeThreshold, + client, +}: { + ticker: string, + side: OrderSide, + humanPrice: string, + timeThreshold: number, + client: RedisClient, +}): Promise { + // Number of keys for the lua script. 
+ const numKeys: number = 2; + + let evalAsync: ( + orderbookKey: string, + lastUpdatedKey: string, + priceLevel: string, + timeInterval: number, + ) => Promise = ( + orderbookKey, + lastUpdatedKey, + priceLevel, + timeInterval, + ) => { + return new Promise((resolve, reject) => { + const callback: Callback = ( + err: Error | null, + results: number, + ) => { + if (err) { + return reject(err); + } + const deleted: number = results; + return resolve(deleted === 1); + }; + client.evalsha( + deleteStalePriceLevelScript.hash, + numKeys, + orderbookKey, + lastUpdatedKey, + priceLevel, + timeInterval, + callback, + ); + }); + }; + evalAsync = evalAsync.bind(client); + + return evalAsync( + getKey(ticker, side), + getLastUpdatedKey(ticker, side), + humanPrice, + timeThreshold, + ); +} + /** * Gets the quantums and lastUpdated data from the cache for the given orderbook side. * @param param0 Ticker of the exchange pair, side, Redis client. @@ -396,8 +463,10 @@ export async function getOrderbookSideData({ // The 1st list is a flat array of alternating key-value pairs representing prices and quantums. // The 2nd is a flat array of alternating key-value pairs representing prices and lastUpdated // values. 
- const quantumsMapping: {[field: string]: string} = _.fromPairs(_.chunk(rawRedisResults[0], 2)); - const lastUpdatedMapping: {[field: string]: string} = _.fromPairs(_.chunk(rawRedisResults[1], 2)); + const quantumsMapping: { [field: string]: string } = _.fromPairs(_.chunk(rawRedisResults[0], 2)); + const lastUpdatedMapping: { [field: string]: string } = _.fromPairs( + _.chunk(rawRedisResults[1], 2), + ); return convertToPriceLevels(ticker, side, quantumsMapping, lastUpdatedMapping); @@ -432,8 +501,8 @@ async function getOrderbookSide( function convertToPriceLevels( ticker: string, side: OrderSide, - price2QuantumsMapping: {[field: string]: string}, - price2LastUpdatedMapping: {[field: string]: string}, + price2QuantumsMapping: { [field: string]: string }, + price2LastUpdatedMapping: { [field: string]: string }, ): PriceLevel[] { const quantumsKeys: string[] = _.keys(price2QuantumsMapping); const lastUpdatedKeys: string[] = _.keys(price2LastUpdatedMapping); @@ -460,3 +529,27 @@ function convertToPriceLevels( }; }); } + +export async function getOrderBookMidPrice( + ticker: string, + client: RedisClient, +): Promise { + const levels = await getOrderBookLevels(ticker, client, { + removeZeros: true, + sortSides: true, + uncrossBook: true, + limitPerSide: 1, + }); + + if (levels.bids.length === 0 || levels.asks.length === 0) { + return undefined; + } + + const bestAsk = Big(levels.asks[0].humanPrice); + const bestBid = Big(levels.bids[0].humanPrice); + + if (bestAsk === undefined || bestBid === undefined) { + return undefined; + } + return bestBid.plus(bestAsk).div(2).toFixed(); +} diff --git a/indexer/packages/redis/src/caches/orderbook-mid-prices-cache.ts b/indexer/packages/redis/src/caches/orderbook-mid-prices-cache.ts new file mode 100644 index 00000000000..bec1c50fa4f --- /dev/null +++ b/indexer/packages/redis/src/caches/orderbook-mid-prices-cache.ts @@ -0,0 +1,158 @@ +import { logger } from '@dydxprotocol-indexer/base'; +import Big from 'big.js'; +import { 
Callback, RedisClient } from 'redis'; + +import { getOrderBookMidPrice } from './orderbook-levels-cache'; +import { + addOrderbookMidPricesScript, + getOrderbookMidPricesScript, +} from './scripts'; + +// Cache of orderbook prices for each clob pair +// Each price is cached for a 5 second window and in a ZSET +export const ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX: string = 'v4/orderbook_mid_prices/'; + +/** + * Generates a cache key for a given ticker's orderbook mid price. + * @param ticker The ticker symbol + * @returns The cache key string + */ +function getOrderbookMidPriceCacheKey(ticker: string): string { + return `${ORDERBOOK_MID_PRICES_CACHE_KEY_PREFIX}${ticker}`; +} + +/** + * Fetches and caches mid prices for multiple tickers. + * @param client The Redis client + * @param tickers An array of ticker symbols + * @returns A promise that resolves when all prices are fetched and cached + */ +export async function fetchAndCacheOrderbookMidPrices( + client: RedisClient, + tickers: string[], +): Promise { + // Fetch midPrices and filter out undefined values + const cacheKeyPricePairs: ({ cacheKey: string, midPrice: string } | null)[] = await Promise.all( + tickers.map(async (ticker) => { + const cacheKey: string = getOrderbookMidPriceCacheKey(ticker); + const midPrice: string | undefined = await getOrderBookMidPrice(ticker, client); + if (midPrice !== undefined) { + return { cacheKey, midPrice }; + } + return null; + }), + ); + + // Filter out null values + const validPairs: { cacheKey: string, midPrice: string }[] = cacheKeyPricePairs.filter( + (pair): pair is { cacheKey: string, midPrice: string } => pair !== null, + ); + if (validPairs.length === 0) { + // No valid midPrices to cache + return; + } + + const nowSeconds: number = Math.floor(Date.now() / 1000); // Current time in seconds + // Extract cache keys and prices + const priceValues: string[] = validPairs.map((pair) => pair.midPrice); + const priceCacheKeys: string[] = validPairs.map((pair) => { + + 
logger.info({ + at: 'orderbook-mid-prices-cache#fetchAndCacheOrderbookMidPrices', + message: 'Caching orderbook mid price', + cacheKey: pair.cacheKey, + midPrice: pair.midPrice, + }); + return pair.cacheKey; + }); + + return new Promise((resolve, reject) => { + client.evalsha( + addOrderbookMidPricesScript.hash, + priceCacheKeys.length, + ...priceCacheKeys, + ...priceValues, + nowSeconds, + (err: Error | null) => { + if (err) { + reject(err); + } else { + resolve(); + } + }, + ); + }); +} + +/** + * Retrieves the median prices for a given array of tickers from the cache. + * @param client The Redis client + * @param tickers Array of ticker symbols + * @returns A promise that resolves with an object mapping tickers + * to their median prices (as strings) or undefined if not found + */ +export async function getMedianPrices( + client: RedisClient, + tickers: string[], +): Promise<{ [ticker: string]: string | undefined }> { + + let evalAsync: ( + marketCacheKeys: string[], + ) => Promise = ( + marketCacheKeys, + ) => { + return new Promise((resolve, reject) => { + const callback: Callback = ( + err: Error | null, + results: string[][], + ) => { + if (err) { + return reject(err); + } + return resolve(results); + }; + + client.evalsha( + getOrderbookMidPricesScript.hash, // The Lua script to get cached prices + marketCacheKeys.length, + ...marketCacheKeys, + callback, + ); + }); + }; + evalAsync = evalAsync.bind(client); + + // Map tickers to cache keys + const marketCacheKeys: string[] = tickers.map(getOrderbookMidPriceCacheKey); + // Fetch the prices arrays from Redis (without scores) + const pricesArrays: string[][] = await evalAsync(marketCacheKeys); + + const result: { [ticker: string]: string | undefined } = {}; + tickers.forEach((ticker, index) => { + const prices = pricesArrays[index]; + + // Check if there are any prices + if (!prices || prices.length === 0) { + result[ticker] = undefined; + return; + } + + // Convert the prices to Big.js objects for precision 
+ const bigPrices: Big[] = prices.map((price) => Big(price)); + + // Sort the prices in ascending order + bigPrices.sort((a, b) => a.cmp(b)); + + // Calculate the median + const mid: number = Math.floor(bigPrices.length / 2); + if (bigPrices.length % 2 === 1) { + // Odd number of prices: the middle one is the median + result[ticker] = bigPrices[mid].toFixed(); + } else { + // Even number of prices: average the two middle ones + result[ticker] = bigPrices[mid - 1].plus(bigPrices[mid]).div(2).toFixed(); + } + }); + + return result; +} diff --git a/indexer/packages/redis/src/caches/scripts.ts b/indexer/packages/redis/src/caches/scripts.ts index 5bfa6a13826..f9ff244e7cb 100644 --- a/indexer/packages/redis/src/caches/scripts.ts +++ b/indexer/packages/redis/src/caches/scripts.ts @@ -53,6 +53,7 @@ function newLuaScript(name: string, scriptPath: string): LuaScript { // Lua Scripts for deleting zero price levels export const deleteZeroPriceLevelScript: LuaScript = newLuaScript('deleteZeroPriceLevel', '../scripts/delete_zero_level.lua'); +export const deleteStalePriceLevelScript: LuaScript = newLuaScript('deleteStalePriceLevel', '../scripts/delete_stale_price_level.lua'); // Lua Scripts for updating/retrieving the orderbook levels, keeping the lastUpdated cache in sync export const incrementOrderbookLevelScript: LuaScript = newLuaScript('incrementOrderbookLevel', '../scripts/increment_orderbook_level.lua'); export const getOrderbookSideScript: LuaScript = newLuaScript('getOrderbookSide', '../scripts/get_orderbook_side.lua'); @@ -62,9 +63,12 @@ export const removeOrderScript: LuaScript = newLuaScript('removeOrder', '../scri export const addCanceledOrderIdScript: LuaScript = newLuaScript('addCanceledOrderId', '../scripts/add_canceled_order_id.lua'); export const addStatefulOrderUpdateScript: LuaScript = newLuaScript('addStatefulOrderUpdate', '../scripts/add_stateful_order_update.lua'); export const removeStatefulOrderUpdateScript: LuaScript = 
newLuaScript('removeStatefulOrderUpdate', '../scripts/remove_stateful_order_update.lua'); +export const addOrderbookMidPricesScript: LuaScript = newLuaScript('addOrderbookMidPrices', '../scripts/add_orderbook_mid_prices.lua'); +export const getOrderbookMidPricesScript: LuaScript = newLuaScript('getOrderbookMidPrices', '../scripts/get_orderbook_mid_prices.lua'); export const allLuaScripts: LuaScript[] = [ deleteZeroPriceLevelScript, + deleteStalePriceLevelScript, incrementOrderbookLevelScript, getOrderbookSideScript, updateOrderScript, @@ -73,4 +77,6 @@ export const allLuaScripts: LuaScript[] = [ addCanceledOrderIdScript, addStatefulOrderUpdateScript, removeStatefulOrderUpdateScript, + addOrderbookMidPricesScript, + getOrderbookMidPricesScript, ]; diff --git a/indexer/packages/redis/src/errors.ts b/indexer/packages/redis/src/errors.ts index 55ca29bb34f..84bd2603081 100644 --- a/indexer/packages/redis/src/errors.ts +++ b/indexer/packages/redis/src/errors.ts @@ -21,11 +21,3 @@ export class InvalidOptionsError extends Error { Error.captureStackTrace(this, this.constructor); } } - -export class InvalidPriceLevelUpdateError extends Error { - constructor(message: string) { - super(`Invalid price level update: ${message}`); - this.name = this.constructor.name; - Error.captureStackTrace(this, this.constructor); - } -} diff --git a/indexer/packages/redis/src/helpers/order-helper.ts b/indexer/packages/redis/src/helpers/order-helper.ts new file mode 100644 index 00000000000..d2a910a282f --- /dev/null +++ b/indexer/packages/redis/src/helpers/order-helper.ts @@ -0,0 +1,28 @@ +import { OrderTable, PerpetualMarketFromDatabase, protocolTranslations } from '@dydxprotocol-indexer/postgres'; +import { IndexerOrder, RedisOrder, RedisOrder_TickerType } from '@dydxprotocol-indexer/v4-protos'; + +/** + * Creates a `RedisOrder` given an `Order` and the corresponding `PerpetualMarket` for the `Order`. 
+ * @param order + * @param perpetualMarket + * @returns + */ +export function convertToRedisOrder( + order: IndexerOrder, + perpetualMarket: PerpetualMarketFromDatabase, +): RedisOrder { + return { + order, + id: OrderTable.orderIdToUuid(order.orderId!), + ticker: perpetualMarket.ticker, + tickerType: RedisOrder_TickerType.TICKER_TYPE_PERPETUAL, + price: protocolTranslations.subticksToPrice( + order.subticks.toString(), + perpetualMarket, + ), + size: protocolTranslations.quantumsToHumanFixedString( + order.quantums.toString(), + perpetualMarket.atomicResolution, + ), + }; +} diff --git a/indexer/packages/redis/src/helpers/redis.ts b/indexer/packages/redis/src/helpers/redis.ts index 15471831f6d..77a26e25304 100644 --- a/indexer/packages/redis/src/helpers/redis.ts +++ b/indexer/packages/redis/src/helpers/redis.ts @@ -378,7 +378,7 @@ export async function hSetnxAsync( }: { hash: string, key: string, - value: string + value: string, }, client: RedisClient, ): Promise { diff --git a/indexer/packages/redis/src/index.ts b/indexer/packages/redis/src/index.ts index 08708b42e61..5c7aeed1037 100644 --- a/indexer/packages/redis/src/index.ts +++ b/indexer/packages/redis/src/index.ts @@ -1,7 +1,6 @@ export * as redis from './helpers/redis'; export * as AggregateTradingRewardsProcessedCache from './caches/aggregate-trading-rewards-processed-cache'; -export * as OpenOrdersCache from './caches/open-orders-cache'; export * as OrdersCache from './caches/orders-cache'; export * as OrdersDataCache from './caches/orders-data-cache'; export * as OrderExpiryCache from './caches/order-expiry-cache'; @@ -12,9 +11,12 @@ export * as LatestAccountPnlTicksCache from './caches/latest-account-pnl-ticks-c export * as CanceledOrdersCache from './caches/canceled-orders-cache'; export * as StatefulOrderUpdatesCache from './caches/stateful-order-updates-cache'; export * as StateFilledQuantumsCache from './caches/state-filled-quantums-cache'; +export * as LeaderboardPnlProcessedCache from 
'./caches/leaderboard-processed-cache'; +export * as OrderbookMidPricesCache from './caches/orderbook-mid-prices-cache'; export { placeOrder } from './caches/place-order'; export { removeOrder } from './caches/remove-order'; export { updateOrder } from './caches/update-order'; +export * from './helpers/order-helper'; export * from './types'; export { redisConfigSchema } from './config'; diff --git a/indexer/packages/redis/src/scripts/add_orderbook_mid_prices.lua b/indexer/packages/redis/src/scripts/add_orderbook_mid_prices.lua new file mode 100644 index 00000000000..c30880fff46 --- /dev/null +++ b/indexer/packages/redis/src/scripts/add_orderbook_mid_prices.lua @@ -0,0 +1,38 @@ +-- KEYS contains the market cache keys +-- ARGV contains the prices for each market and a single timestamp at the end + +local numKeys = #KEYS +local numArgs = #ARGV + +-- Get the timestamp from the last argument +local timestamp = tonumber(ARGV[numArgs]) + +-- Time window (60 seconds) +local sixtySeconds = 60 + +-- Validate the timestamp +if not timestamp then + return redis.error_reply("Invalid timestamp") +end + +-- Calculate the cutoff time for removing old prices +local cutoffTime = timestamp - sixtySeconds + +-- Iterate through each key (market) and corresponding price +for i = 1, numKeys do + local priceCacheKey = KEYS[i] + local price = tonumber(ARGV[i]) + + -- Validate the price + if not price then + return redis.error_reply("Invalid price for key " .. 
priceCacheKey) + end + + -- Add the price to the sorted set with the current timestamp as the score + redis.call("ZADD", priceCacheKey, timestamp, price) + + -- Remove entries older than the cutoff time (older than 60 seconds) + redis.call("ZREMRANGEBYSCORE", priceCacheKey, "-inf", cutoffTime) +end + +return true diff --git a/indexer/packages/redis/src/scripts/delete_stale_price_level.lua b/indexer/packages/redis/src/scripts/delete_stale_price_level.lua new file mode 100644 index 00000000000..314842d87ba --- /dev/null +++ b/indexer/packages/redis/src/scripts/delete_stale_price_level.lua @@ -0,0 +1,30 @@ +-- Key for the hset of price levels +local hash = KEYS[1] +-- Key for the hset of price levels 'last updated' data +local lastUpdatedHash = KEYS[2] +-- Price level +local level = ARGV[1] +-- Time threshold in seconds +local timeThreshold = tonumber(ARGV[2]) + +-- This script deletes a price level in the orderbook levels cache if the last updated time is more than timeThreshold seconds in the past. +-- The return value is 1 if a price level was deleted and 0 if a price level was not deleted. 
+ +-- Get the current time +local currentTime = tonumber(redis.call("time")[1]) + +-- Get the last updated time for the level +local lastUpdatedTime = tonumber(redis.call("hget", lastUpdatedHash, level)) +if not lastUpdatedTime then + return 0 +end + +-- Check if the last updated time is more than timeThreshold seconds in the past +if currentTime - lastUpdatedTime <= timeThreshold then + return 0 +end + +-- Delete the level from both hashes +redis.call("hdel", hash, level) +redis.call("hdel", lastUpdatedHash, level) +return 1 diff --git a/indexer/packages/redis/src/scripts/get_orderbook_mid_prices.lua b/indexer/packages/redis/src/scripts/get_orderbook_mid_prices.lua new file mode 100644 index 00000000000..d897ccc9f72 --- /dev/null +++ b/indexer/packages/redis/src/scripts/get_orderbook_mid_prices.lua @@ -0,0 +1,10 @@ +-- KEYS is an array of cache keys for a market + +local results = {} +for i, key in ipairs(KEYS) do + -- Get the prices for each key, but limit to a maximum of 10 + local prices = redis.call("ZRANGE", key, 0, 9) + results[i] = prices +end + +return results diff --git a/indexer/packages/redis/src/types.ts b/indexer/packages/redis/src/types.ts index 76e2339d271..a467a9b53c1 100644 --- a/indexer/packages/redis/src/types.ts +++ b/indexer/packages/redis/src/types.ts @@ -6,7 +6,7 @@ export interface PlaceOrderResult { // true if an order was placed placed: boolean, // true if an order was replaced - replaced: boolean + replaced: boolean, // total filled of the old order in quantums, undefined if an order was not replaced oldTotalFilledQuantums?: number, // whether the old order was resting on the book, undefined if an order was not replaced @@ -60,11 +60,11 @@ export interface OrderData { export type LuaScript = { // The name of the script - readonly name: string; + readonly name: string, // The contents of the script - readonly script: string; + readonly script: string, // The SHA1 hash of the contents of the script - readonly hash: string; + readonly hash: 
string, }; export enum CanceledOrderStatus { @@ -78,7 +78,7 @@ export type PnlTickForSubaccounts = { // Stores a PnlTicksCreateObject for the most recent pnl tick for each subaccount. // Opted for PnlTicksCreateObject instead ofPnlTicksFromDatabase as we don't need to store // the uuid. - [subaccountId: string]: PnlTicksCreateObject + [subaccountId: string]: PnlTicksCreateObject, }; /* -------- Stateful order update cache types -------- */ diff --git a/indexer/packages/v4-proto-parser/__tests__/order-helpers.test.ts b/indexer/packages/v4-proto-parser/__tests__/order-helpers.test.ts index bd8d3f811a3..90468e4cbda 100644 --- a/indexer/packages/v4-proto-parser/__tests__/order-helpers.test.ts +++ b/indexer/packages/v4-proto-parser/__tests__/order-helpers.test.ts @@ -1,5 +1,5 @@ import { IndexerOrderId } from '@dydxprotocol-indexer/v4-protos'; -import { getOrderIdHash, isStatefulOrder } from '../src/order-helpers'; +import { getOrderIdHash, isLongTermOrder, isStatefulOrder } from '../src/order-helpers'; import { ORDER_FLAG_CONDITIONAL, ORDER_FLAG_LONG_TERM, ORDER_FLAG_SHORT_TERM } from '../src'; describe('getOrderIdHash', () => { @@ -65,3 +65,23 @@ describe('isStatefulOrder', () => { expect(isStatefulOrder(flag)).toEqual(isStateful); }); }); + +describe('isLongTermOrder', () => { + it.each([ + [ORDER_FLAG_SHORT_TERM.toString(), 'string', false], + ['4', 'string', false], + [ORDER_FLAG_CONDITIONAL.toString(), 'string', false], + [ORDER_FLAG_LONG_TERM.toString(), 'string', true], + [ORDER_FLAG_SHORT_TERM, 'number', false], + [3, 'number', false], + [ORDER_FLAG_CONDITIONAL, 'number', false], + [ORDER_FLAG_LONG_TERM, 'number', true], + ['abc', 'string', false], + ])('Checks if flag %s with type %s is a long term order', ( + flag: number | string, + _type: string, + isLongTerm: boolean, + ) => { + expect(isLongTermOrder(flag)).toEqual(isLongTerm); + }); +}); diff --git a/indexer/packages/v4-proto-parser/src/order-helpers.ts 
b/indexer/packages/v4-proto-parser/src/order-helpers.ts index beeb567ac13..6724942941d 100644 --- a/indexer/packages/v4-proto-parser/src/order-helpers.ts +++ b/indexer/packages/v4-proto-parser/src/order-helpers.ts @@ -24,6 +24,15 @@ export function isStatefulOrder(orderFlag: number | String): boolean { return numberOrderFlag === ORDER_FLAG_CONDITIONAL || numberOrderFlag === ORDER_FLAG_LONG_TERM; } +export function isLongTermOrder(orderFlag: number | String): boolean { + const numberOrderFlag: number = Number(orderFlag); + // A string that is not a number will be converted to NaN, and should return false. + if (Number.isNaN(numberOrderFlag)) { + return false; + } + return numberOrderFlag === ORDER_FLAG_LONG_TERM; +} + export function requiresImmediateExecution(tif: IndexerOrder_TimeInForce): boolean { return ( tif === IndexerOrder_TimeInForce.TIME_IN_FORCE_FILL_OR_KILL || diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/accountplus.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/accountplus.ts new file mode 100644 index 00000000000..71069ee5768 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/accountplus.ts @@ -0,0 +1,155 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** Account State */ + +export interface AccountState { + address: string; + timestampNonceDetails?: TimestampNonceDetails; +} +/** Account State */ + +export interface AccountStateSDKType { + address: string; + timestamp_nonce_details?: TimestampNonceDetailsSDKType; +} +/** Timestamp nonce details */ + +export interface TimestampNonceDetails { + /** unsorted list of n most recent timestamp nonces */ + timestampNonces: Long[]; + /** max timestamp nonce that was ejected from list above */ + + maxEjectedNonce: Long; +} +/** Timestamp nonce details */ + +export interface TimestampNonceDetailsSDKType { + /** unsorted list of n most recent timestamp nonces */ + 
timestamp_nonces: Long[]; + /** max timestamp nonce that was ejected from list above */ + + max_ejected_nonce: Long; +} + +function createBaseAccountState(): AccountState { + return { + address: "", + timestampNonceDetails: undefined + }; +} + +export const AccountState = { + encode(message: AccountState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.timestampNonceDetails !== undefined) { + TimestampNonceDetails.encode(message.timestampNonceDetails, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccountState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.timestampNonceDetails = TimestampNonceDetails.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccountState { + const message = createBaseAccountState(); + message.address = object.address ?? ""; + message.timestampNonceDetails = object.timestampNonceDetails !== undefined && object.timestampNonceDetails !== null ? 
TimestampNonceDetails.fromPartial(object.timestampNonceDetails) : undefined; + return message; + } + +}; + +function createBaseTimestampNonceDetails(): TimestampNonceDetails { + return { + timestampNonces: [], + maxEjectedNonce: Long.UZERO + }; +} + +export const TimestampNonceDetails = { + encode(message: TimestampNonceDetails, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.timestampNonces) { + writer.uint64(v); + } + + writer.ldelim(); + + if (!message.maxEjectedNonce.isZero()) { + writer.uint32(16).uint64(message.maxEjectedNonce); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TimestampNonceDetails { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestampNonceDetails(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.timestampNonces.push((reader.uint64() as Long)); + } + } else { + message.timestampNonces.push((reader.uint64() as Long)); + } + + break; + + case 2: + message.maxEjectedNonce = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TimestampNonceDetails { + const message = createBaseTimestampNonceDetails(); + message.timestampNonces = object.timestampNonces?.map(e => Long.fromValue(e)) || []; + message.maxEjectedNonce = object.maxEjectedNonce !== undefined && object.maxEjectedNonce !== null ? 
Long.fromValue(object.maxEjectedNonce) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/genesis.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/genesis.ts new file mode 100644 index 00000000000..5ffc5a7edf1 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/genesis.ts @@ -0,0 +1,201 @@ +import { AccountAuthenticator, AccountAuthenticatorSDKType } from "./models"; +import { AccountState, AccountStateSDKType } from "./accountplus"; +import { Params, ParamsSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** + * AuthenticatorData represents a genesis exported account with Authenticators. + * The address is used as the key, and the account authenticators are stored in + * the authenticators field. + */ + +export interface AuthenticatorData { + /** address is an account address, one address can have many authenticators */ + address: string; + /** + * authenticators are the account's authenticators, these can be multiple + * types including SignatureVerification, AllOfs, CosmWasmAuthenticators, etc + */ + + authenticators: AccountAuthenticator[]; +} +/** + * AuthenticatorData represents a genesis exported account with Authenticators. + * The address is used as the key, and the account authenticators are stored in + * the authenticators field. 
+ */ + +export interface AuthenticatorDataSDKType { + /** address is an account address, one address can have many authenticators */ + address: string; + /** + * authenticators are the account's authenticators, these can be multiple + * types including SignatureVerification, AllOfs, CosmWasmAuthenticators, etc + */ + + authenticators: AccountAuthenticatorSDKType[]; +} +/** Module genesis state */ + +export interface GenesisState { + accounts: AccountState[]; + /** params define the parameters for the authenticator module. */ + + params?: Params; + /** next_authenticator_id is the next available authenticator ID. */ + + nextAuthenticatorId: Long; + /** + * authenticator_data contains the data for multiple accounts, each with their + * authenticators. + */ + + authenticatorData: AuthenticatorData[]; +} +/** Module genesis state */ + +export interface GenesisStateSDKType { + accounts: AccountStateSDKType[]; + /** params define the parameters for the authenticator module. */ + + params?: ParamsSDKType; + /** next_authenticator_id is the next available authenticator ID. */ + + next_authenticator_id: Long; + /** + * authenticator_data contains the data for multiple accounts, each with their + * authenticators. + */ + + authenticator_data: AuthenticatorDataSDKType[]; +} + +function createBaseAuthenticatorData(): AuthenticatorData { + return { + address: "", + authenticators: [] + }; +} + +export const AuthenticatorData = { + encode(message: AuthenticatorData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.authenticators) { + AccountAuthenticator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuthenticatorData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAuthenticatorData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.authenticators.push(AccountAuthenticator.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AuthenticatorData { + const message = createBaseAuthenticatorData(); + message.address = object.address ?? ""; + message.authenticators = object.authenticators?.map(e => AccountAuthenticator.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseGenesisState(): GenesisState { + return { + accounts: [], + params: undefined, + nextAuthenticatorId: Long.UZERO, + authenticatorData: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accounts) { + AccountState.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + if (!message.nextAuthenticatorId.isZero()) { + writer.uint32(24).uint64(message.nextAuthenticatorId); + } + + for (const v of message.authenticatorData) { + AuthenticatorData.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accounts.push(AccountState.decode(reader, reader.uint32())); + break; + + case 2: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 3: + message.nextAuthenticatorId = (reader.uint64() as Long); + break; + + case 4: + message.authenticatorData.push(AuthenticatorData.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.accounts = object.accounts?.map(e => AccountState.fromPartial(e)) || []; + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.nextAuthenticatorId = object.nextAuthenticatorId !== undefined && object.nextAuthenticatorId !== null ? Long.fromValue(object.nextAuthenticatorId) : Long.UZERO; + message.authenticatorData = object.authenticatorData?.map(e => AuthenticatorData.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/models.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/models.ts new file mode 100644 index 00000000000..2e7ce339aa3 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/models.ts @@ -0,0 +1,117 @@ +import * as _m0 from "protobufjs/minimal"; +import { Long, DeepPartial } from "../../helpers"; +/** + * AccountAuthenticator represents a foundational model for all authenticators. + * It provides extensibility by allowing concrete types to interpret and + * validate transactions based on the encapsulated data. + */ + +export interface AccountAuthenticator { + /** ID uniquely identifies the authenticator instance. 
*/ + id: Long; + /** + * Type specifies the category of the AccountAuthenticator. + * This type information is essential for differentiating authenticators + * and ensuring precise data retrieval from the storage layer. + */ + + type: string; + /** + * Config is a versatile field used in conjunction with the specific type of + * account authenticator to facilitate complex authentication processes. + * The interpretation of this field is overloaded, enabling multiple + * authenticators to utilize it for their respective purposes. + */ + + config: Uint8Array; +} +/** + * AccountAuthenticator represents a foundational model for all authenticators. + * It provides extensibility by allowing concrete types to interpret and + * validate transactions based on the encapsulated data. + */ + +export interface AccountAuthenticatorSDKType { + /** ID uniquely identifies the authenticator instance. */ + id: Long; + /** + * Type specifies the category of the AccountAuthenticator. + * This type information is essential for differentiating authenticators + * and ensuring precise data retrieval from the storage layer. + */ + + type: string; + /** + * Config is a versatile field used in conjunction with the specific type of + * account authenticator to facilitate complex authentication processes. + * The interpretation of this field is overloaded, enabling multiple + * authenticators to utilize it for their respective purposes. 
+ */ + + config: Uint8Array; +} + +function createBaseAccountAuthenticator(): AccountAuthenticator { + return { + id: Long.UZERO, + type: "", + config: new Uint8Array() + }; +} + +export const AccountAuthenticator = { + encode(message: AccountAuthenticator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + + if (message.type !== "") { + writer.uint32(18).string(message.type); + } + + if (message.config.length !== 0) { + writer.uint32(26).bytes(message.config); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccountAuthenticator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountAuthenticator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = (reader.uint64() as Long); + break; + + case 2: + message.type = reader.string(); + break; + + case 3: + message.config = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccountAuthenticator { + const message = createBaseAccountAuthenticator(); + message.id = object.id !== undefined && object.id !== null ? Long.fromValue(object.id) : Long.UZERO; + message.type = object.type ?? ""; + message.config = object.config ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/params.ts new file mode 100644 index 00000000000..e0c5fb5adb4 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/params.ts @@ -0,0 +1,67 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** Params defines the parameters for the module. */ + +export interface Params { + /** + * IsSmartAccountActive defines the state of the authenticator. + * If set to false, the authenticator module will not be used + * and the classic cosmos sdk authentication will be used instead. + */ + isSmartAccountActive: boolean; +} +/** Params defines the parameters for the module. */ + +export interface ParamsSDKType { + /** + * IsSmartAccountActive defines the state of the authenticator. + * If set to false, the authenticator module will not be used + * and the classic cosmos sdk authentication will be used instead. + */ + is_smart_account_active: boolean; +} + +function createBaseParams(): Params { + return { + isSmartAccountActive: false + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.isSmartAccountActive === true) { + writer.uint32(8).bool(message.isSmartAccountActive); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.isSmartAccountActive = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.isSmartAccountActive = object.isSmartAccountActive ?? false; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.lcd.ts new file mode 100644 index 00000000000..d0a5e9d697d --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.lcd.ts @@ -0,0 +1,46 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, GetAuthenticatorRequest, GetAuthenticatorResponseSDKType, GetAuthenticatorsRequest, GetAuthenticatorsResponseSDKType, AccountStateRequest, AccountStateResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.getAuthenticator = this.getAuthenticator.bind(this); + this.getAuthenticators = this.getAuthenticators.bind(this); + this.accountState = this.accountState.bind(this); + } + /* Parameters queries the parameters of the module. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `dydxprotocol/accountplus/params`; + return await this.req.get(endpoint); + } + /* Queries a single authenticator by account and authenticator ID. 
*/ + + + async getAuthenticator(params: GetAuthenticatorRequest): Promise { + const endpoint = `dydxprotocol/accountplus/authenticator/${params.account}/${params.authenticatorId}`; + return await this.req.get(endpoint); + } + /* Queries all authenticators for a given account. */ + + + async getAuthenticators(params: GetAuthenticatorsRequest): Promise { + const endpoint = `dydxprotocol/accountplus/authenticators/${params.account}`; + return await this.req.get(endpoint); + } + /* Queries for an account state (timestamp nonce). */ + + + async accountState(params: AccountStateRequest): Promise { + const endpoint = `dydxprotocol/accountplus/account_state/${params.address}`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.rpc.Query.ts new file mode 100644 index 00000000000..85bf8dcafcb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.rpc.Query.ts @@ -0,0 +1,77 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, GetAuthenticatorRequest, GetAuthenticatorResponse, GetAuthenticatorsRequest, GetAuthenticatorsResponse, AccountStateRequest, AccountStateResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Parameters queries the parameters of the module. */ + params(request?: QueryParamsRequest): Promise; + /** Queries a single authenticator by account and authenticator ID. */ + + getAuthenticator(request: GetAuthenticatorRequest): Promise; + /** Queries all authenticators for a given account. */ + + getAuthenticators(request: GetAuthenticatorsRequest): Promise; + /** Queries for an account state (timestamp nonce). 
*/ + + accountState(request: AccountStateRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.getAuthenticator = this.getAuthenticator.bind(this); + this.getAuthenticators = this.getAuthenticators.bind(this); + this.accountState = this.accountState.bind(this); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + getAuthenticator(request: GetAuthenticatorRequest): Promise { + const data = GetAuthenticatorRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Query", "GetAuthenticator", data); + return promise.then(data => GetAuthenticatorResponse.decode(new _m0.Reader(data))); + } + + getAuthenticators(request: GetAuthenticatorsRequest): Promise { + const data = GetAuthenticatorsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Query", "GetAuthenticators", data); + return promise.then(data => GetAuthenticatorsResponse.decode(new _m0.Reader(data))); + } + + accountState(request: AccountStateRequest): Promise { + const data = AccountStateRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Query", "AccountState", data); + return promise.then(data => AccountStateResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + getAuthenticator(request: GetAuthenticatorRequest): Promise { + return 
queryService.getAuthenticator(request); + }, + + getAuthenticators(request: GetAuthenticatorsRequest): Promise { + return queryService.getAuthenticators(request); + }, + + accountState(request: AccountStateRequest): Promise { + return queryService.accountState(request); + } + + }; +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.ts new file mode 100644 index 00000000000..b109634853a --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/query.ts @@ -0,0 +1,462 @@ +import { AccountState, AccountStateSDKType } from "./accountplus"; +import { Params, ParamsSDKType } from "./params"; +import { AccountAuthenticator, AccountAuthenticatorSDKType } from "./models"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** AccountStateRequest is request type for the Query/AccountState RPC method. */ + +export interface AccountStateRequest { + /** AccountStateRequest is request type for the Query/AccountState RPC method. */ + address: string; +} +/** AccountStateRequest is request type for the Query/AccountState RPC method. */ + +export interface AccountStateRequestSDKType { + /** AccountStateRequest is request type for the Query/AccountState RPC method. */ + address: string; +} +/** + * AccountStateResponse is response type for the Query/GetAccountState RPC + * method. + */ + +export interface AccountStateResponse { + /** + * AccountStateResponse is response type for the Query/GetAccountState RPC + * method. + */ + accountState?: AccountState; +} +/** + * AccountStateResponse is response type for the Query/GetAccountState RPC + * method. + */ + +export interface AccountStateResponseSDKType { + /** + * AccountStateResponse is response type for the Query/GetAccountState RPC + * method. 
+ */ + account_state?: AccountStateSDKType; +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params holds all the parameters of this module. */ + params?: Params; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + /** params holds all the parameters of this module. */ + params?: ParamsSDKType; +} +/** MsgGetAuthenticatorsRequest defines the Msg/GetAuthenticators request type. */ + +export interface GetAuthenticatorsRequest { + /** MsgGetAuthenticatorsRequest defines the Msg/GetAuthenticators request type. */ + account: string; +} +/** MsgGetAuthenticatorsRequest defines the Msg/GetAuthenticators request type. */ + +export interface GetAuthenticatorsRequestSDKType { + /** MsgGetAuthenticatorsRequest defines the Msg/GetAuthenticators request type. */ + account: string; +} +/** MsgGetAuthenticatorsResponse defines the Msg/GetAuthenticators response type. */ + +export interface GetAuthenticatorsResponse { + accountAuthenticators: AccountAuthenticator[]; +} +/** MsgGetAuthenticatorsResponse defines the Msg/GetAuthenticators response type. */ + +export interface GetAuthenticatorsResponseSDKType { + account_authenticators: AccountAuthenticatorSDKType[]; +} +/** MsgGetAuthenticatorRequest defines the Msg/GetAuthenticator request type. */ + +export interface GetAuthenticatorRequest { + account: string; + authenticatorId: Long; +} +/** MsgGetAuthenticatorRequest defines the Msg/GetAuthenticator request type. 
*/ + +export interface GetAuthenticatorRequestSDKType { + account: string; + authenticator_id: Long; +} +/** MsgGetAuthenticatorResponse defines the Msg/GetAuthenticator response type. */ + +export interface GetAuthenticatorResponse { + accountAuthenticator?: AccountAuthenticator; +} +/** MsgGetAuthenticatorResponse defines the Msg/GetAuthenticator response type. */ + +export interface GetAuthenticatorResponseSDKType { + account_authenticator?: AccountAuthenticatorSDKType; +} + +function createBaseAccountStateRequest(): AccountStateRequest { + return { + address: "" + }; +} + +export const AccountStateRequest = { + encode(message: AccountStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccountStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccountStateRequest { + const message = createBaseAccountStateRequest(); + message.address = object.address ?? 
""; + return message; + } + +}; + +function createBaseAccountStateResponse(): AccountStateResponse { + return { + accountState: undefined + }; +} + +export const AccountStateResponse = { + encode(message: AccountStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.accountState !== undefined) { + AccountState.encode(message.accountState, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccountStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accountState = AccountState.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccountStateResponse { + const message = createBaseAccountStateResponse(); + message.accountState = object.accountState !== undefined && object.accountState !== null ? AccountState.fromPartial(object.accountState) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? 
Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseGetAuthenticatorsRequest(): GetAuthenticatorsRequest { + return { + account: "" + }; +} + +export const GetAuthenticatorsRequest = { + encode(message: GetAuthenticatorsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== "") { + writer.uint32(10).string(message.account); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthenticatorsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetAuthenticatorsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.account = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetAuthenticatorsRequest { + const message = createBaseGetAuthenticatorsRequest(); + message.account = object.account ?? ""; + return message; + } + +}; + +function createBaseGetAuthenticatorsResponse(): GetAuthenticatorsResponse { + return { + accountAuthenticators: [] + }; +} + +export const GetAuthenticatorsResponse = { + encode(message: GetAuthenticatorsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accountAuthenticators) { + AccountAuthenticator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthenticatorsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetAuthenticatorsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accountAuthenticators.push(AccountAuthenticator.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetAuthenticatorsResponse { + const message = createBaseGetAuthenticatorsResponse(); + message.accountAuthenticators = object.accountAuthenticators?.map(e => AccountAuthenticator.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseGetAuthenticatorRequest(): GetAuthenticatorRequest { + return { + account: "", + authenticatorId: Long.UZERO + }; +} + +export const GetAuthenticatorRequest = { + encode(message: GetAuthenticatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== "") { + writer.uint32(10).string(message.account); + } + + if (!message.authenticatorId.isZero()) { + writer.uint32(16).uint64(message.authenticatorId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthenticatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetAuthenticatorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.account = reader.string(); + break; + + case 2: + message.authenticatorId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetAuthenticatorRequest { + const message = createBaseGetAuthenticatorRequest(); + message.account = object.account ?? ""; + message.authenticatorId = object.authenticatorId !== undefined && object.authenticatorId !== null ? 
Long.fromValue(object.authenticatorId) : Long.UZERO; + return message; + } + +}; + +function createBaseGetAuthenticatorResponse(): GetAuthenticatorResponse { + return { + accountAuthenticator: undefined + }; +} + +export const GetAuthenticatorResponse = { + encode(message: GetAuthenticatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.accountAuthenticator !== undefined) { + AccountAuthenticator.encode(message.accountAuthenticator, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthenticatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetAuthenticatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accountAuthenticator = AccountAuthenticator.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetAuthenticatorResponse { + const message = createBaseGetAuthenticatorResponse(); + message.accountAuthenticator = object.accountAuthenticator !== undefined && object.accountAuthenticator !== null ? 
AccountAuthenticator.fromPartial(object.accountAuthenticator) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.rpc.msg.ts new file mode 100644 index 00000000000..9582fd403d9 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.rpc.msg.ts @@ -0,0 +1,47 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgAddAuthenticator, MsgAddAuthenticatorResponse, MsgRemoveAuthenticator, MsgRemoveAuthenticatorResponse, MsgSetActiveState, MsgSetActiveStateResponse } from "./tx"; +/** Msg defines the Msg service. */ + +export interface Msg { + /** AddAuthenticator adds an authenticator to an account. */ + addAuthenticator(request: MsgAddAuthenticator): Promise; + /** RemoveAuthenticator removes an authenticator from an account. */ + + removeAuthenticator(request: MsgRemoveAuthenticator): Promise; + /** + * SetActiveState sets the active state of the authenticator. + * Primarily used for circuit breaking. 
+ */ + + setActiveState(request: MsgSetActiveState): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.addAuthenticator = this.addAuthenticator.bind(this); + this.removeAuthenticator = this.removeAuthenticator.bind(this); + this.setActiveState = this.setActiveState.bind(this); + } + + addAuthenticator(request: MsgAddAuthenticator): Promise { + const data = MsgAddAuthenticator.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Msg", "AddAuthenticator", data); + return promise.then(data => MsgAddAuthenticatorResponse.decode(new _m0.Reader(data))); + } + + removeAuthenticator(request: MsgRemoveAuthenticator): Promise { + const data = MsgRemoveAuthenticator.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Msg", "RemoveAuthenticator", data); + return promise.then(data => MsgRemoveAuthenticatorResponse.decode(new _m0.Reader(data))); + } + + setActiveState(request: MsgSetActiveState): Promise { + const data = MsgSetActiveState.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.accountplus.Msg", "SetActiveState", data); + return promise.then(data => MsgSetActiveStateResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.ts new file mode 100644 index 00000000000..92840d16628 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/accountplus/tx.ts @@ -0,0 +1,470 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** MsgAddAuthenticatorRequest defines the Msg/AddAuthenticator request type. 
*/ + +export interface MsgAddAuthenticator { + sender: string; + authenticatorType: string; + data: Uint8Array; +} +/** MsgAddAuthenticatorRequest defines the Msg/AddAuthenticator request type. */ + +export interface MsgAddAuthenticatorSDKType { + sender: string; + authenticator_type: string; + data: Uint8Array; +} +/** MsgAddAuthenticatorResponse defines the Msg/AddAuthenticator response type. */ + +export interface MsgAddAuthenticatorResponse { + /** MsgAddAuthenticatorResponse defines the Msg/AddAuthenticator response type. */ + success: boolean; +} +/** MsgAddAuthenticatorResponse defines the Msg/AddAuthenticator response type. */ + +export interface MsgAddAuthenticatorResponseSDKType { + /** MsgAddAuthenticatorResponse defines the Msg/AddAuthenticator response type. */ + success: boolean; +} +/** + * MsgRemoveAuthenticatorRequest defines the Msg/RemoveAuthenticator request + * type. + */ + +export interface MsgRemoveAuthenticator { + sender: string; + id: Long; +} +/** + * MsgRemoveAuthenticatorRequest defines the Msg/RemoveAuthenticator request + * type. + */ + +export interface MsgRemoveAuthenticatorSDKType { + sender: string; + id: Long; +} +/** + * MsgRemoveAuthenticatorResponse defines the Msg/RemoveAuthenticator response + * type. + */ + +export interface MsgRemoveAuthenticatorResponse { + /** + * MsgRemoveAuthenticatorResponse defines the Msg/RemoveAuthenticator response + * type. + */ + success: boolean; +} +/** + * MsgRemoveAuthenticatorResponse defines the Msg/RemoveAuthenticator response + * type. + */ + +export interface MsgRemoveAuthenticatorResponseSDKType { + /** + * MsgRemoveAuthenticatorResponse defines the Msg/RemoveAuthenticator response + * type. + */ + success: boolean; +} +/** MsgSetActiveState sets the active state of the module. */ + +export interface MsgSetActiveState { + /** Authority is the address that may send this message. */ + authority: string; + active: boolean; +} +/** MsgSetActiveState sets the active state of the module. 
*/ + +export interface MsgSetActiveStateSDKType { + /** Authority is the address that may send this message. */ + authority: string; + active: boolean; +} +/** MsgSetActiveStateResponse defines the Msg/SetActiveState response type. */ + +export interface MsgSetActiveStateResponse {} +/** MsgSetActiveStateResponse defines the Msg/SetActiveState response type. */ + +export interface MsgSetActiveStateResponseSDKType {} +/** + * TxExtension allows for additional authenticator-specific data in + * transactions. + */ + +export interface TxExtension { + /** + * selected_authenticators holds the authenticator_id for the chosen + * authenticator per message. + */ + selectedAuthenticators: Long[]; +} +/** + * TxExtension allows for additional authenticator-specific data in + * transactions. + */ + +export interface TxExtensionSDKType { + /** + * selected_authenticators holds the authenticator_id for the chosen + * authenticator per message. + */ + selected_authenticators: Long[]; +} + +function createBaseMsgAddAuthenticator(): MsgAddAuthenticator { + return { + sender: "", + authenticatorType: "", + data: new Uint8Array() + }; +} + +export const MsgAddAuthenticator = { + encode(message: MsgAddAuthenticator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.authenticatorType !== "") { + writer.uint32(18).string(message.authenticatorType); + } + + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddAuthenticator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAddAuthenticator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.authenticatorType = reader.string(); + break; + + case 3: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgAddAuthenticator { + const message = createBaseMsgAddAuthenticator(); + message.sender = object.sender ?? ""; + message.authenticatorType = object.authenticatorType ?? ""; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgAddAuthenticatorResponse(): MsgAddAuthenticatorResponse { + return { + success: false + }; +} + +export const MsgAddAuthenticatorResponse = { + encode(message: MsgAddAuthenticatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.success === true) { + writer.uint32(8).bool(message.success); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddAuthenticatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAddAuthenticatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.success = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgAddAuthenticatorResponse { + const message = createBaseMsgAddAuthenticatorResponse(); + message.success = object.success ?? 
false; + return message; + } + +}; + +function createBaseMsgRemoveAuthenticator(): MsgRemoveAuthenticator { + return { + sender: "", + id: Long.UZERO + }; +} + +export const MsgRemoveAuthenticator = { + encode(message: MsgRemoveAuthenticator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (!message.id.isZero()) { + writer.uint32(16).uint64(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRemoveAuthenticator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRemoveAuthenticator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.id = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRemoveAuthenticator { + const message = createBaseMsgRemoveAuthenticator(); + message.sender = object.sender ?? ""; + message.id = object.id !== undefined && object.id !== null ? Long.fromValue(object.id) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgRemoveAuthenticatorResponse(): MsgRemoveAuthenticatorResponse { + return { + success: false + }; +} + +export const MsgRemoveAuthenticatorResponse = { + encode(message: MsgRemoveAuthenticatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.success === true) { + writer.uint32(8).bool(message.success); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRemoveAuthenticatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRemoveAuthenticatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.success = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRemoveAuthenticatorResponse { + const message = createBaseMsgRemoveAuthenticatorResponse(); + message.success = object.success ?? false; + return message; + } + +}; + +function createBaseMsgSetActiveState(): MsgSetActiveState { + return { + authority: "", + active: false + }; +} + +export const MsgSetActiveState = { + encode(message: MsgSetActiveState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.active === true) { + writer.uint32(16).bool(message.active); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetActiveState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetActiveState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.active = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetActiveState { + const message = createBaseMsgSetActiveState(); + message.authority = object.authority ?? ""; + message.active = object.active ?? 
false; + return message; + } + +}; + +function createBaseMsgSetActiveStateResponse(): MsgSetActiveStateResponse { + return {}; +} + +export const MsgSetActiveStateResponse = { + encode(_: MsgSetActiveStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetActiveStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetActiveStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetActiveStateResponse { + const message = createBaseMsgSetActiveStateResponse(); + return message; + } + +}; + +function createBaseTxExtension(): TxExtension { + return { + selectedAuthenticators: [] + }; +} + +export const TxExtension = { + encode(message: TxExtension, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.selectedAuthenticators) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxExtension { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxExtension(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.selectedAuthenticators.push((reader.uint64() as Long)); + } + } else { + message.selectedAuthenticators.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxExtension { + const message = createBaseTxExtension(); + message.selectedAuthenticators = object.selectedAuthenticators?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/affiliates.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/affiliates.ts new file mode 100644 index 00000000000..f6ded433c8a --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/affiliates.ts @@ -0,0 +1,286 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** AffiliateTiers defines the affiliate tiers. */ + +export interface AffiliateTiers { + /** All affiliate tiers */ + tiers: AffiliateTiers_Tier[]; +} +/** AffiliateTiers defines the affiliate tiers. */ + +export interface AffiliateTiersSDKType { + /** All affiliate tiers */ + tiers: AffiliateTiers_TierSDKType[]; +} +/** Tier defines an affiliate tier. */ + +export interface AffiliateTiers_Tier { + /** Required all-time referred volume in quote quantums. */ + reqReferredVolumeQuoteQuantums: Long; + /** Required currently staked native tokens (in whole coins). */ + + reqStakedWholeCoins: number; + /** Taker fee share in parts-per-million. */ + + takerFeeSharePpm: number; +} +/** Tier defines an affiliate tier. 
*/ + +export interface AffiliateTiers_TierSDKType { + /** Required all-time referred volume in quote quantums. */ + req_referred_volume_quote_quantums: Long; + /** Required currently staked native tokens (in whole coins). */ + + req_staked_whole_coins: number; + /** Taker fee share in parts-per-million. */ + + taker_fee_share_ppm: number; +} +/** + * AffiliateWhitelist specifies the whitelisted affiliates. + * If an address is in the whitelist, then the affiliate fee share in + * this object will override fee share from the regular affiliate tiers above. + */ + +export interface AffiliateWhitelist { + /** All affiliate whitelist tiers. */ + tiers: AffiliateWhitelist_Tier[]; +} +/** + * AffiliateWhitelist specifies the whitelisted affiliates. + * If an address is in the whitelist, then the affiliate fee share in + * this object will override fee share from the regular affiliate tiers above. + */ + +export interface AffiliateWhitelistSDKType { + /** All affiliate whitelist tiers. */ + tiers: AffiliateWhitelist_TierSDKType[]; +} +/** Tier defines an affiliate whitelist tier. */ + +export interface AffiliateWhitelist_Tier { + /** List of unique whitelisted addresses. */ + addresses: string[]; + /** Taker fee share in parts-per-million. */ + + takerFeeSharePpm: number; +} +/** Tier defines an affiliate whitelist tier. */ + +export interface AffiliateWhitelist_TierSDKType { + /** List of unique whitelisted addresses. */ + addresses: string[]; + /** Taker fee share in parts-per-million. 
*/ + + taker_fee_share_ppm: number; +} + +function createBaseAffiliateTiers(): AffiliateTiers { + return { + tiers: [] + }; +} + +export const AffiliateTiers = { + encode(message: AffiliateTiers, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.tiers) { + AffiliateTiers_Tier.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateTiers { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateTiers(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tiers.push(AffiliateTiers_Tier.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateTiers { + const message = createBaseAffiliateTiers(); + message.tiers = object.tiers?.map(e => AffiliateTiers_Tier.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAffiliateTiers_Tier(): AffiliateTiers_Tier { + return { + reqReferredVolumeQuoteQuantums: Long.UZERO, + reqStakedWholeCoins: 0, + takerFeeSharePpm: 0 + }; +} + +export const AffiliateTiers_Tier = { + encode(message: AffiliateTiers_Tier, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.reqReferredVolumeQuoteQuantums.isZero()) { + writer.uint32(8).uint64(message.reqReferredVolumeQuoteQuantums); + } + + if (message.reqStakedWholeCoins !== 0) { + writer.uint32(16).uint32(message.reqStakedWholeCoins); + } + + if (message.takerFeeSharePpm !== 0) { + writer.uint32(24).uint32(message.takerFeeSharePpm); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateTiers_Tier { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateTiers_Tier(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.reqReferredVolumeQuoteQuantums = (reader.uint64() as Long); + break; + + case 2: + message.reqStakedWholeCoins = reader.uint32(); + break; + + case 3: + message.takerFeeSharePpm = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateTiers_Tier { + const message = createBaseAffiliateTiers_Tier(); + message.reqReferredVolumeQuoteQuantums = object.reqReferredVolumeQuoteQuantums !== undefined && object.reqReferredVolumeQuoteQuantums !== null ? Long.fromValue(object.reqReferredVolumeQuoteQuantums) : Long.UZERO; + message.reqStakedWholeCoins = object.reqStakedWholeCoins ?? 0; + message.takerFeeSharePpm = object.takerFeeSharePpm ?? 0; + return message; + } + +}; + +function createBaseAffiliateWhitelist(): AffiliateWhitelist { + return { + tiers: [] + }; +} + +export const AffiliateWhitelist = { + encode(message: AffiliateWhitelist, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.tiers) { + AffiliateWhitelist_Tier.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateWhitelist { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAffiliateWhitelist(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tiers.push(AffiliateWhitelist_Tier.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateWhitelist { + const message = createBaseAffiliateWhitelist(); + message.tiers = object.tiers?.map(e => AffiliateWhitelist_Tier.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAffiliateWhitelist_Tier(): AffiliateWhitelist_Tier { + return { + addresses: [], + takerFeeSharePpm: 0 + }; +} + +export const AffiliateWhitelist_Tier = { + encode(message: AffiliateWhitelist_Tier, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.addresses) { + writer.uint32(10).string(v!); + } + + if (message.takerFeeSharePpm !== 0) { + writer.uint32(16).uint32(message.takerFeeSharePpm); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateWhitelist_Tier { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateWhitelist_Tier(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addresses.push(reader.string()); + break; + + case 2: + message.takerFeeSharePpm = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateWhitelist_Tier { + const message = createBaseAffiliateWhitelist_Tier(); + message.addresses = object.addresses?.map(e => e) || []; + message.takerFeeSharePpm = object.takerFeeSharePpm ?? 
0; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/genesis.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/genesis.ts new file mode 100644 index 00000000000..17ec53fdf64 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/genesis.ts @@ -0,0 +1,60 @@ +import { AffiliateTiers, AffiliateTiersSDKType } from "./affiliates"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** GenesisState defines generis state of `x/affiliates` */ + +export interface GenesisState { + /** The list of affiliate tiers */ + affiliateTiers?: AffiliateTiers; +} +/** GenesisState defines generis state of `x/affiliates` */ + +export interface GenesisStateSDKType { + /** The list of affiliate tiers */ + affiliate_tiers?: AffiliateTiersSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + affiliateTiers: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.affiliateTiers !== undefined) { + AffiliateTiers.encode(message.affiliateTiers, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.affiliateTiers = AffiliateTiers.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.affiliateTiers = object.affiliateTiers !== undefined && object.affiliateTiers !== null ? AffiliateTiers.fromPartial(object.affiliateTiers) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.lcd.ts new file mode 100644 index 00000000000..4abc5b7a4a1 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.lcd.ts @@ -0,0 +1,46 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { AffiliateInfoRequest, AffiliateInfoResponseSDKType, ReferredByRequest, ReferredByResponseSDKType, AllAffiliateTiersRequest, AllAffiliateTiersResponseSDKType, AffiliateWhitelistRequest, AffiliateWhitelistResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.affiliateInfo = this.affiliateInfo.bind(this); + this.referredBy = this.referredBy.bind(this); + this.allAffiliateTiers = this.allAffiliateTiers.bind(this); + this.affiliateWhitelist = this.affiliateWhitelist.bind(this); + } + /* Query AffiliateInfo returns the affiliate info for a given address. 
*/ + + + async affiliateInfo(params: AffiliateInfoRequest): Promise { + const endpoint = `dydxprotocol/affiliates/affiliate_info/${params.address}`; + return await this.req.get(endpoint); + } + /* Query ReferredBy returns the affiliate that referred a given address. */ + + + async referredBy(params: ReferredByRequest): Promise { + const endpoint = `dydxprotocol/affiliates/referred_by/${params.address}`; + return await this.req.get(endpoint); + } + /* Query AllAffiliateTiers returns all affiliate tiers. */ + + + async allAffiliateTiers(_params: AllAffiliateTiersRequest = {}): Promise { + const endpoint = `dydxprotocol/affiliates/all_affiliate_tiers`; + return await this.req.get(endpoint); + } + /* Query AffiliateWhitelist returns the affiliate whitelist. */ + + + async affiliateWhitelist(_params: AffiliateWhitelistRequest = {}): Promise { + const endpoint = `dydxprotocol/affiliates/affiliate_whitelist`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.rpc.Query.ts new file mode 100644 index 00000000000..4815a3585df --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.rpc.Query.ts @@ -0,0 +1,77 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { AffiliateInfoRequest, AffiliateInfoResponse, ReferredByRequest, ReferredByResponse, AllAffiliateTiersRequest, AllAffiliateTiersResponse, AffiliateWhitelistRequest, AffiliateWhitelistResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Query AffiliateInfo returns the affiliate info for a given address. 
*/ + affiliateInfo(request: AffiliateInfoRequest): Promise; + /** Query ReferredBy returns the affiliate that referred a given address. */ + + referredBy(request: ReferredByRequest): Promise; + /** Query AllAffiliateTiers returns all affiliate tiers. */ + + allAffiliateTiers(request?: AllAffiliateTiersRequest): Promise; + /** Query AffiliateWhitelist returns the affiliate whitelist. */ + + affiliateWhitelist(request?: AffiliateWhitelistRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.affiliateInfo = this.affiliateInfo.bind(this); + this.referredBy = this.referredBy.bind(this); + this.allAffiliateTiers = this.allAffiliateTiers.bind(this); + this.affiliateWhitelist = this.affiliateWhitelist.bind(this); + } + + affiliateInfo(request: AffiliateInfoRequest): Promise { + const data = AffiliateInfoRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Query", "AffiliateInfo", data); + return promise.then(data => AffiliateInfoResponse.decode(new _m0.Reader(data))); + } + + referredBy(request: ReferredByRequest): Promise { + const data = ReferredByRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Query", "ReferredBy", data); + return promise.then(data => ReferredByResponse.decode(new _m0.Reader(data))); + } + + allAffiliateTiers(request: AllAffiliateTiersRequest = {}): Promise { + const data = AllAffiliateTiersRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Query", "AllAffiliateTiers", data); + return promise.then(data => AllAffiliateTiersResponse.decode(new _m0.Reader(data))); + } + + affiliateWhitelist(request: AffiliateWhitelistRequest = {}): Promise { + const data = AffiliateWhitelistRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Query", "AffiliateWhitelist", data); + return promise.then(data => 
AffiliateWhitelistResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + affiliateInfo(request: AffiliateInfoRequest): Promise { + return queryService.affiliateInfo(request); + }, + + referredBy(request: ReferredByRequest): Promise { + return queryService.referredBy(request); + }, + + allAffiliateTiers(request?: AllAffiliateTiersRequest): Promise { + return queryService.allAffiliateTiers(request); + }, + + affiliateWhitelist(request?: AffiliateWhitelistRequest): Promise { + return queryService.affiliateWhitelist(request); + } + + }; +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.ts new file mode 100644 index 00000000000..1cdebd2f257 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/query.ts @@ -0,0 +1,533 @@ +import { AffiliateTiers, AffiliateTiersSDKType, AffiliateWhitelist, AffiliateWhitelistSDKType } from "./affiliates"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * AffiliateInfoRequest is the request type for the Query/AffiliateInfo RPC + * method. + */ + +export interface AffiliateInfoRequest { + address: string; +} +/** + * AffiliateInfoRequest is the request type for the Query/AffiliateInfo RPC + * method. + */ + +export interface AffiliateInfoRequestSDKType { + address: string; +} +/** + * AffiliateInfoResponse is the response type for the Query/AffiliateInfo RPC + * method. + */ + +export interface AffiliateInfoResponse { + /** Whether the address is a whitelisted affiliate (VIP). */ + isWhitelisted: boolean; + /** + * If `is_whiteslisted == false`, the affiliate's tier qualified through + * regular affiliate program. 
+ */ + + tier: number; + /** + * The affiliate's taker fee share in parts-per-million (for both VIP and + * regular affiliate). + */ + + feeSharePpm: number; + /** The affiliate's all-time referred volume in quote quantums. */ + + referredVolume: Uint8Array; + /** The affiliate's currently staked native tokens (in whole coins). */ + + stakedAmount: Uint8Array; +} +/** + * AffiliateInfoResponse is the response type for the Query/AffiliateInfo RPC + * method. + */ + +export interface AffiliateInfoResponseSDKType { + /** Whether the address is a whitelisted affiliate (VIP). */ + is_whitelisted: boolean; + /** + * If `is_whiteslisted == false`, the affiliate's tier qualified through + * regular affiliate program. + */ + + tier: number; + /** + * The affiliate's taker fee share in parts-per-million (for both VIP and + * regular affiliate). + */ + + fee_share_ppm: number; + /** The affiliate's all-time referred volume in quote quantums. */ + + referred_volume: Uint8Array; + /** The affiliate's currently staked native tokens (in whole coins). */ + + staked_amount: Uint8Array; +} +/** ReferredByRequest is the request type for the Query/ReferredBy RPC method. */ + +export interface ReferredByRequest { + /** The address to query. */ + address: string; +} +/** ReferredByRequest is the request type for the Query/ReferredBy RPC method. */ + +export interface ReferredByRequestSDKType { + /** The address to query. */ + address: string; +} +/** ReferredByResponse is the response type for the Query/ReferredBy RPC method. */ + +export interface ReferredByResponse { + /** The affiliate's address that referred the queried address. */ + affiliateAddress: string; +} +/** ReferredByResponse is the response type for the Query/ReferredBy RPC method. */ + +export interface ReferredByResponseSDKType { + /** The affiliate's address that referred the queried address. 
*/ + affiliate_address: string; +} +/** + * AllAffiliateTiersRequest is the request type for the Query/AllAffiliateTiers + * RPC method. + */ + +export interface AllAffiliateTiersRequest {} +/** + * AllAffiliateTiersRequest is the request type for the Query/AllAffiliateTiers + * RPC method. + */ + +export interface AllAffiliateTiersRequestSDKType {} +/** + * AllAffiliateTiersResponse is the response type for the + * Query/AllAffiliateTiers RPC method. + */ + +export interface AllAffiliateTiersResponse { + /** All affiliate tiers information. */ + tiers?: AffiliateTiers; +} +/** + * AllAffiliateTiersResponse is the response type for the + * Query/AllAffiliateTiers RPC method. + */ + +export interface AllAffiliateTiersResponseSDKType { + /** All affiliate tiers information. */ + tiers?: AffiliateTiersSDKType; +} +/** + * AffiliateWhitelistRequest is the request type for the + * Query/AffiliateWhitelist RPC method. + */ + +export interface AffiliateWhitelistRequest {} +/** + * AffiliateWhitelistRequest is the request type for the + * Query/AffiliateWhitelist RPC method. + */ + +export interface AffiliateWhitelistRequestSDKType {} +/** + * AffiliateWhitelistResponse is the response type for the + * Query/AffiliateWhitelist RPC method. + */ + +export interface AffiliateWhitelistResponse { + whitelist?: AffiliateWhitelist; +} +/** + * AffiliateWhitelistResponse is the response type for the + * Query/AffiliateWhitelist RPC method. 
+ */ + +export interface AffiliateWhitelistResponseSDKType { + whitelist?: AffiliateWhitelistSDKType; +} + +function createBaseAffiliateInfoRequest(): AffiliateInfoRequest { + return { + address: "" + }; +} + +export const AffiliateInfoRequest = { + encode(message: AffiliateInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateInfoRequest { + const message = createBaseAffiliateInfoRequest(); + message.address = object.address ?? 
""; + return message; + } + +}; + +function createBaseAffiliateInfoResponse(): AffiliateInfoResponse { + return { + isWhitelisted: false, + tier: 0, + feeSharePpm: 0, + referredVolume: new Uint8Array(), + stakedAmount: new Uint8Array() + }; +} + +export const AffiliateInfoResponse = { + encode(message: AffiliateInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.isWhitelisted === true) { + writer.uint32(8).bool(message.isWhitelisted); + } + + if (message.tier !== 0) { + writer.uint32(16).uint32(message.tier); + } + + if (message.feeSharePpm !== 0) { + writer.uint32(24).uint32(message.feeSharePpm); + } + + if (message.referredVolume.length !== 0) { + writer.uint32(34).bytes(message.referredVolume); + } + + if (message.stakedAmount.length !== 0) { + writer.uint32(42).bytes(message.stakedAmount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.isWhitelisted = reader.bool(); + break; + + case 2: + message.tier = reader.uint32(); + break; + + case 3: + message.feeSharePpm = reader.uint32(); + break; + + case 4: + message.referredVolume = reader.bytes(); + break; + + case 5: + message.stakedAmount = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateInfoResponse { + const message = createBaseAffiliateInfoResponse(); + message.isWhitelisted = object.isWhitelisted ?? false; + message.tier = object.tier ?? 0; + message.feeSharePpm = object.feeSharePpm ?? 0; + message.referredVolume = object.referredVolume ?? new Uint8Array(); + message.stakedAmount = object.stakedAmount ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseReferredByRequest(): ReferredByRequest { + return { + address: "" + }; +} + +export const ReferredByRequest = { + encode(message: ReferredByRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ReferredByRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseReferredByRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ReferredByRequest { + const message = createBaseReferredByRequest(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseReferredByResponse(): ReferredByResponse { + return { + affiliateAddress: "" + }; +} + +export const ReferredByResponse = { + encode(message: ReferredByResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.affiliateAddress !== "") { + writer.uint32(10).string(message.affiliateAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ReferredByResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseReferredByResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.affiliateAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ReferredByResponse { + const message = createBaseReferredByResponse(); + message.affiliateAddress = object.affiliateAddress ?? ""; + return message; + } + +}; + +function createBaseAllAffiliateTiersRequest(): AllAffiliateTiersRequest { + return {}; +} + +export const AllAffiliateTiersRequest = { + encode(_: AllAffiliateTiersRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AllAffiliateTiersRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAllAffiliateTiersRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): AllAffiliateTiersRequest { + const message = createBaseAllAffiliateTiersRequest(); + return message; + } + +}; + +function createBaseAllAffiliateTiersResponse(): AllAffiliateTiersResponse { + return { + tiers: undefined + }; +} + +export const AllAffiliateTiersResponse = { + encode(message: AllAffiliateTiersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tiers !== undefined) { + AffiliateTiers.encode(message.tiers, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AllAffiliateTiersResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAllAffiliateTiersResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tiers = AffiliateTiers.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AllAffiliateTiersResponse { + const message = createBaseAllAffiliateTiersResponse(); + message.tiers = object.tiers !== undefined && object.tiers !== null ? AffiliateTiers.fromPartial(object.tiers) : undefined; + return message; + } + +}; + +function createBaseAffiliateWhitelistRequest(): AffiliateWhitelistRequest { + return {}; +} + +export const AffiliateWhitelistRequest = { + encode(_: AffiliateWhitelistRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateWhitelistRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAffiliateWhitelistRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): AffiliateWhitelistRequest { + const message = createBaseAffiliateWhitelistRequest(); + return message; + } + +}; + +function createBaseAffiliateWhitelistResponse(): AffiliateWhitelistResponse { + return { + whitelist: undefined + }; +} + +export const AffiliateWhitelistResponse = { + encode(message: AffiliateWhitelistResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.whitelist !== undefined) { + AffiliateWhitelist.encode(message.whitelist, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AffiliateWhitelistResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAffiliateWhitelistResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.whitelist = AffiliateWhitelist.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AffiliateWhitelistResponse { + const message = createBaseAffiliateWhitelistResponse(); + message.whitelist = object.whitelist !== undefined && object.whitelist !== null ? 
AffiliateWhitelist.fromPartial(object.whitelist) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.rpc.msg.ts new file mode 100644 index 00000000000..da9e7b4ae3f --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.rpc.msg.ts @@ -0,0 +1,44 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgRegisterAffiliate, MsgRegisterAffiliateResponse, MsgUpdateAffiliateTiers, MsgUpdateAffiliateTiersResponse, MsgUpdateAffiliateWhitelist, MsgUpdateAffiliateWhitelistResponse } from "./tx"; +/** Msg defines the Msg service. */ + +export interface Msg { + /** RegisterAffiliate registers a referee-affiliate relationship */ + registerAffiliate(request: MsgRegisterAffiliate): Promise; + /** UpdateAffiliateTiers updates affiliate tiers */ + + updateAffiliateTiers(request: MsgUpdateAffiliateTiers): Promise; + /** UpdateAffiliateWhitelist updates affiliate whitelist */ + + updateAffiliateWhitelist(request: MsgUpdateAffiliateWhitelist): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.registerAffiliate = this.registerAffiliate.bind(this); + this.updateAffiliateTiers = this.updateAffiliateTiers.bind(this); + this.updateAffiliateWhitelist = this.updateAffiliateWhitelist.bind(this); + } + + registerAffiliate(request: MsgRegisterAffiliate): Promise { + const data = MsgRegisterAffiliate.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Msg", "RegisterAffiliate", data); + return promise.then(data => MsgRegisterAffiliateResponse.decode(new _m0.Reader(data))); + } + + updateAffiliateTiers(request: MsgUpdateAffiliateTiers): Promise { + const data = MsgUpdateAffiliateTiers.encode(request).finish(); + const promise = 
this.rpc.request("dydxprotocol.affiliates.Msg", "UpdateAffiliateTiers", data); + return promise.then(data => MsgUpdateAffiliateTiersResponse.decode(new _m0.Reader(data))); + } + + updateAffiliateWhitelist(request: MsgUpdateAffiliateWhitelist): Promise { + const data = MsgUpdateAffiliateWhitelist.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.affiliates.Msg", "UpdateAffiliateWhitelist", data); + return promise.then(data => MsgUpdateAffiliateWhitelistResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.ts new file mode 100644 index 00000000000..1e6721656ac --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/affiliates/tx.ts @@ -0,0 +1,342 @@ +import { AffiliateTiers, AffiliateTiersSDKType, AffiliateWhitelist, AffiliateWhitelistSDKType } from "./affiliates"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** Message to register a referee-affiliate relationship */ + +export interface MsgRegisterAffiliate { + /** Address of the referee */ + referee: string; + /** Address of the affiliate */ + + affiliate: string; +} +/** Message to register a referee-affiliate relationship */ + +export interface MsgRegisterAffiliateSDKType { + /** Address of the referee */ + referee: string; + /** Address of the affiliate */ + + affiliate: string; +} +/** Response to MsgRegisterAffiliate */ + +export interface MsgRegisterAffiliateResponse {} +/** Response to MsgRegisterAffiliate */ + +export interface MsgRegisterAffiliateResponseSDKType {} +/** Message to update affiliate tiers */ + +export interface MsgUpdateAffiliateTiers { + /** Authority sending this message. 
Will be sent by gov */ + authority: string; + /** Updated affiliate tiers information */ + + tiers?: AffiliateTiers; +} +/** Message to update affiliate tiers */ + +export interface MsgUpdateAffiliateTiersSDKType { + /** Authority sending this message. Will be sent by gov */ + authority: string; + /** Updated affiliate tiers information */ + + tiers?: AffiliateTiersSDKType; +} +/** Response to MsgUpdateAffiliateTiers */ + +export interface MsgUpdateAffiliateTiersResponse {} +/** Response to MsgUpdateAffiliateTiers */ + +export interface MsgUpdateAffiliateTiersResponseSDKType {} +/** Message to update affiliate whitelist */ + +export interface MsgUpdateAffiliateWhitelist { + /** Authority sending this message. Will be sent by gov */ + authority: string; + /** Updated affiliate whitelist information */ + + whitelist?: AffiliateWhitelist; +} +/** Message to update affiliate whitelist */ + +export interface MsgUpdateAffiliateWhitelistSDKType { + /** Authority sending this message. Will be sent by gov */ + authority: string; + /** Updated affiliate whitelist information */ + + whitelist?: AffiliateWhitelistSDKType; +} +/** Response to MsgUpdateAffiliateWhitelist */ + +export interface MsgUpdateAffiliateWhitelistResponse {} +/** Response to MsgUpdateAffiliateWhitelist */ + +export interface MsgUpdateAffiliateWhitelistResponseSDKType {} + +function createBaseMsgRegisterAffiliate(): MsgRegisterAffiliate { + return { + referee: "", + affiliate: "" + }; +} + +export const MsgRegisterAffiliate = { + encode(message: MsgRegisterAffiliate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.referee !== "") { + writer.uint32(10).string(message.referee); + } + + if (message.affiliate !== "") { + writer.uint32(18).string(message.affiliate); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRegisterAffiliate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRegisterAffiliate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.referee = reader.string(); + break; + + case 2: + message.affiliate = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRegisterAffiliate { + const message = createBaseMsgRegisterAffiliate(); + message.referee = object.referee ?? ""; + message.affiliate = object.affiliate ?? ""; + return message; + } + +}; + +function createBaseMsgRegisterAffiliateResponse(): MsgRegisterAffiliateResponse { + return {}; +} + +export const MsgRegisterAffiliateResponse = { + encode(_: MsgRegisterAffiliateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRegisterAffiliateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRegisterAffiliateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgRegisterAffiliateResponse { + const message = createBaseMsgRegisterAffiliateResponse(); + return message; + } + +}; + +function createBaseMsgUpdateAffiliateTiers(): MsgUpdateAffiliateTiers { + return { + authority: "", + tiers: undefined + }; +} + +export const MsgUpdateAffiliateTiers = { + encode(message: MsgUpdateAffiliateTiers, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.tiers !== undefined) { + AffiliateTiers.encode(message.tiers, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAffiliateTiers { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateAffiliateTiers(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.tiers = AffiliateTiers.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateAffiliateTiers { + const message = createBaseMsgUpdateAffiliateTiers(); + message.authority = object.authority ?? ""; + message.tiers = object.tiers !== undefined && object.tiers !== null ? 
AffiliateTiers.fromPartial(object.tiers) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateAffiliateTiersResponse(): MsgUpdateAffiliateTiersResponse { + return {}; +} + +export const MsgUpdateAffiliateTiersResponse = { + encode(_: MsgUpdateAffiliateTiersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAffiliateTiersResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateAffiliateTiersResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateAffiliateTiersResponse { + const message = createBaseMsgUpdateAffiliateTiersResponse(); + return message; + } + +}; + +function createBaseMsgUpdateAffiliateWhitelist(): MsgUpdateAffiliateWhitelist { + return { + authority: "", + whitelist: undefined + }; +} + +export const MsgUpdateAffiliateWhitelist = { + encode(message: MsgUpdateAffiliateWhitelist, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.whitelist !== undefined) { + AffiliateWhitelist.encode(message.whitelist, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAffiliateWhitelist { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateAffiliateWhitelist(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.whitelist = AffiliateWhitelist.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateAffiliateWhitelist { + const message = createBaseMsgUpdateAffiliateWhitelist(); + message.authority = object.authority ?? ""; + message.whitelist = object.whitelist !== undefined && object.whitelist !== null ? AffiliateWhitelist.fromPartial(object.whitelist) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateAffiliateWhitelistResponse(): MsgUpdateAffiliateWhitelistResponse { + return {}; +} + +export const MsgUpdateAffiliateWhitelistResponse = { + encode(_: MsgUpdateAffiliateWhitelistResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAffiliateWhitelistResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateAffiliateWhitelistResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateAffiliateWhitelistResponse { + const message = createBaseMsgUpdateAffiliateWhitelistResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/params.ts index 7f327d2f3c3..05483e50249 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/params.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/params.ts @@ -19,6 +19,32 @@ export interface DowntimeParamsSDKType { */ durations: DurationSDKType[]; } +/** SynchronyParams defines the parameters for block synchrony. */ + +export interface SynchronyParams { + /** + * next_block_delay replaces the locally configured timeout_commit in + * CometBFT. It determines the amount of time the CometBFT waits after the + * `CommitTime` (subjective time when +2/3 precommits were received), before + * moving to next height. + * If the application sends next_block_delay = 0 to the consensus engine, the + * latter defaults back to using timeout_commit. + */ + nextBlockDelay?: Duration; +} +/** SynchronyParams defines the parameters for block synchrony. */ + +export interface SynchronyParamsSDKType { + /** + * next_block_delay replaces the locally configured timeout_commit in + * CometBFT. It determines the amount of time the CometBFT waits after the + * `CommitTime` (subjective time when +2/3 precommits were received), before + * moving to next height. + * If the application sends next_block_delay = 0 to the consensus engine, the + * latter defaults back to using timeout_commit. 
+ */ + next_block_delay?: DurationSDKType; +} function createBaseDowntimeParams(): DowntimeParams { return { @@ -63,4 +89,49 @@ export const DowntimeParams = { return message; } +}; + +function createBaseSynchronyParams(): SynchronyParams { + return { + nextBlockDelay: undefined + }; +} + +export const SynchronyParams = { + encode(message: SynchronyParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextBlockDelay !== undefined) { + Duration.encode(message.nextBlockDelay, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SynchronyParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSynchronyParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nextBlockDelay = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SynchronyParams { + const message = createBaseSynchronyParams(); + message.nextBlockDelay = object.nextBlockDelay !== undefined && object.nextBlockDelay !== null ? 
Duration.fromPartial(object.nextBlockDelay) : undefined; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.lcd.ts index 0ce2b8ff63f..4f93ab00e4a 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.lcd.ts @@ -1,5 +1,5 @@ import { LCDClient } from "@osmonauts/lcd"; -import { QueryDowntimeParamsRequest, QueryDowntimeParamsResponseSDKType, QueryAllDowntimeInfoRequest, QueryAllDowntimeInfoResponseSDKType } from "./query"; +import { QueryDowntimeParamsRequest, QueryDowntimeParamsResponseSDKType, QueryAllDowntimeInfoRequest, QueryAllDowntimeInfoResponseSDKType, QuerySynchronyParamsRequest, QuerySynchronyParamsResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -11,6 +11,7 @@ export class LCDQueryClient { this.req = requestClient; this.downtimeParams = this.downtimeParams.bind(this); this.allDowntimeInfo = this.allDowntimeInfo.bind(this); + this.synchronyParams = this.synchronyParams.bind(this); } /* Queries the DowntimeParams. */ @@ -26,5 +27,12 @@ export class LCDQueryClient { const endpoint = `dydxprotocol/v4/blocktime/all_downtime_info`; return await this.req.get(endpoint); } + /* Queries the SynchronyParams. 
*/ + + + async synchronyParams(_params: QuerySynchronyParamsRequest = {}): Promise { + const endpoint = `dydxprotocol/v4/blocktime/synchrony_params`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.rpc.Query.ts index 7f7b23cd07b..eb65ce6ff66 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { QueryDowntimeParamsRequest, QueryDowntimeParamsResponse, QueryPreviousBlockInfoRequest, QueryPreviousBlockInfoResponse, QueryAllDowntimeInfoRequest, QueryAllDowntimeInfoResponse } from "./query"; +import { QueryDowntimeParamsRequest, QueryDowntimeParamsResponse, QueryPreviousBlockInfoRequest, QueryPreviousBlockInfoResponse, QueryAllDowntimeInfoRequest, QueryAllDowntimeInfoResponse, QuerySynchronyParamsRequest, QuerySynchronyParamsResponse } from "./query"; /** Query defines the gRPC querier service. */ export interface Query { @@ -13,6 +13,9 @@ export interface Query { /** Queries all recorded downtime info. */ allDowntimeInfo(request?: QueryAllDowntimeInfoRequest): Promise; + /** Queries the SynchronyParams. 
*/ + + synchronyParams(request?: QuerySynchronyParamsRequest): Promise; } export class QueryClientImpl implements Query { private readonly rpc: Rpc; @@ -22,6 +25,7 @@ export class QueryClientImpl implements Query { this.downtimeParams = this.downtimeParams.bind(this); this.previousBlockInfo = this.previousBlockInfo.bind(this); this.allDowntimeInfo = this.allDowntimeInfo.bind(this); + this.synchronyParams = this.synchronyParams.bind(this); } downtimeParams(request: QueryDowntimeParamsRequest = {}): Promise { @@ -42,6 +46,12 @@ export class QueryClientImpl implements Query { return promise.then(data => QueryAllDowntimeInfoResponse.decode(new _m0.Reader(data))); } + synchronyParams(request: QuerySynchronyParamsRequest = {}): Promise { + const data = QuerySynchronyParamsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.blocktime.Query", "SynchronyParams", data); + return promise.then(data => QuerySynchronyParamsResponse.decode(new _m0.Reader(data))); + } + } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); @@ -57,6 +67,10 @@ export const createRpcQueryExtension = (base: QueryClient) => { allDowntimeInfo(request?: QueryAllDowntimeInfoRequest): Promise { return queryService.allDowntimeInfo(request); + }, + + synchronyParams(request?: QuerySynchronyParamsRequest): Promise { + return queryService.synchronyParams(request); } }; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.ts index 0ec8d42f5d5..33a39ac84dd 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/query.ts @@ -1,7 +1,23 @@ -import { DowntimeParams, DowntimeParamsSDKType } from "./params"; +import { SynchronyParams, SynchronyParamsSDKType, DowntimeParams, DowntimeParamsSDKType } from "./params"; import { BlockInfo, 
BlockInfoSDKType, AllDowntimeInfo, AllDowntimeInfoSDKType } from "./blocktime"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; +/** QuerySynchronyParamsRequest is a request type for the SynchronyParams */ + +export interface QuerySynchronyParamsRequest {} +/** QuerySynchronyParamsRequest is a request type for the SynchronyParams */ + +export interface QuerySynchronyParamsRequestSDKType {} +/** QuerySynchronyParamsResponse is a response type for the SynchronyParams */ + +export interface QuerySynchronyParamsResponse { + params?: SynchronyParams; +} +/** QuerySynchronyParamsResponse is a response type for the SynchronyParams */ + +export interface QuerySynchronyParamsResponseSDKType { + params?: SynchronyParamsSDKType; +} /** * QueryDowntimeParamsRequest is a request type for the DowntimeParams * RPC method. @@ -103,6 +119,85 @@ export interface QueryAllDowntimeInfoResponseSDKType { info?: AllDowntimeInfoSDKType; } +function createBaseQuerySynchronyParamsRequest(): QuerySynchronyParamsRequest { + return {}; +} + +export const QuerySynchronyParamsRequest = { + encode(_: QuerySynchronyParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySynchronyParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySynchronyParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QuerySynchronyParamsRequest { + const message = createBaseQuerySynchronyParamsRequest(); + return message; + } + +}; + +function createBaseQuerySynchronyParamsResponse(): QuerySynchronyParamsResponse { + return { + params: undefined + }; +} + +export const QuerySynchronyParamsResponse = { + encode(message: QuerySynchronyParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + SynchronyParams.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySynchronyParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySynchronyParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = SynchronyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySynchronyParamsResponse { + const message = createBaseQuerySynchronyParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? 
SynchronyParams.fromPartial(object.params) : undefined; + return message; + } + +}; + function createBaseQueryDowntimeParamsRequest(): QueryDowntimeParamsRequest { return {}; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.rpc.msg.ts index c878a926c08..d0db3c98ff2 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.rpc.msg.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.rpc.msg.ts @@ -1,11 +1,14 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; -import { MsgUpdateDowntimeParams, MsgUpdateDowntimeParamsResponse } from "./tx"; +import { MsgUpdateDowntimeParams, MsgUpdateDowntimeParamsResponse, MsgUpdateSynchronyParams, MsgUpdateSynchronyParamsResponse } from "./tx"; /** Msg defines the Msg service. */ export interface Msg { /** UpdateDowntimeParams updates the DowntimeParams in state. */ updateDowntimeParams(request: MsgUpdateDowntimeParams): Promise; + /** UpdateSynchronyParams updates the SynchronyParams in state. 
*/ + + updateSynchronyParams(request: MsgUpdateSynchronyParams): Promise; } export class MsgClientImpl implements Msg { private readonly rpc: Rpc; @@ -13,6 +16,7 @@ export class MsgClientImpl implements Msg { constructor(rpc: Rpc) { this.rpc = rpc; this.updateDowntimeParams = this.updateDowntimeParams.bind(this); + this.updateSynchronyParams = this.updateSynchronyParams.bind(this); } updateDowntimeParams(request: MsgUpdateDowntimeParams): Promise { @@ -21,4 +25,10 @@ export class MsgClientImpl implements Msg { return promise.then(data => MsgUpdateDowntimeParamsResponse.decode(new _m0.Reader(data))); } + updateSynchronyParams(request: MsgUpdateSynchronyParams): Promise { + const data = MsgUpdateSynchronyParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.blocktime.Msg", "UpdateSynchronyParams", data); + return promise.then(data => MsgUpdateSynchronyParamsResponse.decode(new _m0.Reader(data))); + } + } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.ts index 33655a77327..9d56e359681 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/blocktime/tx.ts @@ -1,4 +1,4 @@ -import { DowntimeParams, DowntimeParamsSDKType } from "./params"; +import { DowntimeParams, DowntimeParamsSDKType, SynchronyParams, SynchronyParamsSDKType } from "./params"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; /** MsgUpdateDowntimeParams is the Msg/UpdateDowntimeParams request type. */ @@ -29,6 +29,34 @@ export interface MsgUpdateDowntimeParamsResponse {} */ export interface MsgUpdateDowntimeParamsResponseSDKType {} +/** MsgUpdateSynchronyParams is the Msg/UpdateSynchronyParams request type. */ + +export interface MsgUpdateSynchronyParams { + authority: string; + /** Defines the parameters to update. 
All parameters must be supplied. */ + + params?: SynchronyParams; +} +/** MsgUpdateSynchronyParams is the Msg/UpdateSynchronyParams request type. */ + +export interface MsgUpdateSynchronyParamsSDKType { + authority: string; + /** Defines the parameters to update. All parameters must be supplied. */ + + params?: SynchronyParamsSDKType; +} +/** + * MsgUpdateSynchronyParamsResponse is the Msg/UpdateSynchronyParams response + * type. + */ + +export interface MsgUpdateSynchronyParamsResponse {} +/** + * MsgUpdateSynchronyParamsResponse is the Msg/UpdateSynchronyParams response + * type. + */ + +export interface MsgUpdateSynchronyParamsResponseSDKType {} function createBaseMsgUpdateDowntimeParams(): MsgUpdateDowntimeParams { return { @@ -117,4 +145,93 @@ export const MsgUpdateDowntimeParamsResponse = { return message; } +}; + +function createBaseMsgUpdateSynchronyParams(): MsgUpdateSynchronyParams { + return { + authority: "", + params: undefined + }; +} + +export const MsgUpdateSynchronyParams = { + encode(message: MsgUpdateSynchronyParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.params !== undefined) { + SynchronyParams.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateSynchronyParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateSynchronyParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.params = SynchronyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateSynchronyParams { + const message = createBaseMsgUpdateSynchronyParams(); + message.authority = object.authority ?? ""; + message.params = object.params !== undefined && object.params !== null ? SynchronyParams.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateSynchronyParamsResponse(): MsgUpdateSynchronyParamsResponse { + return {}; +} + +export const MsgUpdateSynchronyParamsResponse = { + encode(_: MsgUpdateSynchronyParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateSynchronyParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateSynchronyParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateSynchronyParamsResponse { + const message = createBaseMsgUpdateSynchronyParamsResponse(); + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/bundle.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/bundle.ts index 3ac4f1e9804..69bef10149e 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/bundle.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/bundle.ts @@ -1,326 +1,412 @@ -import * as _5 from "./assets/asset"; -import * as _6 from "./assets/genesis"; -import * as _7 from "./assets/query"; -import * as _8 from "./assets/tx"; -import * as _9 from "./blocktime/blocktime"; -import * as _10 from "./blocktime/genesis"; -import * as _11 from "./blocktime/params"; -import * as _12 from "./blocktime/query"; -import * as _13 from "./blocktime/tx"; -import * as _14 from "./bridge/bridge_event_info"; -import * as _15 from "./bridge/bridge_event"; -import * as _16 from "./bridge/genesis"; -import * as _17 from "./bridge/params"; -import * as _18 from "./bridge/query"; -import * as _19 from "./bridge/tx"; -import * as _20 from "./clob/block_rate_limit_config"; -import * as _21 from "./clob/clob_pair"; -import * as _22 from "./clob/equity_tier_limit_config"; -import * as _23 from "./clob/genesis"; -import * as _24 from "./clob/liquidations_config"; -import * as _25 from "./clob/liquidations"; -import * as _26 from "./clob/matches"; -import * as _27 from "./clob/mev"; -import * as _28 from "./clob/operation"; -import * as _29 from "./clob/order_removals"; -import * as _30 from "./clob/order"; -import * as _31 from "./clob/process_proposer_matches_events"; -import * as _32 from 
"./clob/query"; -import * as _33 from "./clob/tx"; -import * as _34 from "./daemons/bridge/bridge"; -import * as _35 from "./daemons/liquidation/liquidation"; -import * as _36 from "./daemons/pricefeed/price_feed"; -import * as _37 from "./delaymsg/block_message_ids"; -import * as _38 from "./delaymsg/delayed_message"; -import * as _39 from "./delaymsg/genesis"; -import * as _40 from "./delaymsg/query"; -import * as _41 from "./delaymsg/tx"; -import * as _42 from "./epochs/epoch_info"; -import * as _43 from "./epochs/genesis"; -import * as _44 from "./epochs/query"; -import * as _45 from "./feetiers/genesis"; -import * as _46 from "./feetiers/params"; -import * as _47 from "./feetiers/query"; -import * as _48 from "./feetiers/tx"; -import * as _49 from "./govplus/genesis"; -import * as _50 from "./govplus/query"; -import * as _51 from "./govplus/tx"; -import * as _52 from "./indexer/events/events"; -import * as _53 from "./indexer/indexer_manager/event"; -import * as _54 from "./indexer/off_chain_updates/off_chain_updates"; -import * as _55 from "./indexer/protocol/v1/clob"; -import * as _56 from "./indexer/protocol/v1/subaccount"; -import * as _57 from "./indexer/redis/redis_order"; -import * as _58 from "./indexer/shared/removal_reason"; -import * as _59 from "./indexer/socks/messages"; -import * as _60 from "./perpetuals/genesis"; -import * as _61 from "./perpetuals/params"; -import * as _62 from "./perpetuals/perpetual"; -import * as _63 from "./perpetuals/query"; -import * as _64 from "./perpetuals/tx"; -import * as _65 from "./prices/genesis"; -import * as _66 from "./prices/market_param"; -import * as _67 from "./prices/market_price"; -import * as _68 from "./prices/query"; -import * as _69 from "./prices/tx"; -import * as _70 from "./ratelimit/capacity"; -import * as _71 from "./ratelimit/genesis"; -import * as _72 from "./ratelimit/limit_params"; -import * as _73 from "./ratelimit/query"; -import * as _74 from "./ratelimit/tx"; -import * as _75 from 
"./rewards/genesis"; -import * as _76 from "./rewards/params"; -import * as _77 from "./rewards/query"; -import * as _78 from "./rewards/reward_share"; -import * as _79 from "./rewards/tx"; -import * as _80 from "./sending/genesis"; -import * as _81 from "./sending/query"; -import * as _82 from "./sending/transfer"; -import * as _83 from "./sending/tx"; -import * as _84 from "./stats/genesis"; -import * as _85 from "./stats/params"; -import * as _86 from "./stats/query"; -import * as _87 from "./stats/stats"; -import * as _88 from "./stats/tx"; -import * as _89 from "./subaccounts/asset_position"; -import * as _90 from "./subaccounts/genesis"; -import * as _91 from "./subaccounts/perpetual_position"; -import * as _92 from "./subaccounts/query"; -import * as _93 from "./subaccounts/subaccount"; -import * as _94 from "./vault/genesis"; -import * as _95 from "./vault/query"; -import * as _96 from "./vault/tx"; -import * as _97 from "./vault/vault"; -import * as _98 from "./vest/genesis"; -import * as _99 from "./vest/query"; -import * as _100 from "./vest/tx"; -import * as _101 from "./vest/vest_entry"; -import * as _109 from "./assets/query.lcd"; -import * as _110 from "./blocktime/query.lcd"; -import * as _111 from "./bridge/query.lcd"; -import * as _112 from "./clob/query.lcd"; -import * as _113 from "./delaymsg/query.lcd"; -import * as _114 from "./epochs/query.lcd"; -import * as _115 from "./feetiers/query.lcd"; -import * as _116 from "./perpetuals/query.lcd"; -import * as _117 from "./prices/query.lcd"; -import * as _118 from "./ratelimit/query.lcd"; -import * as _119 from "./rewards/query.lcd"; -import * as _120 from "./stats/query.lcd"; -import * as _121 from "./subaccounts/query.lcd"; -import * as _122 from "./vest/query.lcd"; -import * as _123 from "./assets/query.rpc.Query"; -import * as _124 from "./blocktime/query.rpc.Query"; -import * as _125 from "./bridge/query.rpc.Query"; -import * as _126 from "./clob/query.rpc.Query"; -import * as _127 from 
"./delaymsg/query.rpc.Query"; -import * as _128 from "./epochs/query.rpc.Query"; -import * as _129 from "./feetiers/query.rpc.Query"; -import * as _130 from "./govplus/query.rpc.Query"; -import * as _131 from "./perpetuals/query.rpc.Query"; -import * as _132 from "./prices/query.rpc.Query"; -import * as _133 from "./ratelimit/query.rpc.Query"; -import * as _134 from "./rewards/query.rpc.Query"; -import * as _135 from "./sending/query.rpc.Query"; -import * as _136 from "./stats/query.rpc.Query"; -import * as _137 from "./subaccounts/query.rpc.Query"; -import * as _138 from "./vault/query.rpc.Query"; -import * as _139 from "./vest/query.rpc.Query"; -import * as _140 from "./blocktime/tx.rpc.msg"; -import * as _141 from "./bridge/tx.rpc.msg"; -import * as _142 from "./clob/tx.rpc.msg"; -import * as _143 from "./delaymsg/tx.rpc.msg"; -import * as _144 from "./feetiers/tx.rpc.msg"; -import * as _145 from "./govplus/tx.rpc.msg"; -import * as _146 from "./perpetuals/tx.rpc.msg"; -import * as _147 from "./prices/tx.rpc.msg"; -import * as _148 from "./ratelimit/tx.rpc.msg"; -import * as _149 from "./rewards/tx.rpc.msg"; -import * as _150 from "./sending/tx.rpc.msg"; -import * as _151 from "./stats/tx.rpc.msg"; -import * as _152 from "./vault/tx.rpc.msg"; -import * as _153 from "./vest/tx.rpc.msg"; -import * as _154 from "./lcd"; -import * as _155 from "./rpc.query"; -import * as _156 from "./rpc.tx"; +import * as _5 from "./accountplus/accountplus"; +import * as _6 from "./accountplus/genesis"; +import * as _7 from "./accountplus/models"; +import * as _8 from "./accountplus/params"; +import * as _9 from "./accountplus/query"; +import * as _10 from "./accountplus/tx"; +import * as _11 from "./affiliates/affiliates"; +import * as _12 from "./affiliates/genesis"; +import * as _13 from "./affiliates/query"; +import * as _14 from "./affiliates/tx"; +import * as _15 from "./assets/asset"; +import * as _16 from "./assets/genesis"; +import * as _17 from "./assets/query"; +import * 
as _18 from "./assets/tx"; +import * as _19 from "./blocktime/blocktime"; +import * as _20 from "./blocktime/genesis"; +import * as _21 from "./blocktime/params"; +import * as _22 from "./blocktime/query"; +import * as _23 from "./blocktime/tx"; +import * as _24 from "./bridge/bridge_event_info"; +import * as _25 from "./bridge/bridge_event"; +import * as _26 from "./bridge/genesis"; +import * as _27 from "./bridge/params"; +import * as _28 from "./bridge/query"; +import * as _29 from "./bridge/tx"; +import * as _30 from "./clob/block_rate_limit_config"; +import * as _31 from "./clob/clob_pair"; +import * as _32 from "./clob/equity_tier_limit_config"; +import * as _33 from "./clob/finalize_block"; +import * as _34 from "./clob/genesis"; +import * as _35 from "./clob/liquidations_config"; +import * as _36 from "./clob/liquidations"; +import * as _37 from "./clob/matches"; +import * as _38 from "./clob/mev"; +import * as _39 from "./clob/operation"; +import * as _40 from "./clob/order_removals"; +import * as _41 from "./clob/order"; +import * as _42 from "./clob/process_proposer_matches_events"; +import * as _43 from "./clob/query"; +import * as _44 from "./clob/streaming"; +import * as _45 from "./clob/tx"; +import * as _46 from "./daemons/bridge/bridge"; +import * as _47 from "./daemons/liquidation/liquidation"; +import * as _48 from "./daemons/pricefeed/price_feed"; +import * as _49 from "./delaymsg/block_message_ids"; +import * as _50 from "./delaymsg/delayed_message"; +import * as _51 from "./delaymsg/genesis"; +import * as _52 from "./delaymsg/query"; +import * as _53 from "./delaymsg/tx"; +import * as _54 from "./epochs/epoch_info"; +import * as _55 from "./epochs/genesis"; +import * as _56 from "./epochs/query"; +import * as _57 from "./feetiers/genesis"; +import * as _58 from "./feetiers/params"; +import * as _59 from "./feetiers/query"; +import * as _60 from "./feetiers/tx"; +import * as _61 from "./govplus/genesis"; +import * as _62 from "./govplus/query"; 
+import * as _63 from "./govplus/tx"; +import * as _64 from "./indexer/events/events"; +import * as _65 from "./indexer/indexer_manager/event"; +import * as _66 from "./indexer/off_chain_updates/off_chain_updates"; +import * as _67 from "./indexer/protocol/v1/clob"; +import * as _68 from "./indexer/protocol/v1/perpetual"; +import * as _69 from "./indexer/protocol/v1/subaccount"; +import * as _70 from "./indexer/protocol/v1/vault"; +import * as _71 from "./indexer/redis/redis_order"; +import * as _72 from "./indexer/shared/removal_reason"; +import * as _73 from "./indexer/socks/messages"; +import * as _74 from "./listing/genesis"; +import * as _75 from "./listing/params"; +import * as _76 from "./listing/query"; +import * as _77 from "./listing/tx"; +import * as _78 from "./perpetuals/genesis"; +import * as _79 from "./perpetuals/params"; +import * as _80 from "./perpetuals/perpetual"; +import * as _81 from "./perpetuals/query"; +import * as _82 from "./perpetuals/tx"; +import * as _83 from "./prices/genesis"; +import * as _84 from "./prices/market_param"; +import * as _85 from "./prices/market_price"; +import * as _86 from "./prices/query"; +import * as _87 from "./prices/streaming"; +import * as _88 from "./prices/tx"; +import * as _89 from "./ratelimit/capacity"; +import * as _90 from "./ratelimit/genesis"; +import * as _91 from "./ratelimit/limit_params"; +import * as _92 from "./ratelimit/pending_send_packet"; +import * as _93 from "./ratelimit/query"; +import * as _94 from "./ratelimit/tx"; +import * as _95 from "./revshare/genesis"; +import * as _96 from "./revshare/params"; +import * as _97 from "./revshare/query"; +import * as _98 from "./revshare/revshare"; +import * as _99 from "./revshare/tx"; +import * as _100 from "./rewards/genesis"; +import * as _101 from "./rewards/params"; +import * as _102 from "./rewards/query"; +import * as _103 from "./rewards/reward_share"; +import * as _104 from "./rewards/tx"; +import * as _105 from "./sending/genesis"; 
+import * as _106 from "./sending/query"; +import * as _107 from "./sending/transfer"; +import * as _108 from "./sending/tx"; +import * as _109 from "./stats/genesis"; +import * as _110 from "./stats/params"; +import * as _111 from "./stats/query"; +import * as _112 from "./stats/stats"; +import * as _113 from "./stats/tx"; +import * as _114 from "./subaccounts/asset_position"; +import * as _115 from "./subaccounts/genesis"; +import * as _116 from "./subaccounts/perpetual_position"; +import * as _117 from "./subaccounts/query"; +import * as _118 from "./subaccounts/streaming"; +import * as _119 from "./subaccounts/subaccount"; +import * as _120 from "./vault/genesis"; +import * as _121 from "./vault/params"; +import * as _122 from "./vault/query"; +import * as _123 from "./vault/share"; +import * as _124 from "./vault/tx"; +import * as _125 from "./vault/vault"; +import * as _126 from "./vest/genesis"; +import * as _127 from "./vest/query"; +import * as _128 from "./vest/tx"; +import * as _129 from "./vest/vest_entry"; +import * as _137 from "./accountplus/query.lcd"; +import * as _138 from "./affiliates/query.lcd"; +import * as _139 from "./assets/query.lcd"; +import * as _140 from "./blocktime/query.lcd"; +import * as _141 from "./bridge/query.lcd"; +import * as _142 from "./clob/query.lcd"; +import * as _143 from "./delaymsg/query.lcd"; +import * as _144 from "./epochs/query.lcd"; +import * as _145 from "./feetiers/query.lcd"; +import * as _146 from "./listing/query.lcd"; +import * as _147 from "./perpetuals/query.lcd"; +import * as _148 from "./prices/query.lcd"; +import * as _149 from "./ratelimit/query.lcd"; +import * as _150 from "./revshare/query.lcd"; +import * as _151 from "./rewards/query.lcd"; +import * as _152 from "./stats/query.lcd"; +import * as _153 from "./subaccounts/query.lcd"; +import * as _154 from "./vault/query.lcd"; +import * as _155 from "./vest/query.lcd"; +import * as _156 from "./accountplus/query.rpc.Query"; +import * as _157 from 
"./affiliates/query.rpc.Query"; +import * as _158 from "./assets/query.rpc.Query"; +import * as _159 from "./blocktime/query.rpc.Query"; +import * as _160 from "./bridge/query.rpc.Query"; +import * as _161 from "./clob/query.rpc.Query"; +import * as _162 from "./delaymsg/query.rpc.Query"; +import * as _163 from "./epochs/query.rpc.Query"; +import * as _164 from "./feetiers/query.rpc.Query"; +import * as _165 from "./govplus/query.rpc.Query"; +import * as _166 from "./listing/query.rpc.Query"; +import * as _167 from "./perpetuals/query.rpc.Query"; +import * as _168 from "./prices/query.rpc.Query"; +import * as _169 from "./ratelimit/query.rpc.Query"; +import * as _170 from "./revshare/query.rpc.Query"; +import * as _171 from "./rewards/query.rpc.Query"; +import * as _172 from "./sending/query.rpc.Query"; +import * as _173 from "./stats/query.rpc.Query"; +import * as _174 from "./subaccounts/query.rpc.Query"; +import * as _175 from "./vault/query.rpc.Query"; +import * as _176 from "./vest/query.rpc.Query"; +import * as _177 from "./accountplus/tx.rpc.msg"; +import * as _178 from "./affiliates/tx.rpc.msg"; +import * as _179 from "./blocktime/tx.rpc.msg"; +import * as _180 from "./bridge/tx.rpc.msg"; +import * as _181 from "./clob/tx.rpc.msg"; +import * as _182 from "./delaymsg/tx.rpc.msg"; +import * as _183 from "./feetiers/tx.rpc.msg"; +import * as _184 from "./govplus/tx.rpc.msg"; +import * as _185 from "./listing/tx.rpc.msg"; +import * as _186 from "./perpetuals/tx.rpc.msg"; +import * as _187 from "./prices/tx.rpc.msg"; +import * as _188 from "./ratelimit/tx.rpc.msg"; +import * as _189 from "./revshare/tx.rpc.msg"; +import * as _190 from "./rewards/tx.rpc.msg"; +import * as _191 from "./sending/tx.rpc.msg"; +import * as _192 from "./stats/tx.rpc.msg"; +import * as _193 from "./vault/tx.rpc.msg"; +import * as _194 from "./vest/tx.rpc.msg"; +import * as _195 from "./lcd"; +import * as _196 from "./rpc.query"; +import * as _197 from "./rpc.tx"; export namespace 
dydxprotocol { - export const assets = { ..._5, + export const accountplus = { ..._5, ..._6, ..._7, ..._8, - ..._109, - ..._123 - }; - export const blocktime = { ..._9, + ..._9, ..._10, - ..._11, + ..._137, + ..._156, + ..._177 + }; + export const affiliates = { ..._11, ..._12, ..._13, - ..._110, - ..._124, - ..._140 + ..._14, + ..._138, + ..._157, + ..._178 }; - export const bridge = { ..._14, - ..._15, + export const assets = { ..._15, ..._16, ..._17, ..._18, - ..._19, - ..._111, - ..._125, - ..._141 + ..._139, + ..._158 }; - export const clob = { ..._20, + export const blocktime = { ..._19, + ..._20, ..._21, ..._22, ..._23, - ..._24, + ..._140, + ..._159, + ..._179 + }; + export const bridge = { ..._24, ..._25, ..._26, ..._27, ..._28, ..._29, - ..._30, + ..._141, + ..._160, + ..._180 + }; + export const clob = { ..._30, ..._31, ..._32, ..._33, - ..._112, - ..._126, - ..._142 - }; - export namespace daemons { - export const bridge = { ..._34 - }; - export const liquidation = { ..._35 - }; - export const pricefeed = { ..._36 - }; - } - export const delaymsg = { ..._37, + ..._34, + ..._35, + ..._36, + ..._37, ..._38, ..._39, ..._40, ..._41, - ..._113, - ..._127, - ..._143 - }; - export const epochs = { ..._42, + ..._42, ..._43, ..._44, - ..._114, - ..._128 + ..._45, + ..._142, + ..._161, + ..._181 }; - export const feetiers = { ..._45, - ..._46, - ..._47, - ..._48, - ..._115, - ..._129, - ..._144 - }; - export const govplus = { ..._49, + export namespace daemons { + export const bridge = { ..._46 + }; + export const liquidation = { ..._47 + }; + export const pricefeed = { ..._48 + }; + } + export const delaymsg = { ..._49, ..._50, ..._51, - ..._130, - ..._145 + ..._52, + ..._53, + ..._143, + ..._162, + ..._182 + }; + export const epochs = { ..._54, + ..._55, + ..._56, + ..._144, + ..._163 + }; + export const feetiers = { ..._57, + ..._58, + ..._59, + ..._60, + ..._145, + ..._164, + ..._183 + }; + export const govplus = { ..._61, + ..._62, + ..._63, + ..._165, + 
..._184 }; export namespace indexer { - export const events = { ..._52 + export const events = { ..._64 }; - export const indexer_manager = { ..._53 + export const indexer_manager = { ..._65 }; - export const off_chain_updates = { ..._54 + export const off_chain_updates = { ..._66 }; export namespace protocol { - export const v1 = { ..._55, - ..._56 + export const v1 = { ..._67, + ..._68, + ..._69, + ..._70 }; } - export const redis = { ..._57 + export const redis = { ..._71 }; - export const shared = { ..._58 + export const shared = { ..._72 }; - export const socks = { ..._59 + export const socks = { ..._73 }; } - export const perpetuals = { ..._60, - ..._61, - ..._62, - ..._63, - ..._64, - ..._116, - ..._131, - ..._146 - }; - export const prices = { ..._65, - ..._66, - ..._67, - ..._68, - ..._69, - ..._117, - ..._132, - ..._147 - }; - export const ratelimit = { ..._70, - ..._71, - ..._72, - ..._73, - ..._74, - ..._118, - ..._133, - ..._148 - }; - export const rewards = { ..._75, + export const listing = { ..._74, + ..._75, ..._76, ..._77, - ..._78, - ..._79, - ..._119, - ..._134, - ..._149 + ..._146, + ..._166, + ..._185 }; - export const sending = { ..._80, + export const perpetuals = { ..._78, + ..._79, + ..._80, ..._81, ..._82, - ..._83, - ..._135, - ..._150 + ..._147, + ..._167, + ..._186 }; - export const stats = { ..._84, + export const prices = { ..._83, + ..._84, ..._85, ..._86, ..._87, ..._88, - ..._120, - ..._136, - ..._151 + ..._148, + ..._168, + ..._187 }; - export const subaccounts = { ..._89, + export const ratelimit = { ..._89, ..._90, ..._91, ..._92, ..._93, - ..._121, - ..._137 + ..._94, + ..._149, + ..._169, + ..._188 }; - export const vault = { ..._94, - ..._95, + export const revshare = { ..._95, ..._96, ..._97, - ..._138, - ..._152 - }; - export const vest = { ..._98, + ..._98, ..._99, - ..._100, + ..._150, + ..._170, + ..._189 + }; + export const rewards = { ..._100, ..._101, + ..._102, + ..._103, + ..._104, + ..._151, + ..._171, + ..._190 + 
}; + export const sending = { ..._105, + ..._106, + ..._107, + ..._108, + ..._172, + ..._191 + }; + export const stats = { ..._109, + ..._110, + ..._111, + ..._112, + ..._113, + ..._152, + ..._173, + ..._192 + }; + export const subaccounts = { ..._114, + ..._115, + ..._116, + ..._117, + ..._118, + ..._119, + ..._153, + ..._174 + }; + export const vault = { ..._120, + ..._121, ..._122, - ..._139, - ..._153 + ..._123, + ..._124, + ..._125, + ..._154, + ..._175, + ..._193 }; - export const ClientFactory = { ..._154, + export const vest = { ..._126, + ..._127, + ..._128, + ..._129, ..._155, - ..._156 + ..._176, + ..._194 + }; + export const ClientFactory = { ..._195, + ..._196, + ..._197 }; } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/block_rate_limit_config.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/block_rate_limit_config.ts index 4db5bf31a3d..7da63ae44f7 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/block_rate_limit_config.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/block_rate_limit_config.ts @@ -10,7 +10,11 @@ export interface BlockRateLimitConfiguration { * configurations. * * Specifying 0 values disables this rate limit. + * Deprecated in favor of `max_short_term_orders_and_cancels_per_n_blocks` + * for v5.x onwards. */ + + /** @deprecated */ maxShortTermOrdersPerNBlocks: MaxPerNBlocksRateLimit[]; /** * How many stateful order attempts (successful and failed) are allowed for @@ -22,7 +26,10 @@ export interface BlockRateLimitConfiguration { */ maxStatefulOrdersPerNBlocks: MaxPerNBlocksRateLimit[]; + /** @deprecated */ + maxShortTermOrderCancellationsPerNBlocks: MaxPerNBlocksRateLimit[]; + maxShortTermOrdersAndCancelsPerNBlocks: MaxPerNBlocksRateLimit[]; } /** Defines the block rate limits for CLOB specific operations. */ @@ -34,7 +41,11 @@ export interface BlockRateLimitConfigurationSDKType { * configurations. 
* * Specifying 0 values disables this rate limit. + * Deprecated in favor of `max_short_term_orders_and_cancels_per_n_blocks` + * for v5.x onwards. */ + + /** @deprecated */ max_short_term_orders_per_n_blocks: MaxPerNBlocksRateLimitSDKType[]; /** * How many stateful order attempts (successful and failed) are allowed for @@ -46,7 +57,10 @@ export interface BlockRateLimitConfigurationSDKType { */ max_stateful_orders_per_n_blocks: MaxPerNBlocksRateLimitSDKType[]; + /** @deprecated */ + max_short_term_order_cancellations_per_n_blocks: MaxPerNBlocksRateLimitSDKType[]; + max_short_term_orders_and_cancels_per_n_blocks: MaxPerNBlocksRateLimitSDKType[]; } /** Defines a rate limit over a specific number of blocks. */ @@ -83,7 +97,8 @@ function createBaseBlockRateLimitConfiguration(): BlockRateLimitConfiguration { return { maxShortTermOrdersPerNBlocks: [], maxStatefulOrdersPerNBlocks: [], - maxShortTermOrderCancellationsPerNBlocks: [] + maxShortTermOrderCancellationsPerNBlocks: [], + maxShortTermOrdersAndCancelsPerNBlocks: [] }; } @@ -101,6 +116,10 @@ export const BlockRateLimitConfiguration = { MaxPerNBlocksRateLimit.encode(v!, writer.uint32(26).fork()).ldelim(); } + for (const v of message.maxShortTermOrdersAndCancelsPerNBlocks) { + MaxPerNBlocksRateLimit.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; }, @@ -125,6 +144,10 @@ export const BlockRateLimitConfiguration = { message.maxShortTermOrderCancellationsPerNBlocks.push(MaxPerNBlocksRateLimit.decode(reader, reader.uint32())); break; + case 4: + message.maxShortTermOrdersAndCancelsPerNBlocks.push(MaxPerNBlocksRateLimit.decode(reader, reader.uint32())); + break; + default: reader.skipType(tag & 7); break; @@ -139,6 +162,7 @@ export const BlockRateLimitConfiguration = { message.maxShortTermOrdersPerNBlocks = object.maxShortTermOrdersPerNBlocks?.map(e => MaxPerNBlocksRateLimit.fromPartial(e)) || []; message.maxStatefulOrdersPerNBlocks = object.maxStatefulOrdersPerNBlocks?.map(e => 
MaxPerNBlocksRateLimit.fromPartial(e)) || []; message.maxShortTermOrderCancellationsPerNBlocks = object.maxShortTermOrderCancellationsPerNBlocks?.map(e => MaxPerNBlocksRateLimit.fromPartial(e)) || []; + message.maxShortTermOrdersAndCancelsPerNBlocks = object.maxShortTermOrdersAndCancelsPerNBlocks?.map(e => MaxPerNBlocksRateLimit.fromPartial(e)) || []; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/finalize_block.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/finalize_block.ts new file mode 100644 index 00000000000..ac6382d2c94 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/finalize_block.ts @@ -0,0 +1,66 @@ +import { ClobPair, ClobPairSDKType } from "./clob_pair"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * ClobStagedFinalizeBlockEvent defines a CLOB event staged during + * FinalizeBlock. + */ + +export interface ClobStagedFinalizeBlockEvent { + /** create_clob_pair indicates a new CLOB pair creation. */ + createClobPair?: ClobPair; +} +/** + * ClobStagedFinalizeBlockEvent defines a CLOB event staged during + * FinalizeBlock. + */ + +export interface ClobStagedFinalizeBlockEventSDKType { + /** create_clob_pair indicates a new CLOB pair creation. */ + create_clob_pair?: ClobPairSDKType; +} + +function createBaseClobStagedFinalizeBlockEvent(): ClobStagedFinalizeBlockEvent { + return { + createClobPair: undefined + }; +} + +export const ClobStagedFinalizeBlockEvent = { + encode(message: ClobStagedFinalizeBlockEvent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.createClobPair !== undefined) { + ClobPair.encode(message.createClobPair, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClobStagedFinalizeBlockEvent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClobStagedFinalizeBlockEvent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.createClobPair = ClobPair.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClobStagedFinalizeBlockEvent { + const message = createBaseClobStagedFinalizeBlockEvent(); + message.createClobPair = object.createClobPair !== undefined && object.createClobPair !== null ? ClobPair.fromPartial(object.createClobPair) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order.ts index 3c714e5979e..6d166bff82e 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order.ts @@ -1,4 +1,5 @@ import { SubaccountId, SubaccountIdSDKType } from "../subaccounts/subaccount"; +import { PerpetualLiquidationInfo, PerpetualLiquidationInfoSDKType } from "./liquidations"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial, Long } from "../../helpers"; /** @@ -101,9 +102,8 @@ export enum Order_TimeInForce { TIME_IN_FORCE_POST_ONLY = 2, /** - * TIME_IN_FORCE_FILL_OR_KILL - TIME_IN_FORCE_FILL_OR_KILL enforces that an order will either be filled - * completely and immediately by maker orders on the book or canceled if the - * entire amount can‘t be matched. + * TIME_IN_FORCE_FILL_OR_KILL - TIME_IN_FORCE_FILL_OR_KILL has been deprecated and will be removed in + * future versions. 
*/ TIME_IN_FORCE_FILL_OR_KILL = 3, UNRECOGNIZED = -1, @@ -138,9 +138,8 @@ export enum Order_TimeInForceSDKType { TIME_IN_FORCE_POST_ONLY = 2, /** - * TIME_IN_FORCE_FILL_OR_KILL - TIME_IN_FORCE_FILL_OR_KILL enforces that an order will either be filled - * completely and immediately by maker orders on the book or canceled if the - * entire amount can‘t be matched. + * TIME_IN_FORCE_FILL_OR_KILL - TIME_IN_FORCE_FILL_OR_KILL has been deprecated and will be removed in + * future versions. */ TIME_IN_FORCE_FILL_OR_KILL = 3, UNRECOGNIZED = -1, @@ -702,6 +701,60 @@ export interface TransactionOrderingSDKType { transaction_index: number; } +/** + * StreamLiquidationOrder represents an protocol-generated IOC liquidation + * order. Used in full node streaming. + */ + +export interface StreamLiquidationOrder { + /** Information about this liquidation order. */ + liquidationInfo?: PerpetualLiquidationInfo; + /** + * CLOB pair ID of the CLOB pair the liquidation order will be matched + * against. + */ + + clobPairId: number; + /** + * True if this is a buy order liquidating a short position, false if vice + * versa. + */ + + isBuy: boolean; + /** The number of base quantums for this liquidation order. */ + + quantums: Long; + /** The subticks this liquidation order will be submitted at. */ + + subticks: Long; +} +/** + * StreamLiquidationOrder represents an protocol-generated IOC liquidation + * order. Used in full node streaming. + */ + +export interface StreamLiquidationOrderSDKType { + /** Information about this liquidation order. */ + liquidation_info?: PerpetualLiquidationInfoSDKType; + /** + * CLOB pair ID of the CLOB pair the liquidation order will be matched + * against. + */ + + clob_pair_id: number; + /** + * True if this is a buy order liquidating a short position, false if vice + * versa. + */ + + is_buy: boolean; + /** The number of base quantums for this liquidation order. */ + + quantums: Long; + /** The subticks this liquidation order will be submitted at. 
*/ + + subticks: Long; +} function createBaseOrderId(): OrderId { return { @@ -1286,4 +1339,89 @@ export const TransactionOrdering = { return message; } +}; + +function createBaseStreamLiquidationOrder(): StreamLiquidationOrder { + return { + liquidationInfo: undefined, + clobPairId: 0, + isBuy: false, + quantums: Long.UZERO, + subticks: Long.UZERO + }; +} + +export const StreamLiquidationOrder = { + encode(message: StreamLiquidationOrder, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.liquidationInfo !== undefined) { + PerpetualLiquidationInfo.encode(message.liquidationInfo, writer.uint32(10).fork()).ldelim(); + } + + if (message.clobPairId !== 0) { + writer.uint32(16).uint32(message.clobPairId); + } + + if (message.isBuy === true) { + writer.uint32(24).bool(message.isBuy); + } + + if (!message.quantums.isZero()) { + writer.uint32(32).uint64(message.quantums); + } + + if (!message.subticks.isZero()) { + writer.uint32(40).uint64(message.subticks); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamLiquidationOrder { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamLiquidationOrder(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.liquidationInfo = PerpetualLiquidationInfo.decode(reader, reader.uint32()); + break; + + case 2: + message.clobPairId = reader.uint32(); + break; + + case 3: + message.isBuy = reader.bool(); + break; + + case 4: + message.quantums = (reader.uint64() as Long); + break; + + case 5: + message.subticks = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamLiquidationOrder { + const message = createBaseStreamLiquidationOrder(); + message.liquidationInfo = object.liquidationInfo !== undefined && object.liquidationInfo !== null ? PerpetualLiquidationInfo.fromPartial(object.liquidationInfo) : undefined; + message.clobPairId = object.clobPairId ?? 0; + message.isBuy = object.isBuy ?? false; + message.quantums = object.quantums !== undefined && object.quantums !== null ? Long.fromValue(object.quantums) : Long.UZERO; + message.subticks = object.subticks !== undefined && object.subticks !== null ? Long.fromValue(object.subticks) : Long.UZERO; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order_removals.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order_removals.ts index 6a5b9437a7b..378e69347fe 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order_removals.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/order_removals.ts @@ -59,6 +59,18 @@ export enum OrderRemoval_RemovalReason { * was fully filled and should therefore be removed from state. 
*/ REMOVAL_REASON_FULLY_FILLED = 7, + + /** + * REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS - REMOVAL_REASON_FULLY_FILLED represents a removal of an order that + * would lead to the subaccount violating isolated subaccount constraints. + */ + REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS = 8, + + /** + * REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED - REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED represents a removal of an order + * that was placed using an expired permissioned key. + */ + REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED = 9, UNRECOGNIZED = -1, } export enum OrderRemoval_RemovalReasonSDKType { @@ -119,6 +131,18 @@ export enum OrderRemoval_RemovalReasonSDKType { * was fully filled and should therefore be removed from state. */ REMOVAL_REASON_FULLY_FILLED = 7, + + /** + * REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS - REMOVAL_REASON_FULLY_FILLED represents a removal of an order that + * would lead to the subaccount violating isolated subaccount constraints. + */ + REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS = 8, + + /** + * REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED - REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED represents a removal of an order + * that was placed using an expired permissioned key. 
+ */ + REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED = 9, UNRECOGNIZED = -1, } export function orderRemoval_RemovalReasonFromJSON(object: any): OrderRemoval_RemovalReason { @@ -155,6 +179,14 @@ export function orderRemoval_RemovalReasonFromJSON(object: any): OrderRemoval_Re case "REMOVAL_REASON_FULLY_FILLED": return OrderRemoval_RemovalReason.REMOVAL_REASON_FULLY_FILLED; + case 8: + case "REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS": + return OrderRemoval_RemovalReason.REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS; + + case 9: + case "REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED": + return OrderRemoval_RemovalReason.REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED; + case -1: case "UNRECOGNIZED": default: @@ -187,6 +219,12 @@ export function orderRemoval_RemovalReasonToJSON(object: OrderRemoval_RemovalRea case OrderRemoval_RemovalReason.REMOVAL_REASON_FULLY_FILLED: return "REMOVAL_REASON_FULLY_FILLED"; + case OrderRemoval_RemovalReason.REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS: + return "REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS"; + + case OrderRemoval_RemovalReason.REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED: + return "REMOVAL_REASON_PERMISSIONED_KEY_EXPIRED"; + case OrderRemoval_RemovalReason.UNRECOGNIZED: default: return "UNRECOGNIZED"; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/process_proposer_matches_events.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/process_proposer_matches_events.ts index f0bf7761165..f028203ec3f 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/process_proposer_matches_events.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/process_proposer_matches_events.ts @@ -16,12 +16,17 @@ import { DeepPartial } from "../../helpers"; */ export interface ProcessProposerMatchesEvents { + /** @deprecated */ placedLongTermOrderIds: OrderId[]; expiredStatefulOrderIds: OrderId[]; orderIdsFilledInLastBlock: OrderId[]; + /** @deprecated */ + 
placedStatefulCancellationOrderIds: OrderId[]; removedStatefulOrderIds: OrderId[]; conditionalOrderIdsTriggeredInLastBlock: OrderId[]; + /** @deprecated */ + placedConditionalOrderIds: OrderId[]; blockHeight: number; } @@ -40,12 +45,17 @@ export interface ProcessProposerMatchesEvents { */ export interface ProcessProposerMatchesEventsSDKType { + /** @deprecated */ placed_long_term_order_ids: OrderIdSDKType[]; expired_stateful_order_ids: OrderIdSDKType[]; order_ids_filled_in_last_block: OrderIdSDKType[]; + /** @deprecated */ + placed_stateful_cancellation_order_ids: OrderIdSDKType[]; removed_stateful_order_ids: OrderIdSDKType[]; conditional_order_ids_triggered_in_last_block: OrderIdSDKType[]; + /** @deprecated */ + placed_conditional_order_ids: OrderIdSDKType[]; block_height: number; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.lcd.ts index 4ef29497240..034cbad3db2 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.lcd.ts @@ -1,6 +1,6 @@ import { setPaginationParams } from "../../helpers"; import { LCDClient } from "@osmonauts/lcd"; -import { QueryGetClobPairRequest, QueryClobPairResponseSDKType, QueryAllClobPairRequest, QueryClobPairAllResponseSDKType, QueryEquityTierLimitConfigurationRequest, QueryEquityTierLimitConfigurationResponseSDKType, QueryBlockRateLimitConfigurationRequest, QueryBlockRateLimitConfigurationResponseSDKType, QueryLiquidationsConfigurationRequest, QueryLiquidationsConfigurationResponseSDKType } from "./query"; +import { QueryGetClobPairRequest, QueryClobPairResponseSDKType, QueryAllClobPairRequest, QueryClobPairAllResponseSDKType, QueryEquityTierLimitConfigurationRequest, QueryEquityTierLimitConfigurationResponseSDKType, QueryBlockRateLimitConfigurationRequest, QueryBlockRateLimitConfigurationResponseSDKType, 
QueryLiquidationsConfigurationRequest, QueryLiquidationsConfigurationResponseSDKType, QueryNextClobPairIdRequest, QueryNextClobPairIdResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -15,6 +15,7 @@ export class LCDQueryClient { this.equityTierLimitConfiguration = this.equityTierLimitConfiguration.bind(this); this.blockRateLimitConfiguration = this.blockRateLimitConfiguration.bind(this); this.liquidationsConfiguration = this.liquidationsConfiguration.bind(this); + this.nextClobPairId = this.nextClobPairId.bind(this); } /* Queries a ClobPair by id. */ @@ -61,5 +62,12 @@ export class LCDQueryClient { const endpoint = `dydxprotocol/clob/liquidations_config`; return await this.req.get(endpoint); } + /* Queries the next clob pair id. */ + + + async nextClobPairId(_params: QueryNextClobPairIdRequest = {}): Promise { + const endpoint = `dydxprotocol/clob/next_clob_pair_id`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.rpc.Query.ts index 4b5ccc331b3..5c87afce0f1 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { QueryGetClobPairRequest, QueryClobPairResponse, QueryAllClobPairRequest, QueryClobPairAllResponse, MevNodeToNodeCalculationRequest, MevNodeToNodeCalculationResponse, QueryEquityTierLimitConfigurationRequest, QueryEquityTierLimitConfigurationResponse, QueryBlockRateLimitConfigurationRequest, QueryBlockRateLimitConfigurationResponse, QueryLiquidationsConfigurationRequest, QueryLiquidationsConfigurationResponse, StreamOrderbookUpdatesRequest, 
StreamOrderbookUpdatesResponse } from "./query"; +import { QueryGetClobPairRequest, QueryClobPairResponse, QueryAllClobPairRequest, QueryClobPairAllResponse, MevNodeToNodeCalculationRequest, MevNodeToNodeCalculationResponse, QueryEquityTierLimitConfigurationRequest, QueryEquityTierLimitConfigurationResponse, QueryBlockRateLimitConfigurationRequest, QueryBlockRateLimitConfigurationResponse, QueryLiquidationsConfigurationRequest, QueryLiquidationsConfigurationResponse, QueryStatefulOrderRequest, QueryStatefulOrderResponse, QueryNextClobPairIdRequest, QueryNextClobPairIdResponse, StreamOrderbookUpdatesRequest, StreamOrderbookUpdatesResponse } from "./query"; /** Query defines the gRPC querier service. */ export interface Query { @@ -22,7 +22,16 @@ export interface Query { /** Queries LiquidationsConfiguration. */ liquidationsConfiguration(request?: QueryLiquidationsConfigurationRequest): Promise; - /** Streams orderbook updates. */ + /** Queries the stateful order for a given order id. */ + + statefulOrder(request: QueryStatefulOrderRequest): Promise; + /** Queries the next clob pair id. */ + + nextClobPairId(request?: QueryNextClobPairIdRequest): Promise; + /** + * Streams orderbook updates. Updates contain orderbook data + * such as order placements, updates, and fills. 
+ */ streamOrderbookUpdates(request: StreamOrderbookUpdatesRequest): Promise; } @@ -37,6 +46,8 @@ export class QueryClientImpl implements Query { this.equityTierLimitConfiguration = this.equityTierLimitConfiguration.bind(this); this.blockRateLimitConfiguration = this.blockRateLimitConfiguration.bind(this); this.liquidationsConfiguration = this.liquidationsConfiguration.bind(this); + this.statefulOrder = this.statefulOrder.bind(this); + this.nextClobPairId = this.nextClobPairId.bind(this); this.streamOrderbookUpdates = this.streamOrderbookUpdates.bind(this); } @@ -78,6 +89,18 @@ export class QueryClientImpl implements Query { return promise.then(data => QueryLiquidationsConfigurationResponse.decode(new _m0.Reader(data))); } + statefulOrder(request: QueryStatefulOrderRequest): Promise { + const data = QueryStatefulOrderRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.clob.Query", "StatefulOrder", data); + return promise.then(data => QueryStatefulOrderResponse.decode(new _m0.Reader(data))); + } + + nextClobPairId(request: QueryNextClobPairIdRequest = {}): Promise { + const data = QueryNextClobPairIdRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.clob.Query", "NextClobPairId", data); + return promise.then(data => QueryNextClobPairIdResponse.decode(new _m0.Reader(data))); + } + streamOrderbookUpdates(request: StreamOrderbookUpdatesRequest): Promise { const data = StreamOrderbookUpdatesRequest.encode(request).finish(); const promise = this.rpc.request("dydxprotocol.clob.Query", "StreamOrderbookUpdates", data); @@ -113,6 +136,14 @@ export const createRpcQueryExtension = (base: QueryClient) => { return queryService.liquidationsConfiguration(request); }, + statefulOrder(request: QueryStatefulOrderRequest): Promise { + return queryService.statefulOrder(request); + }, + + nextClobPairId(request?: QueryNextClobPairIdRequest): Promise { + return queryService.nextClobPairId(request); + }, + 
streamOrderbookUpdates(request: StreamOrderbookUpdatesRequest): Promise { return queryService.streamOrderbookUpdates(request); } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.ts index ddf8e9bd2d8..b6032561973 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/query.ts @@ -1,10 +1,15 @@ import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../cosmos/base/query/v1beta1/pagination"; import { ValidatorMevMatches, ValidatorMevMatchesSDKType, MevNodeToNodeMetrics, MevNodeToNodeMetricsSDKType } from "./mev"; +import { OrderId, OrderIdSDKType, LongTermOrderPlacement, LongTermOrderPlacementSDKType, Order, OrderSDKType, StreamLiquidationOrder, StreamLiquidationOrderSDKType } from "./order"; +import { SubaccountId, SubaccountIdSDKType } from "../subaccounts/subaccount"; import { ClobPair, ClobPairSDKType } from "./clob_pair"; import { EquityTierLimitConfiguration, EquityTierLimitConfigurationSDKType } from "./equity_tier_limit_config"; import { BlockRateLimitConfiguration, BlockRateLimitConfigurationSDKType } from "./block_rate_limit_config"; import { LiquidationsConfig, LiquidationsConfigSDKType } from "./liquidations_config"; +import { StreamSubaccountUpdate, StreamSubaccountUpdateSDKType } from "../subaccounts/streaming"; +import { StreamPriceUpdate, StreamPriceUpdateSDKType } from "../prices/streaming"; import { OffChainUpdateV1, OffChainUpdateV1SDKType } from "../indexer/off_chain_updates/off_chain_updates"; +import { ClobMatch, ClobMatchSDKType } from "./matches"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial, Long } from "../../helpers"; /** QueryGetClobPairRequest is request type for the ClobPair method. 
*/ @@ -171,6 +176,48 @@ export interface QueryBlockRateLimitConfigurationResponse { export interface QueryBlockRateLimitConfigurationResponseSDKType { block_rate_limit_config?: BlockRateLimitConfigurationSDKType; } +/** QueryStatefulOrderRequest is a request message for StatefulOrder. */ + +export interface QueryStatefulOrderRequest { + /** Order id to query. */ + orderId?: OrderId; +} +/** QueryStatefulOrderRequest is a request message for StatefulOrder. */ + +export interface QueryStatefulOrderRequestSDKType { + /** Order id to query. */ + order_id?: OrderIdSDKType; +} +/** + * QueryStatefulOrderResponse is a response message that contains the stateful + * order. + */ + +export interface QueryStatefulOrderResponse { + /** Stateful order placement. */ + orderPlacement?: LongTermOrderPlacement; + /** Fill amounts. */ + + fillAmount: Long; + /** Triggered status. */ + + triggered: boolean; +} +/** + * QueryStatefulOrderResponse is a response message that contains the stateful + * order. + */ + +export interface QueryStatefulOrderResponseSDKType { + /** Stateful order placement. */ + order_placement?: LongTermOrderPlacementSDKType; + /** Fill amounts. */ + + fill_amount: Long; + /** Triggered status. */ + + triggered: boolean; +} /** * QueryLiquidationsConfigurationRequest is a request message for * LiquidationsConfiguration. 
@@ -199,6 +246,24 @@ export interface QueryLiquidationsConfigurationResponse { export interface QueryLiquidationsConfigurationResponseSDKType { liquidations_config?: LiquidationsConfigSDKType; } +/** QueryNextClobPairIdRequest is a request message for the next clob pair id */ + +export interface QueryNextClobPairIdRequest {} +/** QueryNextClobPairIdRequest is a request message for the next clob pair id */ + +export interface QueryNextClobPairIdRequestSDKType {} +/** QueryNextClobPairIdResponse is a response message for the next clob pair id */ + +export interface QueryNextClobPairIdResponse { + /** QueryNextClobPairIdResponse is a response message for the next clob pair id */ + nextClobPairId: number; +} +/** QueryNextClobPairIdResponse is a response message for the next clob pair id */ + +export interface QueryNextClobPairIdResponseSDKType { + /** QueryNextClobPairIdResponse is a response message for the next clob pair id */ + next_clob_pair_id: number; +} /** * StreamOrderbookUpdatesRequest is a request message for the * StreamOrderbookUpdates method. @@ -207,6 +272,12 @@ export interface QueryLiquidationsConfigurationResponseSDKType { export interface StreamOrderbookUpdatesRequest { /** Clob pair ids to stream orderbook updates for. */ clobPairId: number[]; + /** Subaccount ids to stream subaccount updates for. */ + + subaccountIds: SubaccountId[]; + /** Market ids for price updates. */ + + marketIds: number[]; } /** * StreamOrderbookUpdatesRequest is a request message for the @@ -216,6 +287,12 @@ export interface StreamOrderbookUpdatesRequest { export interface StreamOrderbookUpdatesRequestSDKType { /** Clob pair ids to stream orderbook updates for. */ clob_pair_id: number[]; + /** Subaccount ids to stream subaccount updates for. */ + + subaccount_ids: SubaccountIdSDKType[]; + /** Market ids for price updates. 
*/ + + market_ids: number[]; } /** * StreamOrderbookUpdatesResponse is a response message for the @@ -223,33 +300,217 @@ export interface StreamOrderbookUpdatesRequestSDKType { */ export interface StreamOrderbookUpdatesResponse { - /** Orderbook updates for the clob pair. */ - updates: OffChainUpdateV1[]; + /** Batch of updates for the clob pair. */ + updates: StreamUpdate[]; +} +/** + * StreamOrderbookUpdatesResponse is a response message for the + * StreamOrderbookUpdates method. + */ + +export interface StreamOrderbookUpdatesResponseSDKType { + /** Batch of updates for the clob pair. */ + updates: StreamUpdateSDKType[]; +} +/** + * StreamUpdate is an update that will be pushed through the + * GRPC stream. + */ + +export interface StreamUpdate { + /** Block height of the update. */ + blockHeight: number; + /** Exec mode of the update. */ + + execMode: number; + orderbookUpdate?: StreamOrderbookUpdate; + orderFill?: StreamOrderbookFill; + takerOrder?: StreamTakerOrder; + subaccountUpdate?: StreamSubaccountUpdate; + priceUpdate?: StreamPriceUpdate; +} +/** + * StreamUpdate is an update that will be pushed through the + * GRPC stream. + */ + +export interface StreamUpdateSDKType { + /** Block height of the update. */ + block_height: number; + /** Exec mode of the update. */ + + exec_mode: number; + orderbook_update?: StreamOrderbookUpdateSDKType; + order_fill?: StreamOrderbookFillSDKType; + taker_order?: StreamTakerOrderSDKType; + subaccount_update?: StreamSubaccountUpdateSDKType; + price_update?: StreamPriceUpdateSDKType; +} +/** + * StreamOrderbookUpdate provides information on an orderbook update. Used in + * the full node GRPC stream. + */ + +export interface StreamOrderbookUpdate { /** * Snapshot indicates if the response is from a snapshot of the orderbook. - * This is true for the initial response and false for all subsequent updates. 
- * Note that if the snapshot is true, then all previous entries should be + * All updates should be ignored until snapshot is recieved. + * If the snapshot is true, then all previous entries should be * discarded and the orderbook should be resynced. */ - snapshot: boolean; + /** + * Orderbook updates for the clob pair. Can contain order place, removals, + * or updates. + */ + + updates: OffChainUpdateV1[]; } /** - * StreamOrderbookUpdatesResponse is a response message for the - * StreamOrderbookUpdates method. + * StreamOrderbookUpdate provides information on an orderbook update. Used in + * the full node GRPC stream. */ -export interface StreamOrderbookUpdatesResponseSDKType { - /** Orderbook updates for the clob pair. */ - updates: OffChainUpdateV1SDKType[]; +export interface StreamOrderbookUpdateSDKType { /** * Snapshot indicates if the response is from a snapshot of the orderbook. - * This is true for the initial response and false for all subsequent updates. - * Note that if the snapshot is true, then all previous entries should be + * All updates should be ignored until snapshot is recieved. + * If the snapshot is true, then all previous entries should be * discarded and the orderbook should be resynced. */ - snapshot: boolean; + /** + * Orderbook updates for the clob pair. Can contain order place, removals, + * or updates. + */ + + updates: OffChainUpdateV1SDKType[]; +} +/** + * StreamOrderbookFill provides information on an orderbook fill. Used in + * the full node GRPC stream. + */ + +export interface StreamOrderbookFill { + /** + * Clob match. Provides information on which orders were matched + * and the type of order. + */ + clobMatch?: ClobMatch; + /** + * All orders involved in the specified clob match. Used to look up + * price of a match through a given maker order id. + */ + + orders: Order[]; + /** Resulting fill amounts for each order in the orders array. 
*/ + + fillAmounts: Long[]; +} +/** + * StreamOrderbookFill provides information on an orderbook fill. Used in + * the full node GRPC stream. + */ + +export interface StreamOrderbookFillSDKType { + /** + * Clob match. Provides information on which orders were matched + * and the type of order. + */ + clob_match?: ClobMatchSDKType; + /** + * All orders involved in the specified clob match. Used to look up + * price of a match through a given maker order id. + */ + + orders: OrderSDKType[]; + /** Resulting fill amounts for each order in the orders array. */ + + fill_amounts: Long[]; +} +/** + * StreamTakerOrder provides information on a taker order that was attempted + * to be matched on the orderbook. + * It is intended to be used only in full node streaming. + */ + +export interface StreamTakerOrder { + order?: Order; + liquidationOrder?: StreamLiquidationOrder; + /** + * Information on the taker order after it is matched on the book, + * either successfully or unsuccessfully. + */ + + takerOrderStatus?: StreamTakerOrderStatus; +} +/** + * StreamTakerOrder provides information on a taker order that was attempted + * to be matched on the orderbook. + * It is intended to be used only in full node streaming. + */ + +export interface StreamTakerOrderSDKType { + order?: OrderSDKType; + liquidation_order?: StreamLiquidationOrderSDKType; + /** + * Information on the taker order after it is matched on the book, + * either successfully or unsuccessfully. + */ + + taker_order_status?: StreamTakerOrderStatusSDKType; +} +/** + * StreamTakerOrderStatus is a representation of a taker order + * after it is attempted to be matched on the orderbook. + * It is intended to be used only in full node streaming. + */ + +export interface StreamTakerOrderStatus { + /** + * The state of the taker order after attempting to match it against the + * orderbook. 
Possible enum values can be found here: + * https://github.com/dydxprotocol/v4-chain/blob/main/protocol/x/clob/types/orderbook.go#L105 + */ + orderStatus: number; + /** The amount of remaining (non-matched) base quantums of this taker order. */ + + remainingQuantums: Long; + /** + * The amount of base quantums that were *optimistically* filled for this + * taker order when the order is matched against the orderbook. Note that if + * any quantums of this order were optimistically filled or filled in state + * before this invocation of the matching loop, this value will not include + * them. + */ + + optimisticallyFilledQuantums: Long; +} +/** + * StreamTakerOrderStatus is a representation of a taker order + * after it is attempted to be matched on the orderbook. + * It is intended to be used only in full node streaming. + */ + +export interface StreamTakerOrderStatusSDKType { + /** + * The state of the taker order after attempting to match it against the + * orderbook. Possible enum values can be found here: + * https://github.com/dydxprotocol/v4-chain/blob/main/protocol/x/clob/types/orderbook.go#L105 + */ + order_status: number; + /** The amount of remaining (non-matched) base quantums of this taker order. */ + + remaining_quantums: Long; + /** + * The amount of base quantums that were *optimistically* filled for this + * taker order when the order is matched against the orderbook. Note that if + * any quantums of this order were optimistically filled or filled in state + * before this invocation of the matching loop, this value will not include + * them. 
+ */ + + optimistically_filled_quantums: Long; } function createBaseQueryGetClobPairRequest(): QueryGetClobPairRequest { @@ -765,6 +1026,116 @@ export const QueryBlockRateLimitConfigurationResponse = { }; +function createBaseQueryStatefulOrderRequest(): QueryStatefulOrderRequest { + return { + orderId: undefined + }; +} + +export const QueryStatefulOrderRequest = { + encode(message: QueryStatefulOrderRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.orderId !== undefined) { + OrderId.encode(message.orderId, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryStatefulOrderRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryStatefulOrderRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.orderId = OrderId.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryStatefulOrderRequest { + const message = createBaseQueryStatefulOrderRequest(); + message.orderId = object.orderId !== undefined && object.orderId !== null ? 
OrderId.fromPartial(object.orderId) : undefined; + return message; + } + +}; + +function createBaseQueryStatefulOrderResponse(): QueryStatefulOrderResponse { + return { + orderPlacement: undefined, + fillAmount: Long.UZERO, + triggered: false + }; +} + +export const QueryStatefulOrderResponse = { + encode(message: QueryStatefulOrderResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.orderPlacement !== undefined) { + LongTermOrderPlacement.encode(message.orderPlacement, writer.uint32(10).fork()).ldelim(); + } + + if (!message.fillAmount.isZero()) { + writer.uint32(16).uint64(message.fillAmount); + } + + if (message.triggered === true) { + writer.uint32(24).bool(message.triggered); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryStatefulOrderResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryStatefulOrderResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.orderPlacement = LongTermOrderPlacement.decode(reader, reader.uint32()); + break; + + case 2: + message.fillAmount = (reader.uint64() as Long); + break; + + case 3: + message.triggered = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryStatefulOrderResponse { + const message = createBaseQueryStatefulOrderResponse(); + message.orderPlacement = object.orderPlacement !== undefined && object.orderPlacement !== null ? LongTermOrderPlacement.fromPartial(object.orderPlacement) : undefined; + message.fillAmount = object.fillAmount !== undefined && object.fillAmount !== null ? Long.fromValue(object.fillAmount) : Long.UZERO; + message.triggered = object.triggered ?? 
false; + return message; + } + +}; + function createBaseQueryLiquidationsConfigurationRequest(): QueryLiquidationsConfigurationRequest { return {}; } @@ -844,46 +1215,24 @@ export const QueryLiquidationsConfigurationResponse = { }; -function createBaseStreamOrderbookUpdatesRequest(): StreamOrderbookUpdatesRequest { - return { - clobPairId: [] - }; +function createBaseQueryNextClobPairIdRequest(): QueryNextClobPairIdRequest { + return {}; } -export const StreamOrderbookUpdatesRequest = { - encode(message: StreamOrderbookUpdatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - writer.uint32(10).fork(); - - for (const v of message.clobPairId) { - writer.uint32(v); - } - - writer.ldelim(); +export const QueryNextClobPairIdRequest = { + encode(_: QueryNextClobPairIdRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookUpdatesRequest { + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextClobPairIdRequest { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStreamOrderbookUpdatesRequest(); + const message = createBaseQueryNextClobPairIdRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if ((tag & 7) === 2) { - const end2 = reader.uint32() + reader.pos; - - while (reader.pos < end2) { - message.clobPairId.push(reader.uint32()); - } - } else { - message.clobPairId.push(reader.uint32()); - } - - break; - default: reader.skipType(tag & 7); break; @@ -893,49 +1242,39 @@ export const StreamOrderbookUpdatesRequest = { return message; }, - fromPartial(object: DeepPartial): StreamOrderbookUpdatesRequest { - const message = createBaseStreamOrderbookUpdatesRequest(); - message.clobPairId = object.clobPairId?.map(e => e) || []; + fromPartial(_: DeepPartial): QueryNextClobPairIdRequest { + const message = createBaseQueryNextClobPairIdRequest(); return message; } }; -function createBaseStreamOrderbookUpdatesResponse(): StreamOrderbookUpdatesResponse { +function createBaseQueryNextClobPairIdResponse(): QueryNextClobPairIdResponse { return { - updates: [], - snapshot: false + nextClobPairId: 0 }; } -export const StreamOrderbookUpdatesResponse = { - encode(message: StreamOrderbookUpdatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.updates) { - OffChainUpdateV1.encode(v!, writer.uint32(10).fork()).ldelim(); - } - - if (message.snapshot === true) { - writer.uint32(16).bool(message.snapshot); +export const QueryNextClobPairIdResponse = { + encode(message: QueryNextClobPairIdResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextClobPairId !== 0) { + writer.uint32(8).uint32(message.nextClobPairId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookUpdatesResponse { + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextClobPairIdResponse { const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseStreamOrderbookUpdatesResponse(); + const message = createBaseQueryNextClobPairIdResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.updates.push(OffChainUpdateV1.decode(reader, reader.uint32())); - break; - - case 2: - message.snapshot = reader.bool(); + message.nextClobPairId = reader.uint32(); break; default: @@ -947,10 +1286,511 @@ export const StreamOrderbookUpdatesResponse = { return message; }, - fromPartial(object: DeepPartial): StreamOrderbookUpdatesResponse { - const message = createBaseStreamOrderbookUpdatesResponse(); - message.updates = object.updates?.map(e => OffChainUpdateV1.fromPartial(e)) || []; - message.snapshot = object.snapshot ?? false; + fromPartial(object: DeepPartial): QueryNextClobPairIdResponse { + const message = createBaseQueryNextClobPairIdResponse(); + message.nextClobPairId = object.nextClobPairId ?? 0; + return message; + } + +}; + +function createBaseStreamOrderbookUpdatesRequest(): StreamOrderbookUpdatesRequest { + return { + clobPairId: [], + subaccountIds: [], + marketIds: [] + }; +} + +export const StreamOrderbookUpdatesRequest = { + encode(message: StreamOrderbookUpdatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.clobPairId) { + writer.uint32(v); + } + + writer.ldelim(); + + for (const v of message.subaccountIds) { + SubaccountId.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + writer.uint32(26).fork(); + + for (const v of message.marketIds) { + writer.uint32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookUpdatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamOrderbookUpdatesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.clobPairId.push(reader.uint32()); + } + } else { + message.clobPairId.push(reader.uint32()); + } + + break; + + case 2: + message.subaccountIds.push(SubaccountId.decode(reader, reader.uint32())); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.marketIds.push(reader.uint32()); + } + } else { + message.marketIds.push(reader.uint32()); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamOrderbookUpdatesRequest { + const message = createBaseStreamOrderbookUpdatesRequest(); + message.clobPairId = object.clobPairId?.map(e => e) || []; + message.subaccountIds = object.subaccountIds?.map(e => SubaccountId.fromPartial(e)) || []; + message.marketIds = object.marketIds?.map(e => e) || []; + return message; + } + +}; + +function createBaseStreamOrderbookUpdatesResponse(): StreamOrderbookUpdatesResponse { + return { + updates: [] + }; +} + +export const StreamOrderbookUpdatesResponse = { + encode(message: StreamOrderbookUpdatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.updates) { + StreamUpdate.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookUpdatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamOrderbookUpdatesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.updates.push(StreamUpdate.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamOrderbookUpdatesResponse { + const message = createBaseStreamOrderbookUpdatesResponse(); + message.updates = object.updates?.map(e => StreamUpdate.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseStreamUpdate(): StreamUpdate { + return { + blockHeight: 0, + execMode: 0, + orderbookUpdate: undefined, + orderFill: undefined, + takerOrder: undefined, + subaccountUpdate: undefined, + priceUpdate: undefined + }; +} + +export const StreamUpdate = { + encode(message: StreamUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockHeight !== 0) { + writer.uint32(8).uint32(message.blockHeight); + } + + if (message.execMode !== 0) { + writer.uint32(16).uint32(message.execMode); + } + + if (message.orderbookUpdate !== undefined) { + StreamOrderbookUpdate.encode(message.orderbookUpdate, writer.uint32(26).fork()).ldelim(); + } + + if (message.orderFill !== undefined) { + StreamOrderbookFill.encode(message.orderFill, writer.uint32(34).fork()).ldelim(); + } + + if (message.takerOrder !== undefined) { + StreamTakerOrder.encode(message.takerOrder, writer.uint32(42).fork()).ldelim(); + } + + if (message.subaccountUpdate !== undefined) { + StreamSubaccountUpdate.encode(message.subaccountUpdate, writer.uint32(50).fork()).ldelim(); + } + + if (message.priceUpdate !== undefined) { + StreamPriceUpdate.encode(message.priceUpdate, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamUpdate { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStreamUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockHeight = reader.uint32(); + break; + + case 2: + message.execMode = reader.uint32(); + break; + + case 3: + message.orderbookUpdate = StreamOrderbookUpdate.decode(reader, reader.uint32()); + break; + + case 4: + message.orderFill = StreamOrderbookFill.decode(reader, reader.uint32()); + break; + + case 5: + message.takerOrder = StreamTakerOrder.decode(reader, reader.uint32()); + break; + + case 6: + message.subaccountUpdate = StreamSubaccountUpdate.decode(reader, reader.uint32()); + break; + + case 7: + message.priceUpdate = StreamPriceUpdate.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamUpdate { + const message = createBaseStreamUpdate(); + message.blockHeight = object.blockHeight ?? 0; + message.execMode = object.execMode ?? 0; + message.orderbookUpdate = object.orderbookUpdate !== undefined && object.orderbookUpdate !== null ? StreamOrderbookUpdate.fromPartial(object.orderbookUpdate) : undefined; + message.orderFill = object.orderFill !== undefined && object.orderFill !== null ? StreamOrderbookFill.fromPartial(object.orderFill) : undefined; + message.takerOrder = object.takerOrder !== undefined && object.takerOrder !== null ? StreamTakerOrder.fromPartial(object.takerOrder) : undefined; + message.subaccountUpdate = object.subaccountUpdate !== undefined && object.subaccountUpdate !== null ? StreamSubaccountUpdate.fromPartial(object.subaccountUpdate) : undefined; + message.priceUpdate = object.priceUpdate !== undefined && object.priceUpdate !== null ? 
StreamPriceUpdate.fromPartial(object.priceUpdate) : undefined; + return message; + } + +}; + +function createBaseStreamOrderbookUpdate(): StreamOrderbookUpdate { + return { + snapshot: false, + updates: [] + }; +} + +export const StreamOrderbookUpdate = { + encode(message: StreamOrderbookUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.snapshot === true) { + writer.uint32(8).bool(message.snapshot); + } + + for (const v of message.updates) { + OffChainUpdateV1.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStreamOrderbookUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.snapshot = reader.bool(); + break; + + case 2: + message.updates.push(OffChainUpdateV1.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamOrderbookUpdate { + const message = createBaseStreamOrderbookUpdate(); + message.snapshot = object.snapshot ?? 
false; + message.updates = object.updates?.map(e => OffChainUpdateV1.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseStreamOrderbookFill(): StreamOrderbookFill { + return { + clobMatch: undefined, + orders: [], + fillAmounts: [] + }; +} + +export const StreamOrderbookFill = { + encode(message: StreamOrderbookFill, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clobMatch !== undefined) { + ClobMatch.encode(message.clobMatch, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.orders) { + Order.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + writer.uint32(26).fork(); + + for (const v of message.fillAmounts) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamOrderbookFill { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStreamOrderbookFill(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clobMatch = ClobMatch.decode(reader, reader.uint32()); + break; + + case 2: + message.orders.push(Order.decode(reader, reader.uint32())); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.fillAmounts.push((reader.uint64() as Long)); + } + } else { + message.fillAmounts.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamOrderbookFill { + const message = createBaseStreamOrderbookFill(); + message.clobMatch = object.clobMatch !== undefined && object.clobMatch !== null ? 
ClobMatch.fromPartial(object.clobMatch) : undefined; + message.orders = object.orders?.map(e => Order.fromPartial(e)) || []; + message.fillAmounts = object.fillAmounts?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseStreamTakerOrder(): StreamTakerOrder { + return { + order: undefined, + liquidationOrder: undefined, + takerOrderStatus: undefined + }; +} + +export const StreamTakerOrder = { + encode(message: StreamTakerOrder, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.order !== undefined) { + Order.encode(message.order, writer.uint32(10).fork()).ldelim(); + } + + if (message.liquidationOrder !== undefined) { + StreamLiquidationOrder.encode(message.liquidationOrder, writer.uint32(18).fork()).ldelim(); + } + + if (message.takerOrderStatus !== undefined) { + StreamTakerOrderStatus.encode(message.takerOrderStatus, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamTakerOrder { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStreamTakerOrder(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.order = Order.decode(reader, reader.uint32()); + break; + + case 2: + message.liquidationOrder = StreamLiquidationOrder.decode(reader, reader.uint32()); + break; + + case 3: + message.takerOrderStatus = StreamTakerOrderStatus.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamTakerOrder { + const message = createBaseStreamTakerOrder(); + message.order = object.order !== undefined && object.order !== null ? 
Order.fromPartial(object.order) : undefined; + message.liquidationOrder = object.liquidationOrder !== undefined && object.liquidationOrder !== null ? StreamLiquidationOrder.fromPartial(object.liquidationOrder) : undefined; + message.takerOrderStatus = object.takerOrderStatus !== undefined && object.takerOrderStatus !== null ? StreamTakerOrderStatus.fromPartial(object.takerOrderStatus) : undefined; + return message; + } + +}; + +function createBaseStreamTakerOrderStatus(): StreamTakerOrderStatus { + return { + orderStatus: 0, + remainingQuantums: Long.UZERO, + optimisticallyFilledQuantums: Long.UZERO + }; +} + +export const StreamTakerOrderStatus = { + encode(message: StreamTakerOrderStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.orderStatus !== 0) { + writer.uint32(8).uint32(message.orderStatus); + } + + if (!message.remainingQuantums.isZero()) { + writer.uint32(16).uint64(message.remainingQuantums); + } + + if (!message.optimisticallyFilledQuantums.isZero()) { + writer.uint32(24).uint64(message.optimisticallyFilledQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamTakerOrderStatus { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStreamTakerOrderStatus(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.orderStatus = reader.uint32(); + break; + + case 2: + message.remainingQuantums = (reader.uint64() as Long); + break; + + case 3: + message.optimisticallyFilledQuantums = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamTakerOrderStatus { + const message = createBaseStreamTakerOrderStatus(); + message.orderStatus = object.orderStatus ?? 
0; + message.remainingQuantums = object.remainingQuantums !== undefined && object.remainingQuantums !== null ? Long.fromValue(object.remainingQuantums) : Long.UZERO; + message.optimisticallyFilledQuantums = object.optimisticallyFilledQuantums !== undefined && object.optimisticallyFilledQuantums !== null ? Long.fromValue(object.optimisticallyFilledQuantums) : Long.UZERO; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/streaming.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/streaming.ts new file mode 100644 index 00000000000..d0ad4cd6ed5 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/clob/streaming.ts @@ -0,0 +1,96 @@ +import { StreamOrderbookFill, StreamOrderbookFillSDKType, StreamOrderbookUpdate, StreamOrderbookUpdateSDKType } from "./query"; +import { StreamSubaccountUpdate, StreamSubaccountUpdateSDKType } from "../subaccounts/streaming"; +import { StreamPriceUpdate, StreamPriceUpdateSDKType } from "../prices/streaming"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** StagedFinalizeBlockEvent is an event staged during `FinalizeBlock`. */ + +export interface StagedFinalizeBlockEvent { + orderFill?: StreamOrderbookFill; + subaccountUpdate?: StreamSubaccountUpdate; + orderbookUpdate?: StreamOrderbookUpdate; + priceUpdate?: StreamPriceUpdate; +} +/** StagedFinalizeBlockEvent is an event staged during `FinalizeBlock`. 
*/ + +export interface StagedFinalizeBlockEventSDKType { + order_fill?: StreamOrderbookFillSDKType; + subaccount_update?: StreamSubaccountUpdateSDKType; + orderbook_update?: StreamOrderbookUpdateSDKType; + price_update?: StreamPriceUpdateSDKType; +} + +function createBaseStagedFinalizeBlockEvent(): StagedFinalizeBlockEvent { + return { + orderFill: undefined, + subaccountUpdate: undefined, + orderbookUpdate: undefined, + priceUpdate: undefined + }; +} + +export const StagedFinalizeBlockEvent = { + encode(message: StagedFinalizeBlockEvent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.orderFill !== undefined) { + StreamOrderbookFill.encode(message.orderFill, writer.uint32(10).fork()).ldelim(); + } + + if (message.subaccountUpdate !== undefined) { + StreamSubaccountUpdate.encode(message.subaccountUpdate, writer.uint32(18).fork()).ldelim(); + } + + if (message.orderbookUpdate !== undefined) { + StreamOrderbookUpdate.encode(message.orderbookUpdate, writer.uint32(26).fork()).ldelim(); + } + + if (message.priceUpdate !== undefined) { + StreamPriceUpdate.encode(message.priceUpdate, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StagedFinalizeBlockEvent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStagedFinalizeBlockEvent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.orderFill = StreamOrderbookFill.decode(reader, reader.uint32()); + break; + + case 2: + message.subaccountUpdate = StreamSubaccountUpdate.decode(reader, reader.uint32()); + break; + + case 3: + message.orderbookUpdate = StreamOrderbookUpdate.decode(reader, reader.uint32()); + break; + + case 4: + message.priceUpdate = StreamPriceUpdate.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StagedFinalizeBlockEvent { + const message = createBaseStagedFinalizeBlockEvent(); + message.orderFill = object.orderFill !== undefined && object.orderFill !== null ? StreamOrderbookFill.fromPartial(object.orderFill) : undefined; + message.subaccountUpdate = object.subaccountUpdate !== undefined && object.subaccountUpdate !== null ? StreamSubaccountUpdate.fromPartial(object.subaccountUpdate) : undefined; + message.orderbookUpdate = object.orderbookUpdate !== undefined && object.orderbookUpdate !== null ? StreamOrderbookUpdate.fromPartial(object.orderbookUpdate) : undefined; + message.priceUpdate = object.priceUpdate !== undefined && object.priceUpdate !== null ? 
StreamPriceUpdate.fromPartial(object.priceUpdate) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/events/events.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/events/events.ts index 2c6482734d6..f439504077d 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/events/events.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/events/events.ts @@ -1,6 +1,8 @@ import { IndexerSubaccountId, IndexerSubaccountIdSDKType, IndexerPerpetualPosition, IndexerPerpetualPositionSDKType, IndexerAssetPosition, IndexerAssetPositionSDKType } from "../protocol/v1/subaccount"; import { IndexerOrder, IndexerOrderSDKType, IndexerOrderId, IndexerOrderIdSDKType, ClobPairStatus, ClobPairStatusSDKType } from "../protocol/v1/clob"; import { OrderRemovalReason, OrderRemovalReasonSDKType } from "../shared/removal_reason"; +import { PerpetualMarketType, PerpetualMarketTypeSDKType } from "../protocol/v1/perpetual"; +import { VaultStatus, VaultStatusSDKType } from "../protocol/v1/vault"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial, Long } from "../../../helpers"; /** Type is the type for funding values. */ @@ -414,6 +416,9 @@ export interface OrderFillEventV1 { /** Total filled of the taker order in base quantums. */ totalFilledTaker: Long; + /** rev share for affiliates in USDC quantums. */ + + affiliateRevShare: Long; } /** * OrderFillEvent message contains all the information from an order match in @@ -443,6 +448,9 @@ export interface OrderFillEventV1SDKType { /** Total filled of the taker order in base quantums. */ total_filled_taker: Long; + /** rev share for affiliates in USDC quantums. 
*/ + + affiliate_rev_share: Long; } /** * DeleveragingEvent message contains all the information for a deleveraging @@ -618,11 +626,13 @@ export interface SubaccountUpdateEventV1SDKType { */ export interface StatefulOrderEventV1 { + /** @deprecated */ orderPlace?: StatefulOrderEventV1_StatefulOrderPlacementV1; orderRemoval?: StatefulOrderEventV1_StatefulOrderRemovalV1; conditionalOrderPlacement?: StatefulOrderEventV1_ConditionalOrderPlacementV1; conditionalOrderTriggered?: StatefulOrderEventV1_ConditionalOrderTriggeredV1; longTermOrderPlacement?: StatefulOrderEventV1_LongTermOrderPlacementV1; + orderReplacement?: StatefulOrderEventV1_LongTermOrderReplacementV1; } /** * StatefulOrderEvent message contains information about a change to a stateful @@ -632,18 +642,26 @@ export interface StatefulOrderEventV1 { */ export interface StatefulOrderEventV1SDKType { + /** @deprecated */ order_place?: StatefulOrderEventV1_StatefulOrderPlacementV1SDKType; order_removal?: StatefulOrderEventV1_StatefulOrderRemovalV1SDKType; conditional_order_placement?: StatefulOrderEventV1_ConditionalOrderPlacementV1SDKType; conditional_order_triggered?: StatefulOrderEventV1_ConditionalOrderTriggeredV1SDKType; long_term_order_placement?: StatefulOrderEventV1_LongTermOrderPlacementV1SDKType; + order_replacement?: StatefulOrderEventV1_LongTermOrderReplacementV1SDKType; } -/** A stateful order placement contains an order. */ +/** + * A stateful order placement contains an order. + * Deprecated in favor of LongTermOrderPlacementV1. + */ export interface StatefulOrderEventV1_StatefulOrderPlacementV1 { order?: IndexerOrder; } -/** A stateful order placement contains an order. */ +/** + * A stateful order placement contains an order. + * Deprecated in favor of LongTermOrderPlacementV1. 
+ */ export interface StatefulOrderEventV1_StatefulOrderPlacementV1SDKType { order?: IndexerOrderSDKType; @@ -708,6 +726,20 @@ export interface StatefulOrderEventV1_LongTermOrderPlacementV1 { export interface StatefulOrderEventV1_LongTermOrderPlacementV1SDKType { order?: IndexerOrderSDKType; } +/** A long term order replacement contains an old order ID and the new order. */ + +export interface StatefulOrderEventV1_LongTermOrderReplacementV1 { + /** vault replaces orders with a different order ID */ + oldOrderId?: IndexerOrderId; + order?: IndexerOrder; +} +/** A long term order replacement contains an old order ID and the new order. */ + +export interface StatefulOrderEventV1_LongTermOrderReplacementV1SDKType { + /** vault replaces orders with a different order ID */ + old_order_id?: IndexerOrderIdSDKType; + order?: IndexerOrderSDKType; +} /** * AssetCreateEventV1 message contains all the information about an new Asset on * the dYdX chain. @@ -779,8 +811,12 @@ export interface AssetCreateEventV1SDKType { /** * PerpetualMarketCreateEventV1 message contains all the information about a * new Perpetual Market on the dYdX chain. + * Deprecated. See PerpetualMarketCreateEventV2 for the most up to date message + * for the event to create a new Perpetual Market. */ +/** @deprecated */ + export interface PerpetualMarketCreateEventV1 { /** * Unique Perpetual id. @@ -849,8 +885,12 @@ export interface PerpetualMarketCreateEventV1 { /** * PerpetualMarketCreateEventV1 message contains all the information about a * new Perpetual Market on the dYdX chain. + * Deprecated. See PerpetualMarketCreateEventV2 for the most up to date message + * for the event to create a new Perpetual Market. */ +/** @deprecated */ + export interface PerpetualMarketCreateEventV1SDKType { /** * Unique Perpetual id. 
@@ -917,86 +957,37 @@ export interface PerpetualMarketCreateEventV1SDKType { liquidity_tier: number; } /** - * LiquidityTierUpsertEventV1 message contains all the information to - * create/update a Liquidity Tier on the dYdX chain. + * PerpetualMarketCreateEventV2 message contains all the information about a + * new Perpetual Market on the dYdX chain. + * Deprecated. Use PerpetualMarketCreateEventV3 for the most up to date message */ -export interface LiquidityTierUpsertEventV1 { - /** Unique id. */ - id: number; - /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ - - name: string; - /** - * The margin fraction needed to open a position. - * In parts-per-million. - */ - - initialMarginPpm: number; - /** - * The fraction of the initial-margin that the maintenance-margin is, - * e.g. 50%. In parts-per-million. - */ +/** @deprecated */ - maintenanceFractionPpm: number; +export interface PerpetualMarketCreateEventV2 { /** - * The maximum position size at which the margin requirements are - * not increased over the default values. Above this position size, - * the margin requirements increase at a rate of sqrt(size). - * - * Deprecated since v3.x. + * Unique Perpetual id. + * Defined in perpetuals.perpetual */ - - /** @deprecated */ - - basePositionNotional: Long; -} -/** - * LiquidityTierUpsertEventV1 message contains all the information to - * create/update a Liquidity Tier on the dYdX chain. - */ - -export interface LiquidityTierUpsertEventV1SDKType { - /** Unique id. */ id: number; - /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ - - name: string; /** - * The margin fraction needed to open a position. - * In parts-per-million. + * Unique clob pair Id associated with this perpetual market + * Defined in clob.clob_pair */ - initial_margin_ppm: number; + clobPairId: number; /** - * The fraction of the initial-margin that the maintenance-margin is, - * e.g. 50%. In parts-per-million. + * The name of the `Perpetual` (e.g. 
`BTC-USD`). + * Defined in perpetuals.perpetual */ - maintenance_fraction_ppm: number; + ticker: string; /** - * The maximum position size at which the margin requirements are - * not increased over the default values. Above this position size, - * the margin requirements increase at a rate of sqrt(size). - * - * Deprecated since v3.x. + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual */ - /** @deprecated */ - - base_position_notional: Long; -} -/** - * UpdateClobPairEventV1 message contains all the information about an update to - * a clob pair on the dYdX chain. - */ - -export interface UpdateClobPairEventV1 { - /** - * Unique clob pair Id associated with this perpetual market - * Defined in clob.clob_pair - */ - clobPairId: number; + marketId: number; /** Status of the CLOB */ status: ClobPairStatus; @@ -1007,6 +998,15 @@ export interface UpdateClobPairEventV1 { */ quantumConversionExponent: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomicResolution: number; /** * Defines the tick size of the orderbook by defining how many subticks * are in one tick. That is, the subticks of any valid order must be a @@ -1022,18 +1022,48 @@ export interface UpdateClobPairEventV1 { */ stepBaseQuantums: Long; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidityTier: number; + /** Market type of the perpetual. */ + + marketType: PerpetualMarketType; } /** - * UpdateClobPairEventV1 message contains all the information about an update to - * a clob pair on the dYdX chain. + * PerpetualMarketCreateEventV2 message contains all the information about a + * new Perpetual Market on the dYdX chain. + * Deprecated. 
Use PerpetualMarketCreateEventV3 for the most up to date message */ -export interface UpdateClobPairEventV1SDKType { +/** @deprecated */ + +export interface PerpetualMarketCreateEventV2SDKType { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; /** * Unique clob pair Id associated with this perpetual market * Defined in clob.clob_pair */ + clob_pair_id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + market_id: number; /** Status of the CLOB */ status: ClobPairStatusSDKType; @@ -1044,6 +1074,15 @@ export interface UpdateClobPairEventV1SDKType { */ quantum_conversion_exponent: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomic_resolution: number; /** * Defines the tick size of the orderbook by defining how many subticks * are in one tick. That is, the subticks of any valid order must be a @@ -1059,18 +1098,33 @@ export interface UpdateClobPairEventV1SDKType { */ step_base_quantums: Long; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidity_tier: number; + /** Market type of the perpetual. */ + + market_type: PerpetualMarketTypeSDKType; } /** - * UpdatePerpetualEventV1 message contains all the information about an update - * to a perpetual on the dYdX chain. + * PerpetualMarketCreateEventV3 message contains all the information about a + * new Perpetual Market on the dYdX chain. */ -export interface UpdatePerpetualEventV1 { +export interface PerpetualMarketCreateEventV3 { /** * Unique Perpetual id. 
* Defined in perpetuals.perpetual */ id: number; + /** + * Unique clob pair Id associated with this perpetual market + * Defined in clob.clob_pair + */ + + clobPairId: number; /** * The name of the `Perpetual` (e.g. `BTC-USD`). * Defined in perpetuals.perpetual @@ -1083,6 +1137,16 @@ export interface UpdatePerpetualEventV1 { */ marketId: number; + /** Status of the CLOB */ + + status: ClobPairStatus; + /** + * `10^Exponent` gives the number of QuoteQuantums traded per BaseQuantum + * per Subtick. + * Defined in clob.clob_pair + */ + + quantumConversionExponent: number; /** * The exponent for converting an atomic amount (`size = 1`) * to a full coin. For example, if `AtomicResolution = -8` @@ -1092,24 +1156,51 @@ export interface UpdatePerpetualEventV1 { */ atomicResolution: number; + /** + * Defines the tick size of the orderbook by defining how many subticks + * are in one tick. That is, the subticks of any valid order must be a + * multiple of this value. Generally this value should start `>= 100`to + * allow room for decreasing it. + * Defined in clob.clob_pair + */ + + subticksPerTick: number; + /** + * Minimum increment in the size of orders on the CLOB, in base quantums. + * Defined in clob.clob_pair + */ + + stepBaseQuantums: Long; /** * The liquidity_tier that this perpetual is associated with. * Defined in perpetuals.perpetual */ liquidityTier: number; + /** Market type of the perpetual. */ + + marketType: PerpetualMarketType; + /** Default 8hr funding rate in parts-per-million. */ + + defaultFunding8hrPpm: number; } /** - * UpdatePerpetualEventV1 message contains all the information about an update - * to a perpetual on the dYdX chain. + * PerpetualMarketCreateEventV3 message contains all the information about a + * new Perpetual Market on the dYdX chain. */ -export interface UpdatePerpetualEventV1SDKType { +export interface PerpetualMarketCreateEventV3SDKType { /** * Unique Perpetual id. 
* Defined in perpetuals.perpetual */ id: number; + /** + * Unique clob pair Id associated with this perpetual market + * Defined in clob.clob_pair + */ + + clob_pair_id: number; /** * The name of the `Perpetual` (e.g. `BTC-USD`). * Defined in perpetuals.perpetual @@ -1122,6 +1213,16 @@ export interface UpdatePerpetualEventV1SDKType { */ market_id: number; + /** Status of the CLOB */ + + status: ClobPairStatusSDKType; + /** + * `10^Exponent` gives the number of QuoteQuantums traded per BaseQuantum + * per Subtick. + * Defined in clob.clob_pair + */ + + quantum_conversion_exponent: number; /** * The exponent for converting an atomic amount (`size = 1`) * to a full coin. For example, if `AtomicResolution = -8` @@ -1131,106 +1232,1511 @@ export interface UpdatePerpetualEventV1SDKType { */ atomic_resolution: number; + /** + * Defines the tick size of the orderbook by defining how many subticks + * are in one tick. That is, the subticks of any valid order must be a + * multiple of this value. Generally this value should start `>= 100`to + * allow room for decreasing it. + * Defined in clob.clob_pair + */ + + subticks_per_tick: number; + /** + * Minimum increment in the size of orders on the CLOB, in base quantums. + * Defined in clob.clob_pair + */ + + step_base_quantums: Long; /** * The liquidity_tier that this perpetual is associated with. * Defined in perpetuals.perpetual */ liquidity_tier: number; -} -/** - * TradingRewardsEventV1 is communicates all trading rewards for all accounts - * that receive trade rewards in the block. - */ + /** Market type of the perpetual. */ -export interface TradingRewardsEventV1 { - /** The list of all trading rewards in the block. */ - tradingRewards: AddressTradingReward[]; -} -/** - * TradingRewardsEventV1 is communicates all trading rewards for all accounts - * that receive trade rewards in the block. - */ + market_type: PerpetualMarketTypeSDKType; + /** Default 8hr funding rate in parts-per-million. 
*/ -export interface TradingRewardsEventV1SDKType { - /** The list of all trading rewards in the block. */ - trading_rewards: AddressTradingRewardSDKType[]; + default_funding8hr_ppm: number; } /** - * AddressTradingReward contains info on an instance of an address receiving a - * reward + * LiquidityTierUpsertEventV1 message contains all the information to + * create/update a Liquidity Tier on the dYdX chain. */ -export interface AddressTradingReward { - /** The address of the wallet that will receive the trading reward. */ - owner: string; +export interface LiquidityTierUpsertEventV1 { + /** Unique id. */ + id: number; + /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ + + name: string; /** - * The amount of trading rewards earned by the address above in denoms. 1e18 - * denoms is equivalent to a single coin. + * The margin fraction needed to open a position. + * In parts-per-million. */ - denomAmount: Uint8Array; -} -/** - * AddressTradingReward contains info on an instance of an address receiving a - * reward + initialMarginPpm: number; + /** + * The fraction of the initial-margin that the maintenance-margin is, + * e.g. 50%. In parts-per-million. + */ + + maintenanceFractionPpm: number; + /** + * The maximum position size at which the margin requirements are + * not increased over the default values. Above this position size, + * the margin requirements increase at a rate of sqrt(size). + * + * Deprecated since v3.x. + */ + + /** @deprecated */ + + basePositionNotional: Long; +} +/** + * LiquidityTierUpsertEventV1 message contains all the information to + * create/update a Liquidity Tier on the dYdX chain. + */ + +export interface LiquidityTierUpsertEventV1SDKType { + /** Unique id. */ + id: number; + /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ + + name: string; + /** + * The margin fraction needed to open a position. + * In parts-per-million. 
+ */ + + initial_margin_ppm: number; + /** + * The fraction of the initial-margin that the maintenance-margin is, + * e.g. 50%. In parts-per-million. + */ + + maintenance_fraction_ppm: number; + /** + * The maximum position size at which the margin requirements are + * not increased over the default values. Above this position size, + * the margin requirements increase at a rate of sqrt(size). + * + * Deprecated since v3.x. + */ + + /** @deprecated */ + + base_position_notional: Long; +} +/** + * UpdateClobPairEventV1 message contains all the information about an update to + * a clob pair on the dYdX chain. + */ + +export interface UpdateClobPairEventV1 { + /** + * Unique clob pair Id associated with this perpetual market + * Defined in clob.clob_pair + */ + clobPairId: number; + /** Status of the CLOB */ + + status: ClobPairStatus; + /** + * `10^Exponent` gives the number of QuoteQuantums traded per BaseQuantum + * per Subtick. + * Defined in clob.clob_pair + */ + + quantumConversionExponent: number; + /** + * Defines the tick size of the orderbook by defining how many subticks + * are in one tick. That is, the subticks of any valid order must be a + * multiple of this value. Generally this value should start `>= 100`to + * allow room for decreasing it. + * Defined in clob.clob_pair + */ + + subticksPerTick: number; + /** + * Minimum increment in the size of orders on the CLOB, in base quantums. + * Defined in clob.clob_pair + */ + + stepBaseQuantums: Long; +} +/** + * UpdateClobPairEventV1 message contains all the information about an update to + * a clob pair on the dYdX chain. + */ + +export interface UpdateClobPairEventV1SDKType { + /** + * Unique clob pair Id associated with this perpetual market + * Defined in clob.clob_pair + */ + clob_pair_id: number; + /** Status of the CLOB */ + + status: ClobPairStatusSDKType; + /** + * `10^Exponent` gives the number of QuoteQuantums traded per BaseQuantum + * per Subtick. 
+ * Defined in clob.clob_pair + */ + + quantum_conversion_exponent: number; + /** + * Defines the tick size of the orderbook by defining how many subticks + * are in one tick. That is, the subticks of any valid order must be a + * multiple of this value. Generally this value should start `>= 100`to + * allow room for decreasing it. + * Defined in clob.clob_pair + */ + + subticks_per_tick: number; + /** + * Minimum increment in the size of orders on the CLOB, in base quantums. + * Defined in clob.clob_pair + */ + + step_base_quantums: Long; +} +/** + * UpdatePerpetualEventV1 message contains all the information about an update + * to a perpetual on the dYdX chain. + * Deprecated. See UpdatePerpetualEventV2 for the most up to date message + * for the event to update a perpetual. + */ + +/** @deprecated */ + +export interface UpdatePerpetualEventV1 { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + marketId: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomicResolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidityTier: number; +} +/** + * UpdatePerpetualEventV1 message contains all the information about an update + * to a perpetual on the dYdX chain. + * Deprecated. See UpdatePerpetualEventV2 for the most up to date message + * for the event to update a perpetual. + */ + +/** @deprecated */ + +export interface UpdatePerpetualEventV1SDKType { + /** + * Unique Perpetual id. 
+ * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + market_id: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomic_resolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidity_tier: number; +} +/** + * UpdatePerpetualEventV2 message contains all the information about an update + * to a perpetual on the dYdX chain. + * Deprecated. Use UpdatePerpetualEventV3. + */ + +/** @deprecated */ + +export interface UpdatePerpetualEventV2 { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + marketId: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomicResolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidityTier: number; + /** Market type of the perpetual. */ + + marketType: PerpetualMarketType; +} +/** + * UpdatePerpetualEventV2 message contains all the information about an update + * to a perpetual on the dYdX chain. + * Deprecated. 
Use UpdatePerpetualEventV3. */ -export interface AddressTradingRewardSDKType { - /** The address of the wallet that will receive the trading reward. */ - owner: string; - /** - * The amount of trading rewards earned by the address above in denoms. 1e18 - * denoms is equivalent to a single coin. - */ +/** @deprecated */ + +export interface UpdatePerpetualEventV2SDKType { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + market_id: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomic_resolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidity_tier: number; + /** Market type of the perpetual. */ + + market_type: PerpetualMarketTypeSDKType; +} +/** + * UpdatePerpetualEventV3 message contains all the information about an update + * to a perpetual on the dYdX chain. + */ + +export interface UpdatePerpetualEventV3 { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + marketId: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. 
+ * Defined in perpetuals.perpetual + */ + + atomicResolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidityTier: number; + /** Market type of the perpetual. */ + + marketType: PerpetualMarketType; + /** Default 8hr funding rate in parts-per-million. */ + + defaultFunding8hrPpm: number; +} +/** + * UpdatePerpetualEventV3 message contains all the information about an update + * to a perpetual on the dYdX chain. + */ + +export interface UpdatePerpetualEventV3SDKType { + /** + * Unique Perpetual id. + * Defined in perpetuals.perpetual + */ + id: number; + /** + * The name of the `Perpetual` (e.g. `BTC-USD`). + * Defined in perpetuals.perpetual + */ + + ticker: string; + /** + * Unique id of market param associated with this perpetual market. + * Defined in perpetuals.perpetual + */ + + market_id: number; + /** + * The exponent for converting an atomic amount (`size = 1`) + * to a full coin. For example, if `AtomicResolution = -8` + * then a `PerpetualPosition` with `size = 1e8` is equivalent to + * a position size of one full coin. + * Defined in perpetuals.perpetual + */ + + atomic_resolution: number; + /** + * The liquidity_tier that this perpetual is associated with. + * Defined in perpetuals.perpetual + */ + + liquidity_tier: number; + /** Market type of the perpetual. */ + + market_type: PerpetualMarketTypeSDKType; + /** Default 8hr funding rate in parts-per-million. */ + + default_funding8hr_ppm: number; +} +/** + * TradingRewardsEventV1 is communicates all trading rewards for all accounts + * that receive trade rewards in the block. + */ + +export interface TradingRewardsEventV1 { + /** The list of all trading rewards in the block. */ + tradingRewards: AddressTradingReward[]; +} +/** + * TradingRewardsEventV1 is communicates all trading rewards for all accounts + * that receive trade rewards in the block. 
+ */ + +export interface TradingRewardsEventV1SDKType { + /** The list of all trading rewards in the block. */ + trading_rewards: AddressTradingRewardSDKType[]; +} +/** + * AddressTradingReward contains info on an instance of an address receiving a + * reward + */ + +export interface AddressTradingReward { + /** The address of the wallet that will receive the trading reward. */ + owner: string; + /** + * The amount of trading rewards earned by the address above in denoms. 1e18 + * denoms is equivalent to a single coin. + */ + + denomAmount: Uint8Array; +} +/** + * AddressTradingReward contains info on an instance of an address receiving a + * reward + */ + +export interface AddressTradingRewardSDKType { + /** The address of the wallet that will receive the trading reward. */ + owner: string; + /** + * The amount of trading rewards earned by the address above in denoms. 1e18 + * denoms is equivalent to a single coin. + */ + + denom_amount: Uint8Array; +} +/** + * OpenInterestUpdateEventV1 is used for open interest update events + * Deprecated. + */ + +/** @deprecated */ + +export interface OpenInterestUpdateEventV1 { + openInterestUpdates: OpenInterestUpdate[]; +} +/** + * OpenInterestUpdateEventV1 is used for open interest update events + * Deprecated. + */ + +/** @deprecated */ + +export interface OpenInterestUpdateEventV1SDKType { + open_interest_updates: OpenInterestUpdateSDKType[]; +} +/** + * OpenInterestUpdate contains a single open interest update for a perpetual + * Deprecated. + */ + +/** @deprecated */ + +export interface OpenInterestUpdate { + perpetualId: number; + /** The new open interest value for the perpetual market. */ + + openInterest: Uint8Array; +} +/** + * OpenInterestUpdate contains a single open interest update for a perpetual + * Deprecated. + */ + +/** @deprecated */ + +export interface OpenInterestUpdateSDKType { + perpetual_id: number; + /** The new open interest value for the perpetual market. 
*/ + + open_interest: Uint8Array; +} +/** + * LiquidationEventV2 message contains all the information needed to update + * the liquidity tiers. It contains all the fields from V1 along with the + * open interest caps. + */ + +export interface LiquidityTierUpsertEventV2 { + /** Unique id. */ + id: number; + /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ + + name: string; + /** + * The margin fraction needed to open a position. + * In parts-per-million. + */ + + initialMarginPpm: number; + /** + * The fraction of the initial-margin that the maintenance-margin is, + * e.g. 50%. In parts-per-million. + */ + + maintenanceFractionPpm: number; + /** + * The maximum position size at which the margin requirements are + * not increased over the default values. Above this position size, + * the margin requirements increase at a rate of sqrt(size). + * + * Deprecated since v3.x. + */ + + /** @deprecated */ + + basePositionNotional: Long; + /** Lower cap of open interest in quote quantums. optional */ + + openInterestLowerCap: Long; + /** Upper cap of open interest in quote quantums. */ + + openInterestUpperCap: Long; +} +/** + * LiquidationEventV2 message contains all the information needed to update + * the liquidity tiers. It contains all the fields from V1 along with the + * open interest caps. + */ + +export interface LiquidityTierUpsertEventV2SDKType { + /** Unique id. */ + id: number; + /** The name of the tier purely for mnemonic purposes, e.g. "Gold". */ + + name: string; + /** + * The margin fraction needed to open a position. + * In parts-per-million. + */ + + initial_margin_ppm: number; + /** + * The fraction of the initial-margin that the maintenance-margin is, + * e.g. 50%. In parts-per-million. + */ + + maintenance_fraction_ppm: number; + /** + * The maximum position size at which the margin requirements are + * not increased over the default values. Above this position size, + * the margin requirements increase at a rate of sqrt(size). 
+ * + * Deprecated since v3.x. + */ + + /** @deprecated */ + + base_position_notional: Long; + /** Lower cap of open interest in quote quantums. optional */ + + open_interest_lower_cap: Long; + /** Upper cap of open interest in quote quantums. */ + + open_interest_upper_cap: Long; +} +/** Event emitted when a referee is registered with an affiliate. */ + +export interface RegisterAffiliateEventV1 { + /** Address of the referee being registered. */ + referee: string; + /** Address of the affiliate associated with the referee. */ + + affiliate: string; +} +/** Event emitted when a referee is registered with an affiliate. */ + +export interface RegisterAffiliateEventV1SDKType { + /** Address of the referee being registered. */ + referee: string; + /** Address of the affiliate associated with the referee. */ + + affiliate: string; +} +/** Event emitted when a vault is created / updated. */ + +export interface UpsertVaultEventV1 { + /** Address of the vault. */ + address: string; + /** Clob pair Id associated with the vault. */ + + clobPairId: number; + /** Status of the vault. */ + + status: VaultStatus; +} +/** Event emitted when a vault is created / updated. */ + +export interface UpsertVaultEventV1SDKType { + /** Address of the vault. */ + address: string; + /** Clob pair Id associated with the vault. */ + + clob_pair_id: number; + /** Status of the vault. 
*/ + + status: VaultStatusSDKType; +} + +function createBaseFundingUpdateV1(): FundingUpdateV1 { + return { + perpetualId: 0, + fundingValuePpm: 0, + fundingIndex: new Uint8Array() + }; +} + +export const FundingUpdateV1 = { + encode(message: FundingUpdateV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.perpetualId !== 0) { + writer.uint32(8).uint32(message.perpetualId); + } + + if (message.fundingValuePpm !== 0) { + writer.uint32(16).int32(message.fundingValuePpm); + } + + if (message.fundingIndex.length !== 0) { + writer.uint32(26).bytes(message.fundingIndex); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FundingUpdateV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFundingUpdateV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.perpetualId = reader.uint32(); + break; + + case 2: + message.fundingValuePpm = reader.int32(); + break; + + case 3: + message.fundingIndex = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FundingUpdateV1 { + const message = createBaseFundingUpdateV1(); + message.perpetualId = object.perpetualId ?? 0; + message.fundingValuePpm = object.fundingValuePpm ?? 0; + message.fundingIndex = object.fundingIndex ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseFundingEventV1(): FundingEventV1 { + return { + updates: [], + type: 0 + }; +} + +export const FundingEventV1 = { + encode(message: FundingEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.updates) { + FundingUpdateV1.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.type !== 0) { + writer.uint32(16).int32(message.type); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FundingEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFundingEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.updates.push(FundingUpdateV1.decode(reader, reader.uint32())); + break; + + case 2: + message.type = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FundingEventV1 { + const message = createBaseFundingEventV1(); + message.updates = object.updates?.map(e => FundingUpdateV1.fromPartial(e)) || []; + message.type = object.type ?? 
0; + return message; + } + +}; + +function createBaseMarketEventV1(): MarketEventV1 { + return { + marketId: 0, + priceUpdate: undefined, + marketCreate: undefined, + marketModify: undefined + }; +} + +export const MarketEventV1 = { + encode(message: MarketEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.marketId !== 0) { + writer.uint32(8).uint32(message.marketId); + } + + if (message.priceUpdate !== undefined) { + MarketPriceUpdateEventV1.encode(message.priceUpdate, writer.uint32(18).fork()).ldelim(); + } + + if (message.marketCreate !== undefined) { + MarketCreateEventV1.encode(message.marketCreate, writer.uint32(26).fork()).ldelim(); + } + + if (message.marketModify !== undefined) { + MarketModifyEventV1.encode(message.marketModify, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.marketId = reader.uint32(); + break; + + case 2: + message.priceUpdate = MarketPriceUpdateEventV1.decode(reader, reader.uint32()); + break; + + case 3: + message.marketCreate = MarketCreateEventV1.decode(reader, reader.uint32()); + break; + + case 4: + message.marketModify = MarketModifyEventV1.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketEventV1 { + const message = createBaseMarketEventV1(); + message.marketId = object.marketId ?? 0; + message.priceUpdate = object.priceUpdate !== undefined && object.priceUpdate !== null ? 
MarketPriceUpdateEventV1.fromPartial(object.priceUpdate) : undefined; + message.marketCreate = object.marketCreate !== undefined && object.marketCreate !== null ? MarketCreateEventV1.fromPartial(object.marketCreate) : undefined; + message.marketModify = object.marketModify !== undefined && object.marketModify !== null ? MarketModifyEventV1.fromPartial(object.marketModify) : undefined; + return message; + } + +}; + +function createBaseMarketPriceUpdateEventV1(): MarketPriceUpdateEventV1 { + return { + priceWithExponent: Long.UZERO + }; +} + +export const MarketPriceUpdateEventV1 = { + encode(message: MarketPriceUpdateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.priceWithExponent.isZero()) { + writer.uint32(8).uint64(message.priceWithExponent); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketPriceUpdateEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketPriceUpdateEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.priceWithExponent = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketPriceUpdateEventV1 { + const message = createBaseMarketPriceUpdateEventV1(); + message.priceWithExponent = object.priceWithExponent !== undefined && object.priceWithExponent !== null ? 
Long.fromValue(object.priceWithExponent) : Long.UZERO; + return message; + } + +}; + +function createBaseMarketBaseEventV1(): MarketBaseEventV1 { + return { + pair: "", + minPriceChangePpm: 0 + }; +} + +export const MarketBaseEventV1 = { + encode(message: MarketBaseEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pair !== "") { + writer.uint32(10).string(message.pair); + } + + if (message.minPriceChangePpm !== 0) { + writer.uint32(16).uint32(message.minPriceChangePpm); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketBaseEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketBaseEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pair = reader.string(); + break; + + case 2: + message.minPriceChangePpm = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketBaseEventV1 { + const message = createBaseMarketBaseEventV1(); + message.pair = object.pair ?? ""; + message.minPriceChangePpm = object.minPriceChangePpm ?? 0; + return message; + } + +}; + +function createBaseMarketCreateEventV1(): MarketCreateEventV1 { + return { + base: undefined, + exponent: 0 + }; +} + +export const MarketCreateEventV1 = { + encode(message: MarketCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.base !== undefined) { + MarketBaseEventV1.encode(message.base, writer.uint32(10).fork()).ldelim(); + } + + if (message.exponent !== 0) { + writer.uint32(16).sint32(message.exponent); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketCreateEventV1 { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketCreateEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.base = MarketBaseEventV1.decode(reader, reader.uint32()); + break; + + case 2: + message.exponent = reader.sint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketCreateEventV1 { + const message = createBaseMarketCreateEventV1(); + message.base = object.base !== undefined && object.base !== null ? MarketBaseEventV1.fromPartial(object.base) : undefined; + message.exponent = object.exponent ?? 0; + return message; + } + +}; + +function createBaseMarketModifyEventV1(): MarketModifyEventV1 { + return { + base: undefined + }; +} + +export const MarketModifyEventV1 = { + encode(message: MarketModifyEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.base !== undefined) { + MarketBaseEventV1.encode(message.base, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketModifyEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketModifyEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.base = MarketBaseEventV1.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketModifyEventV1 { + const message = createBaseMarketModifyEventV1(); + message.base = object.base !== undefined && object.base !== null ? 
MarketBaseEventV1.fromPartial(object.base) : undefined; + return message; + } + +}; + +function createBaseSourceOfFunds(): SourceOfFunds { + return { + subaccountId: undefined, + address: undefined + }; +} + +export const SourceOfFunds = { + encode(message: SourceOfFunds, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subaccountId !== undefined) { + IndexerSubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); + } + + if (message.address !== undefined) { + writer.uint32(18).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceOfFunds { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceOfFunds(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); + break; + + case 2: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SourceOfFunds { + const message = createBaseSourceOfFunds(); + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? IndexerSubaccountId.fromPartial(object.subaccountId) : undefined; + message.address = object.address ?? 
undefined; + return message; + } + +}; + +function createBaseTransferEventV1(): TransferEventV1 { + return { + senderSubaccountId: undefined, + recipientSubaccountId: undefined, + assetId: 0, + amount: Long.UZERO, + sender: undefined, + recipient: undefined + }; +} + +export const TransferEventV1 = { + encode(message: TransferEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.senderSubaccountId !== undefined) { + IndexerSubaccountId.encode(message.senderSubaccountId, writer.uint32(10).fork()).ldelim(); + } + + if (message.recipientSubaccountId !== undefined) { + IndexerSubaccountId.encode(message.recipientSubaccountId, writer.uint32(18).fork()).ldelim(); + } + + if (message.assetId !== 0) { + writer.uint32(24).uint32(message.assetId); + } + + if (!message.amount.isZero()) { + writer.uint32(32).uint64(message.amount); + } + + if (message.sender !== undefined) { + SourceOfFunds.encode(message.sender, writer.uint32(42).fork()).ldelim(); + } + + if (message.recipient !== undefined) { + SourceOfFunds.encode(message.recipient, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TransferEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTransferEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.senderSubaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); + break; + + case 2: + message.recipientSubaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); + break; + + case 3: + message.assetId = reader.uint32(); + break; + + case 4: + message.amount = (reader.uint64() as Long); + break; + + case 5: + message.sender = SourceOfFunds.decode(reader, reader.uint32()); + break; + + case 6: + message.recipient = SourceOfFunds.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TransferEventV1 { + const message = createBaseTransferEventV1(); + message.senderSubaccountId = object.senderSubaccountId !== undefined && object.senderSubaccountId !== null ? IndexerSubaccountId.fromPartial(object.senderSubaccountId) : undefined; + message.recipientSubaccountId = object.recipientSubaccountId !== undefined && object.recipientSubaccountId !== null ? IndexerSubaccountId.fromPartial(object.recipientSubaccountId) : undefined; + message.assetId = object.assetId ?? 0; + message.amount = object.amount !== undefined && object.amount !== null ? Long.fromValue(object.amount) : Long.UZERO; + message.sender = object.sender !== undefined && object.sender !== null ? SourceOfFunds.fromPartial(object.sender) : undefined; + message.recipient = object.recipient !== undefined && object.recipient !== null ? 
SourceOfFunds.fromPartial(object.recipient) : undefined; + return message; + } + +}; - denom_amount: Uint8Array; +function createBaseOrderFillEventV1(): OrderFillEventV1 { + return { + makerOrder: undefined, + order: undefined, + liquidationOrder: undefined, + fillAmount: Long.UZERO, + makerFee: Long.ZERO, + takerFee: Long.ZERO, + totalFilledMaker: Long.UZERO, + totalFilledTaker: Long.UZERO, + affiliateRevShare: Long.UZERO + }; } -function createBaseFundingUpdateV1(): FundingUpdateV1 { +export const OrderFillEventV1 = { + encode(message: OrderFillEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.makerOrder !== undefined) { + IndexerOrder.encode(message.makerOrder, writer.uint32(10).fork()).ldelim(); + } + + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(18).fork()).ldelim(); + } + + if (message.liquidationOrder !== undefined) { + LiquidationOrderV1.encode(message.liquidationOrder, writer.uint32(34).fork()).ldelim(); + } + + if (!message.fillAmount.isZero()) { + writer.uint32(24).uint64(message.fillAmount); + } + + if (!message.makerFee.isZero()) { + writer.uint32(40).sint64(message.makerFee); + } + + if (!message.takerFee.isZero()) { + writer.uint32(48).sint64(message.takerFee); + } + + if (!message.totalFilledMaker.isZero()) { + writer.uint32(56).uint64(message.totalFilledMaker); + } + + if (!message.totalFilledTaker.isZero()) { + writer.uint32(64).uint64(message.totalFilledTaker); + } + + if (!message.affiliateRevShare.isZero()) { + writer.uint32(72).uint64(message.affiliateRevShare); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OrderFillEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOrderFillEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.makerOrder = IndexerOrder.decode(reader, reader.uint32()); + break; + + case 2: + message.order = IndexerOrder.decode(reader, reader.uint32()); + break; + + case 4: + message.liquidationOrder = LiquidationOrderV1.decode(reader, reader.uint32()); + break; + + case 3: + message.fillAmount = (reader.uint64() as Long); + break; + + case 5: + message.makerFee = (reader.sint64() as Long); + break; + + case 6: + message.takerFee = (reader.sint64() as Long); + break; + + case 7: + message.totalFilledMaker = (reader.uint64() as Long); + break; + + case 8: + message.totalFilledTaker = (reader.uint64() as Long); + break; + + case 9: + message.affiliateRevShare = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OrderFillEventV1 { + const message = createBaseOrderFillEventV1(); + message.makerOrder = object.makerOrder !== undefined && object.makerOrder !== null ? IndexerOrder.fromPartial(object.makerOrder) : undefined; + message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; + message.liquidationOrder = object.liquidationOrder !== undefined && object.liquidationOrder !== null ? LiquidationOrderV1.fromPartial(object.liquidationOrder) : undefined; + message.fillAmount = object.fillAmount !== undefined && object.fillAmount !== null ? Long.fromValue(object.fillAmount) : Long.UZERO; + message.makerFee = object.makerFee !== undefined && object.makerFee !== null ? Long.fromValue(object.makerFee) : Long.ZERO; + message.takerFee = object.takerFee !== undefined && object.takerFee !== null ? 
Long.fromValue(object.takerFee) : Long.ZERO; + message.totalFilledMaker = object.totalFilledMaker !== undefined && object.totalFilledMaker !== null ? Long.fromValue(object.totalFilledMaker) : Long.UZERO; + message.totalFilledTaker = object.totalFilledTaker !== undefined && object.totalFilledTaker !== null ? Long.fromValue(object.totalFilledTaker) : Long.UZERO; + message.affiliateRevShare = object.affiliateRevShare !== undefined && object.affiliateRevShare !== null ? Long.fromValue(object.affiliateRevShare) : Long.UZERO; + return message; + } + +}; + +function createBaseDeleveragingEventV1(): DeleveragingEventV1 { return { + liquidated: undefined, + offsetting: undefined, perpetualId: 0, - fundingValuePpm: 0, - fundingIndex: new Uint8Array() + fillAmount: Long.UZERO, + totalQuoteQuantums: Long.UZERO, + isBuy: false, + isFinalSettlement: false }; } -export const FundingUpdateV1 = { - encode(message: FundingUpdateV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const DeleveragingEventV1 = { + encode(message: DeleveragingEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.liquidated !== undefined) { + IndexerSubaccountId.encode(message.liquidated, writer.uint32(10).fork()).ldelim(); + } + + if (message.offsetting !== undefined) { + IndexerSubaccountId.encode(message.offsetting, writer.uint32(18).fork()).ldelim(); + } + if (message.perpetualId !== 0) { - writer.uint32(8).uint32(message.perpetualId); + writer.uint32(24).uint32(message.perpetualId); + } + + if (!message.fillAmount.isZero()) { + writer.uint32(32).uint64(message.fillAmount); + } + + if (!message.totalQuoteQuantums.isZero()) { + writer.uint32(40).uint64(message.totalQuoteQuantums); + } + + if (message.isBuy === true) { + writer.uint32(48).bool(message.isBuy); + } + + if (message.isFinalSettlement === true) { + writer.uint32(56).bool(message.isFinalSettlement); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): 
DeleveragingEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDeleveragingEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.liquidated = IndexerSubaccountId.decode(reader, reader.uint32()); + break; + + case 2: + message.offsetting = IndexerSubaccountId.decode(reader, reader.uint32()); + break; + + case 3: + message.perpetualId = reader.uint32(); + break; + + case 4: + message.fillAmount = (reader.uint64() as Long); + break; + + case 5: + message.totalQuoteQuantums = (reader.uint64() as Long); + break; + + case 6: + message.isBuy = reader.bool(); + break; + + case 7: + message.isFinalSettlement = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DeleveragingEventV1 { + const message = createBaseDeleveragingEventV1(); + message.liquidated = object.liquidated !== undefined && object.liquidated !== null ? IndexerSubaccountId.fromPartial(object.liquidated) : undefined; + message.offsetting = object.offsetting !== undefined && object.offsetting !== null ? IndexerSubaccountId.fromPartial(object.offsetting) : undefined; + message.perpetualId = object.perpetualId ?? 0; + message.fillAmount = object.fillAmount !== undefined && object.fillAmount !== null ? Long.fromValue(object.fillAmount) : Long.UZERO; + message.totalQuoteQuantums = object.totalQuoteQuantums !== undefined && object.totalQuoteQuantums !== null ? Long.fromValue(object.totalQuoteQuantums) : Long.UZERO; + message.isBuy = object.isBuy ?? false; + message.isFinalSettlement = object.isFinalSettlement ?? 
false; + return message; + } + +}; + +function createBaseLiquidationOrderV1(): LiquidationOrderV1 { + return { + liquidated: undefined, + clobPairId: 0, + perpetualId: 0, + totalSize: Long.UZERO, + isBuy: false, + subticks: Long.UZERO + }; +} + +export const LiquidationOrderV1 = { + encode(message: LiquidationOrderV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.liquidated !== undefined) { + IndexerSubaccountId.encode(message.liquidated, writer.uint32(10).fork()).ldelim(); + } + + if (message.clobPairId !== 0) { + writer.uint32(16).uint32(message.clobPairId); + } + + if (message.perpetualId !== 0) { + writer.uint32(24).uint32(message.perpetualId); } - if (message.fundingValuePpm !== 0) { - writer.uint32(16).int32(message.fundingValuePpm); + if (!message.totalSize.isZero()) { + writer.uint32(32).uint64(message.totalSize); } - if (message.fundingIndex.length !== 0) { - writer.uint32(26).bytes(message.fundingIndex); + if (message.isBuy === true) { + writer.uint32(40).bool(message.isBuy); + } + + if (!message.subticks.isZero()) { + writer.uint32(48).uint64(message.subticks); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FundingUpdateV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): LiquidationOrderV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseFundingUpdateV1(); + const message = createBaseLiquidationOrderV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.perpetualId = reader.uint32(); + message.liquidated = IndexerSubaccountId.decode(reader, reader.uint32()); break; case 2: - message.fundingValuePpm = reader.int32(); + message.clobPairId = reader.uint32(); break; case 3: - message.fundingIndex = reader.bytes(); + message.perpetualId = reader.uint32(); + break; + + case 4: + message.totalSize = (reader.uint64() as Long); + break; + + case 5: + message.isBuy = reader.bool(); + break; + + case 6: + message.subticks = (reader.uint64() as Long); break; default: @@ -1242,51 +2748,63 @@ export const FundingUpdateV1 = { return message; }, - fromPartial(object: DeepPartial): FundingUpdateV1 { - const message = createBaseFundingUpdateV1(); + fromPartial(object: DeepPartial): LiquidationOrderV1 { + const message = createBaseLiquidationOrderV1(); + message.liquidated = object.liquidated !== undefined && object.liquidated !== null ? IndexerSubaccountId.fromPartial(object.liquidated) : undefined; + message.clobPairId = object.clobPairId ?? 0; message.perpetualId = object.perpetualId ?? 0; - message.fundingValuePpm = object.fundingValuePpm ?? 0; - message.fundingIndex = object.fundingIndex ?? new Uint8Array(); + message.totalSize = object.totalSize !== undefined && object.totalSize !== null ? Long.fromValue(object.totalSize) : Long.UZERO; + message.isBuy = object.isBuy ?? false; + message.subticks = object.subticks !== undefined && object.subticks !== null ? 
Long.fromValue(object.subticks) : Long.UZERO; return message; } }; -function createBaseFundingEventV1(): FundingEventV1 { +function createBaseSubaccountUpdateEventV1(): SubaccountUpdateEventV1 { return { - updates: [], - type: 0 + subaccountId: undefined, + updatedPerpetualPositions: [], + updatedAssetPositions: [] }; } -export const FundingEventV1 = { - encode(message: FundingEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.updates) { - FundingUpdateV1.encode(v!, writer.uint32(10).fork()).ldelim(); +export const SubaccountUpdateEventV1 = { + encode(message: SubaccountUpdateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subaccountId !== undefined) { + IndexerSubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); } - if (message.type !== 0) { - writer.uint32(16).int32(message.type); + for (const v of message.updatedPerpetualPositions) { + IndexerPerpetualPosition.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.updatedAssetPositions) { + IndexerAssetPosition.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FundingEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): SubaccountUpdateEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseFundingEventV1(); + const message = createBaseSubaccountUpdateEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.updates.push(FundingUpdateV1.decode(reader, reader.uint32())); + message.subaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); break; - case 2: - message.type = (reader.int32() as any); + case 3: + message.updatedPerpetualPositions.push(IndexerPerpetualPosition.decode(reader, reader.uint32())); + break; + + case 4: + message.updatedAssetPositions.push(IndexerAssetPosition.decode(reader, reader.uint32())); break; default: @@ -1298,68 +2816,87 @@ export const FundingEventV1 = { return message; }, - fromPartial(object: DeepPartial): FundingEventV1 { - const message = createBaseFundingEventV1(); - message.updates = object.updates?.map(e => FundingUpdateV1.fromPartial(e)) || []; - message.type = object.type ?? 0; + fromPartial(object: DeepPartial): SubaccountUpdateEventV1 { + const message = createBaseSubaccountUpdateEventV1(); + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? 
IndexerSubaccountId.fromPartial(object.subaccountId) : undefined; + message.updatedPerpetualPositions = object.updatedPerpetualPositions?.map(e => IndexerPerpetualPosition.fromPartial(e)) || []; + message.updatedAssetPositions = object.updatedAssetPositions?.map(e => IndexerAssetPosition.fromPartial(e)) || []; return message; } }; -function createBaseMarketEventV1(): MarketEventV1 { +function createBaseStatefulOrderEventV1(): StatefulOrderEventV1 { return { - marketId: 0, - priceUpdate: undefined, - marketCreate: undefined, - marketModify: undefined + orderPlace: undefined, + orderRemoval: undefined, + conditionalOrderPlacement: undefined, + conditionalOrderTriggered: undefined, + longTermOrderPlacement: undefined, + orderReplacement: undefined }; } -export const MarketEventV1 = { - encode(message: MarketEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.marketId !== 0) { - writer.uint32(8).uint32(message.marketId); +export const StatefulOrderEventV1 = { + encode(message: StatefulOrderEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.orderPlace !== undefined) { + StatefulOrderEventV1_StatefulOrderPlacementV1.encode(message.orderPlace, writer.uint32(10).fork()).ldelim(); } - if (message.priceUpdate !== undefined) { - MarketPriceUpdateEventV1.encode(message.priceUpdate, writer.uint32(18).fork()).ldelim(); + if (message.orderRemoval !== undefined) { + StatefulOrderEventV1_StatefulOrderRemovalV1.encode(message.orderRemoval, writer.uint32(34).fork()).ldelim(); } - if (message.marketCreate !== undefined) { - MarketCreateEventV1.encode(message.marketCreate, writer.uint32(26).fork()).ldelim(); + if (message.conditionalOrderPlacement !== undefined) { + StatefulOrderEventV1_ConditionalOrderPlacementV1.encode(message.conditionalOrderPlacement, writer.uint32(42).fork()).ldelim(); } - if (message.marketModify !== undefined) { - MarketModifyEventV1.encode(message.marketModify, writer.uint32(34).fork()).ldelim(); + if 
(message.conditionalOrderTriggered !== undefined) { + StatefulOrderEventV1_ConditionalOrderTriggeredV1.encode(message.conditionalOrderTriggered, writer.uint32(50).fork()).ldelim(); + } + + if (message.longTermOrderPlacement !== undefined) { + StatefulOrderEventV1_LongTermOrderPlacementV1.encode(message.longTermOrderPlacement, writer.uint32(58).fork()).ldelim(); + } + + if (message.orderReplacement !== undefined) { + StatefulOrderEventV1_LongTermOrderReplacementV1.encode(message.orderReplacement, writer.uint32(66).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MarketEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMarketEventV1(); + const message = createBaseStatefulOrderEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.marketId = reader.uint32(); + message.orderPlace = StatefulOrderEventV1_StatefulOrderPlacementV1.decode(reader, reader.uint32()); break; - case 2: - message.priceUpdate = MarketPriceUpdateEventV1.decode(reader, reader.uint32()); + case 4: + message.orderRemoval = StatefulOrderEventV1_StatefulOrderRemovalV1.decode(reader, reader.uint32()); break; - case 3: - message.marketCreate = MarketCreateEventV1.decode(reader, reader.uint32()); + case 5: + message.conditionalOrderPlacement = StatefulOrderEventV1_ConditionalOrderPlacementV1.decode(reader, reader.uint32()); break; - case 4: - message.marketModify = MarketModifyEventV1.decode(reader, reader.uint32()); + case 6: + message.conditionalOrderTriggered = StatefulOrderEventV1_ConditionalOrderTriggeredV1.decode(reader, reader.uint32()); + break; + + case 7: + message.longTermOrderPlacement = StatefulOrderEventV1_LongTermOrderPlacementV1.decode(reader, reader.uint32()); + break; + + case 8: + 
message.orderReplacement = StatefulOrderEventV1_LongTermOrderReplacementV1.decode(reader, reader.uint32()); break; default: @@ -1371,43 +2908,45 @@ export const MarketEventV1 = { return message; }, - fromPartial(object: DeepPartial): MarketEventV1 { - const message = createBaseMarketEventV1(); - message.marketId = object.marketId ?? 0; - message.priceUpdate = object.priceUpdate !== undefined && object.priceUpdate !== null ? MarketPriceUpdateEventV1.fromPartial(object.priceUpdate) : undefined; - message.marketCreate = object.marketCreate !== undefined && object.marketCreate !== null ? MarketCreateEventV1.fromPartial(object.marketCreate) : undefined; - message.marketModify = object.marketModify !== undefined && object.marketModify !== null ? MarketModifyEventV1.fromPartial(object.marketModify) : undefined; + fromPartial(object: DeepPartial): StatefulOrderEventV1 { + const message = createBaseStatefulOrderEventV1(); + message.orderPlace = object.orderPlace !== undefined && object.orderPlace !== null ? StatefulOrderEventV1_StatefulOrderPlacementV1.fromPartial(object.orderPlace) : undefined; + message.orderRemoval = object.orderRemoval !== undefined && object.orderRemoval !== null ? StatefulOrderEventV1_StatefulOrderRemovalV1.fromPartial(object.orderRemoval) : undefined; + message.conditionalOrderPlacement = object.conditionalOrderPlacement !== undefined && object.conditionalOrderPlacement !== null ? StatefulOrderEventV1_ConditionalOrderPlacementV1.fromPartial(object.conditionalOrderPlacement) : undefined; + message.conditionalOrderTriggered = object.conditionalOrderTriggered !== undefined && object.conditionalOrderTriggered !== null ? StatefulOrderEventV1_ConditionalOrderTriggeredV1.fromPartial(object.conditionalOrderTriggered) : undefined; + message.longTermOrderPlacement = object.longTermOrderPlacement !== undefined && object.longTermOrderPlacement !== null ? 
StatefulOrderEventV1_LongTermOrderPlacementV1.fromPartial(object.longTermOrderPlacement) : undefined; + message.orderReplacement = object.orderReplacement !== undefined && object.orderReplacement !== null ? StatefulOrderEventV1_LongTermOrderReplacementV1.fromPartial(object.orderReplacement) : undefined; return message; } }; -function createBaseMarketPriceUpdateEventV1(): MarketPriceUpdateEventV1 { +function createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(): StatefulOrderEventV1_StatefulOrderPlacementV1 { return { - priceWithExponent: Long.UZERO + order: undefined }; } -export const MarketPriceUpdateEventV1 = { - encode(message: MarketPriceUpdateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (!message.priceWithExponent.isZero()) { - writer.uint32(8).uint64(message.priceWithExponent); +export const StatefulOrderEventV1_StatefulOrderPlacementV1 = { + encode(message: StatefulOrderEventV1_StatefulOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MarketPriceUpdateEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_StatefulOrderPlacementV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMarketPriceUpdateEventV1(); + const message = createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.priceWithExponent = (reader.uint64() as Long); + message.order = IndexerOrder.decode(reader, reader.uint32()); break; default: @@ -1419,49 +2958,49 @@ export const MarketPriceUpdateEventV1 = { return message; }, - fromPartial(object: DeepPartial): MarketPriceUpdateEventV1 { - const message = createBaseMarketPriceUpdateEventV1(); - message.priceWithExponent = object.priceWithExponent !== undefined && object.priceWithExponent !== null ? Long.fromValue(object.priceWithExponent) : Long.UZERO; + fromPartial(object: DeepPartial): StatefulOrderEventV1_StatefulOrderPlacementV1 { + const message = createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(); + message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; return message; } }; -function createBaseMarketBaseEventV1(): MarketBaseEventV1 { +function createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(): StatefulOrderEventV1_StatefulOrderRemovalV1 { return { - pair: "", - minPriceChangePpm: 0 + removedOrderId: undefined, + reason: 0 }; } -export const MarketBaseEventV1 = { - encode(message: MarketBaseEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.pair !== "") { - writer.uint32(10).string(message.pair); +export const StatefulOrderEventV1_StatefulOrderRemovalV1 = { + encode(message: StatefulOrderEventV1_StatefulOrderRemovalV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.removedOrderId !== undefined) { + IndexerOrderId.encode(message.removedOrderId, writer.uint32(10).fork()).ldelim(); } - if (message.minPriceChangePpm !== 0) { - writer.uint32(16).uint32(message.minPriceChangePpm); + if (message.reason !== 0) { + 
writer.uint32(16).int32(message.reason); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MarketBaseEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_StatefulOrderRemovalV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMarketBaseEventV1(); + const message = createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.pair = reader.string(); + message.removedOrderId = IndexerOrderId.decode(reader, reader.uint32()); break; case 2: - message.minPriceChangePpm = reader.uint32(); + message.reason = (reader.int32() as any); break; default: @@ -1473,50 +3012,41 @@ export const MarketBaseEventV1 = { return message; }, - fromPartial(object: DeepPartial): MarketBaseEventV1 { - const message = createBaseMarketBaseEventV1(); - message.pair = object.pair ?? ""; - message.minPriceChangePpm = object.minPriceChangePpm ?? 0; + fromPartial(object: DeepPartial): StatefulOrderEventV1_StatefulOrderRemovalV1 { + const message = createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(); + message.removedOrderId = object.removedOrderId !== undefined && object.removedOrderId !== null ? IndexerOrderId.fromPartial(object.removedOrderId) : undefined; + message.reason = object.reason ?? 
0; return message; } }; -function createBaseMarketCreateEventV1(): MarketCreateEventV1 { +function createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(): StatefulOrderEventV1_ConditionalOrderPlacementV1 { return { - base: undefined, - exponent: 0 + order: undefined }; } -export const MarketCreateEventV1 = { - encode(message: MarketCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.base !== undefined) { - MarketBaseEventV1.encode(message.base, writer.uint32(10).fork()).ldelim(); - } - - if (message.exponent !== 0) { - writer.uint32(16).sint32(message.exponent); +export const StatefulOrderEventV1_ConditionalOrderPlacementV1 = { + encode(message: StatefulOrderEventV1_ConditionalOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MarketCreateEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_ConditionalOrderPlacementV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMarketCreateEventV1(); + const message = createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.base = MarketBaseEventV1.decode(reader, reader.uint32()); - break; - - case 2: - message.exponent = reader.sint32(); + message.order = IndexerOrder.decode(reader, reader.uint32()); break; default: @@ -1528,41 +3058,40 @@ export const MarketCreateEventV1 = { return message; }, - fromPartial(object: DeepPartial): MarketCreateEventV1 { - const message = createBaseMarketCreateEventV1(); - message.base = object.base !== undefined && object.base !== null ? 
MarketBaseEventV1.fromPartial(object.base) : undefined; - message.exponent = object.exponent ?? 0; + fromPartial(object: DeepPartial): StatefulOrderEventV1_ConditionalOrderPlacementV1 { + const message = createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(); + message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; return message; } }; -function createBaseMarketModifyEventV1(): MarketModifyEventV1 { +function createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { return { - base: undefined + triggeredOrderId: undefined }; } -export const MarketModifyEventV1 = { - encode(message: MarketModifyEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.base !== undefined) { - MarketBaseEventV1.encode(message.base, writer.uint32(10).fork()).ldelim(); +export const StatefulOrderEventV1_ConditionalOrderTriggeredV1 = { + encode(message: StatefulOrderEventV1_ConditionalOrderTriggeredV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.triggeredOrderId !== undefined) { + IndexerOrderId.encode(message.triggeredOrderId, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MarketModifyEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMarketModifyEventV1(); + const message = createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.base = MarketBaseEventV1.decode(reader, reader.uint32()); + message.triggeredOrderId = IndexerOrderId.decode(reader, reader.uint32()); break; default: @@ -1574,49 +3103,40 @@ export const MarketModifyEventV1 = { return message; }, - fromPartial(object: DeepPartial): MarketModifyEventV1 { - const message = createBaseMarketModifyEventV1(); - message.base = object.base !== undefined && object.base !== null ? MarketBaseEventV1.fromPartial(object.base) : undefined; + fromPartial(object: DeepPartial): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { + const message = createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(); + message.triggeredOrderId = object.triggeredOrderId !== undefined && object.triggeredOrderId !== null ? IndexerOrderId.fromPartial(object.triggeredOrderId) : undefined; return message; } }; -function createBaseSourceOfFunds(): SourceOfFunds { +function createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(): StatefulOrderEventV1_LongTermOrderPlacementV1 { return { - subaccountId: undefined, - address: undefined + order: undefined }; } -export const SourceOfFunds = { - encode(message: SourceOfFunds, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.subaccountId !== undefined) { - IndexerSubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); - } - - if (message.address !== undefined) { - writer.uint32(18).string(message.address); +export const StatefulOrderEventV1_LongTermOrderPlacementV1 = { + encode(message: StatefulOrderEventV1_LongTermOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); } return writer; }, - 
decode(input: _m0.Reader | Uint8Array, length?: number): SourceOfFunds { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_LongTermOrderPlacementV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSourceOfFunds(); + const message = createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.subaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); - break; - - case 2: - message.address = reader.string(); + message.order = IndexerOrder.decode(reader, reader.uint32()); break; default: @@ -1628,86 +3148,49 @@ export const SourceOfFunds = { return message; }, - fromPartial(object: DeepPartial): SourceOfFunds { - const message = createBaseSourceOfFunds(); - message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? IndexerSubaccountId.fromPartial(object.subaccountId) : undefined; - message.address = object.address ?? undefined; + fromPartial(object: DeepPartial): StatefulOrderEventV1_LongTermOrderPlacementV1 { + const message = createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(); + message.order = object.order !== undefined && object.order !== null ? 
IndexerOrder.fromPartial(object.order) : undefined; return message; } }; -function createBaseTransferEventV1(): TransferEventV1 { +function createBaseStatefulOrderEventV1_LongTermOrderReplacementV1(): StatefulOrderEventV1_LongTermOrderReplacementV1 { return { - senderSubaccountId: undefined, - recipientSubaccountId: undefined, - assetId: 0, - amount: Long.UZERO, - sender: undefined, - recipient: undefined + oldOrderId: undefined, + order: undefined }; } -export const TransferEventV1 = { - encode(message: TransferEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.senderSubaccountId !== undefined) { - IndexerSubaccountId.encode(message.senderSubaccountId, writer.uint32(10).fork()).ldelim(); - } - - if (message.recipientSubaccountId !== undefined) { - IndexerSubaccountId.encode(message.recipientSubaccountId, writer.uint32(18).fork()).ldelim(); - } - - if (message.assetId !== 0) { - writer.uint32(24).uint32(message.assetId); - } - - if (!message.amount.isZero()) { - writer.uint32(32).uint64(message.amount); - } - - if (message.sender !== undefined) { - SourceOfFunds.encode(message.sender, writer.uint32(42).fork()).ldelim(); +export const StatefulOrderEventV1_LongTermOrderReplacementV1 = { + encode(message: StatefulOrderEventV1_LongTermOrderReplacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.oldOrderId !== undefined) { + IndexerOrderId.encode(message.oldOrderId, writer.uint32(10).fork()).ldelim(); } - if (message.recipient !== undefined) { - SourceOfFunds.encode(message.recipient, writer.uint32(50).fork()).ldelim(); + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(18).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TransferEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_LongTermOrderReplacementV1 { const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseTransferEventV1(); + const message = createBaseStatefulOrderEventV1_LongTermOrderReplacementV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.senderSubaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); + message.oldOrderId = IndexerOrderId.decode(reader, reader.uint32()); break; case 2: - message.recipientSubaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); - break; - - case 3: - message.assetId = reader.uint32(); - break; - - case 4: - message.amount = (reader.uint64() as Long); - break; - - case 5: - message.sender = SourceOfFunds.decode(reader, reader.uint32()); - break; - - case 6: - message.recipient = SourceOfFunds.decode(reader, reader.uint32()); + message.order = IndexerOrder.decode(reader, reader.uint32()); break; default: @@ -1719,108 +3202,77 @@ export const TransferEventV1 = { return message; }, - fromPartial(object: DeepPartial): TransferEventV1 { - const message = createBaseTransferEventV1(); - message.senderSubaccountId = object.senderSubaccountId !== undefined && object.senderSubaccountId !== null ? IndexerSubaccountId.fromPartial(object.senderSubaccountId) : undefined; - message.recipientSubaccountId = object.recipientSubaccountId !== undefined && object.recipientSubaccountId !== null ? IndexerSubaccountId.fromPartial(object.recipientSubaccountId) : undefined; - message.assetId = object.assetId ?? 0; - message.amount = object.amount !== undefined && object.amount !== null ? Long.fromValue(object.amount) : Long.UZERO; - message.sender = object.sender !== undefined && object.sender !== null ? SourceOfFunds.fromPartial(object.sender) : undefined; - message.recipient = object.recipient !== undefined && object.recipient !== null ? 
SourceOfFunds.fromPartial(object.recipient) : undefined; + fromPartial(object: DeepPartial): StatefulOrderEventV1_LongTermOrderReplacementV1 { + const message = createBaseStatefulOrderEventV1_LongTermOrderReplacementV1(); + message.oldOrderId = object.oldOrderId !== undefined && object.oldOrderId !== null ? IndexerOrderId.fromPartial(object.oldOrderId) : undefined; + message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; return message; } }; -function createBaseOrderFillEventV1(): OrderFillEventV1 { +function createBaseAssetCreateEventV1(): AssetCreateEventV1 { return { - makerOrder: undefined, - order: undefined, - liquidationOrder: undefined, - fillAmount: Long.UZERO, - makerFee: Long.ZERO, - takerFee: Long.ZERO, - totalFilledMaker: Long.UZERO, - totalFilledTaker: Long.UZERO + id: 0, + symbol: "", + hasMarket: false, + marketId: 0, + atomicResolution: 0 }; } -export const OrderFillEventV1 = { - encode(message: OrderFillEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.makerOrder !== undefined) { - IndexerOrder.encode(message.makerOrder, writer.uint32(10).fork()).ldelim(); - } - - if (message.order !== undefined) { - IndexerOrder.encode(message.order, writer.uint32(18).fork()).ldelim(); - } - - if (message.liquidationOrder !== undefined) { - LiquidationOrderV1.encode(message.liquidationOrder, writer.uint32(34).fork()).ldelim(); - } - - if (!message.fillAmount.isZero()) { - writer.uint32(24).uint64(message.fillAmount); +export const AssetCreateEventV1 = { + encode(message: AssetCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); } - if (!message.makerFee.isZero()) { - writer.uint32(40).sint64(message.makerFee); + if (message.symbol !== "") { + writer.uint32(18).string(message.symbol); } - if (!message.takerFee.isZero()) { - writer.uint32(48).sint64(message.takerFee); + if 
(message.hasMarket === true) { + writer.uint32(24).bool(message.hasMarket); } - if (!message.totalFilledMaker.isZero()) { - writer.uint32(56).uint64(message.totalFilledMaker); + if (message.marketId !== 0) { + writer.uint32(32).uint32(message.marketId); } - if (!message.totalFilledTaker.isZero()) { - writer.uint32(64).uint64(message.totalFilledTaker); + if (message.atomicResolution !== 0) { + writer.uint32(40).sint32(message.atomicResolution); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): OrderFillEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): AssetCreateEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseOrderFillEventV1(); + const message = createBaseAssetCreateEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.makerOrder = IndexerOrder.decode(reader, reader.uint32()); + message.id = reader.uint32(); break; case 2: - message.order = IndexerOrder.decode(reader, reader.uint32()); - break; - - case 4: - message.liquidationOrder = LiquidationOrderV1.decode(reader, reader.uint32()); + message.symbol = reader.string(); break; case 3: - message.fillAmount = (reader.uint64() as Long); - break; - - case 5: - message.makerFee = (reader.sint64() as Long); - break; - - case 6: - message.takerFee = (reader.sint64() as Long); + message.hasMarket = reader.bool(); break; - case 7: - message.totalFilledMaker = (reader.uint64() as Long); + case 4: + message.marketId = reader.uint32(); break; - case 8: - message.totalFilledTaker = (reader.uint64() as Long); + case 5: + message.atomicResolution = reader.sint32(); break; default: @@ -1832,101 +3284,125 @@ export const OrderFillEventV1 = { return message; }, - fromPartial(object: DeepPartial): OrderFillEventV1 { - const message = createBaseOrderFillEventV1(); - message.makerOrder = object.makerOrder 
!== undefined && object.makerOrder !== null ? IndexerOrder.fromPartial(object.makerOrder) : undefined; - message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; - message.liquidationOrder = object.liquidationOrder !== undefined && object.liquidationOrder !== null ? LiquidationOrderV1.fromPartial(object.liquidationOrder) : undefined; - message.fillAmount = object.fillAmount !== undefined && object.fillAmount !== null ? Long.fromValue(object.fillAmount) : Long.UZERO; - message.makerFee = object.makerFee !== undefined && object.makerFee !== null ? Long.fromValue(object.makerFee) : Long.ZERO; - message.takerFee = object.takerFee !== undefined && object.takerFee !== null ? Long.fromValue(object.takerFee) : Long.ZERO; - message.totalFilledMaker = object.totalFilledMaker !== undefined && object.totalFilledMaker !== null ? Long.fromValue(object.totalFilledMaker) : Long.UZERO; - message.totalFilledTaker = object.totalFilledTaker !== undefined && object.totalFilledTaker !== null ? Long.fromValue(object.totalFilledTaker) : Long.UZERO; + fromPartial(object: DeepPartial): AssetCreateEventV1 { + const message = createBaseAssetCreateEventV1(); + message.id = object.id ?? 0; + message.symbol = object.symbol ?? ""; + message.hasMarket = object.hasMarket ?? false; + message.marketId = object.marketId ?? 0; + message.atomicResolution = object.atomicResolution ?? 
0; return message; } }; -function createBaseDeleveragingEventV1(): DeleveragingEventV1 { +function createBasePerpetualMarketCreateEventV1(): PerpetualMarketCreateEventV1 { return { - liquidated: undefined, - offsetting: undefined, - perpetualId: 0, - fillAmount: Long.UZERO, - totalQuoteQuantums: Long.UZERO, - isBuy: false, - isFinalSettlement: false + id: 0, + clobPairId: 0, + ticker: "", + marketId: 0, + status: 0, + quantumConversionExponent: 0, + atomicResolution: 0, + subticksPerTick: 0, + stepBaseQuantums: Long.UZERO, + liquidityTier: 0 }; } -export const DeleveragingEventV1 = { - encode(message: DeleveragingEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.liquidated !== undefined) { - IndexerSubaccountId.encode(message.liquidated, writer.uint32(10).fork()).ldelim(); +export const PerpetualMarketCreateEventV1 = { + encode(message: PerpetualMarketCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); + } + + if (message.clobPairId !== 0) { + writer.uint32(16).uint32(message.clobPairId); + } + + if (message.ticker !== "") { + writer.uint32(26).string(message.ticker); + } + + if (message.marketId !== 0) { + writer.uint32(32).uint32(message.marketId); } - if (message.offsetting !== undefined) { - IndexerSubaccountId.encode(message.offsetting, writer.uint32(18).fork()).ldelim(); + if (message.status !== 0) { + writer.uint32(40).int32(message.status); } - if (message.perpetualId !== 0) { - writer.uint32(24).uint32(message.perpetualId); + if (message.quantumConversionExponent !== 0) { + writer.uint32(48).sint32(message.quantumConversionExponent); } - if (!message.fillAmount.isZero()) { - writer.uint32(32).uint64(message.fillAmount); + if (message.atomicResolution !== 0) { + writer.uint32(56).sint32(message.atomicResolution); } - if (!message.totalQuoteQuantums.isZero()) { - writer.uint32(40).uint64(message.totalQuoteQuantums); + if (message.subticksPerTick 
!== 0) { + writer.uint32(64).uint32(message.subticksPerTick); } - if (message.isBuy === true) { - writer.uint32(48).bool(message.isBuy); + if (!message.stepBaseQuantums.isZero()) { + writer.uint32(72).uint64(message.stepBaseQuantums); } - if (message.isFinalSettlement === true) { - writer.uint32(56).bool(message.isFinalSettlement); + if (message.liquidityTier !== 0) { + writer.uint32(80).uint32(message.liquidityTier); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): DeleveragingEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): PerpetualMarketCreateEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseDeleveragingEventV1(); + const message = createBasePerpetualMarketCreateEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.liquidated = IndexerSubaccountId.decode(reader, reader.uint32()); + message.id = reader.uint32(); break; case 2: - message.offsetting = IndexerSubaccountId.decode(reader, reader.uint32()); + message.clobPairId = reader.uint32(); break; case 3: - message.perpetualId = reader.uint32(); + message.ticker = reader.string(); break; case 4: - message.fillAmount = (reader.uint64() as Long); + message.marketId = reader.uint32(); break; case 5: - message.totalQuoteQuantums = (reader.uint64() as Long); + message.status = (reader.int32() as any); break; case 6: - message.isBuy = reader.bool(); + message.quantumConversionExponent = reader.sint32(); break; case 7: - message.isFinalSettlement = reader.bool(); + message.atomicResolution = reader.sint32(); + break; + + case 8: + message.subticksPerTick = reader.uint32(); + break; + + case 9: + message.stepBaseQuantums = (reader.uint64() as Long); + break; + + case 10: + message.liquidityTier = reader.uint32(); break; default: @@ -1938,71 +3414,99 @@ export const DeleveragingEventV1 = { 
return message; }, - fromPartial(object: DeepPartial): DeleveragingEventV1 { - const message = createBaseDeleveragingEventV1(); - message.liquidated = object.liquidated !== undefined && object.liquidated !== null ? IndexerSubaccountId.fromPartial(object.liquidated) : undefined; - message.offsetting = object.offsetting !== undefined && object.offsetting !== null ? IndexerSubaccountId.fromPartial(object.offsetting) : undefined; - message.perpetualId = object.perpetualId ?? 0; - message.fillAmount = object.fillAmount !== undefined && object.fillAmount !== null ? Long.fromValue(object.fillAmount) : Long.UZERO; - message.totalQuoteQuantums = object.totalQuoteQuantums !== undefined && object.totalQuoteQuantums !== null ? Long.fromValue(object.totalQuoteQuantums) : Long.UZERO; - message.isBuy = object.isBuy ?? false; - message.isFinalSettlement = object.isFinalSettlement ?? false; + fromPartial(object: DeepPartial): PerpetualMarketCreateEventV1 { + const message = createBasePerpetualMarketCreateEventV1(); + message.id = object.id ?? 0; + message.clobPairId = object.clobPairId ?? 0; + message.ticker = object.ticker ?? ""; + message.marketId = object.marketId ?? 0; + message.status = object.status ?? 0; + message.quantumConversionExponent = object.quantumConversionExponent ?? 0; + message.atomicResolution = object.atomicResolution ?? 0; + message.subticksPerTick = object.subticksPerTick ?? 0; + message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? Long.fromValue(object.stepBaseQuantums) : Long.UZERO; + message.liquidityTier = object.liquidityTier ?? 
0; return message; } }; -function createBaseLiquidationOrderV1(): LiquidationOrderV1 { +function createBasePerpetualMarketCreateEventV2(): PerpetualMarketCreateEventV2 { return { - liquidated: undefined, + id: 0, clobPairId: 0, - perpetualId: 0, - totalSize: Long.UZERO, - isBuy: false, - subticks: Long.UZERO + ticker: "", + marketId: 0, + status: 0, + quantumConversionExponent: 0, + atomicResolution: 0, + subticksPerTick: 0, + stepBaseQuantums: Long.UZERO, + liquidityTier: 0, + marketType: 0 }; } -export const LiquidationOrderV1 = { - encode(message: LiquidationOrderV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.liquidated !== undefined) { - IndexerSubaccountId.encode(message.liquidated, writer.uint32(10).fork()).ldelim(); +export const PerpetualMarketCreateEventV2 = { + encode(message: PerpetualMarketCreateEventV2, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); } if (message.clobPairId !== 0) { writer.uint32(16).uint32(message.clobPairId); } - if (message.perpetualId !== 0) { - writer.uint32(24).uint32(message.perpetualId); + if (message.ticker !== "") { + writer.uint32(26).string(message.ticker); } - if (!message.totalSize.isZero()) { - writer.uint32(32).uint64(message.totalSize); + if (message.marketId !== 0) { + writer.uint32(32).uint32(message.marketId); } - if (message.isBuy === true) { - writer.uint32(40).bool(message.isBuy); + if (message.status !== 0) { + writer.uint32(40).int32(message.status); } - if (!message.subticks.isZero()) { - writer.uint32(48).uint64(message.subticks); + if (message.quantumConversionExponent !== 0) { + writer.uint32(48).sint32(message.quantumConversionExponent); + } + + if (message.atomicResolution !== 0) { + writer.uint32(56).sint32(message.atomicResolution); + } + + if (message.subticksPerTick !== 0) { + writer.uint32(64).uint32(message.subticksPerTick); + } + + if (!message.stepBaseQuantums.isZero()) { + 
writer.uint32(72).uint64(message.stepBaseQuantums); + } + + if (message.liquidityTier !== 0) { + writer.uint32(80).uint32(message.liquidityTier); + } + + if (message.marketType !== 0) { + writer.uint32(88).int32(message.marketType); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): LiquidationOrderV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): PerpetualMarketCreateEventV2 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseLiquidationOrderV1(); + const message = createBasePerpetualMarketCreateEventV2(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.liquidated = IndexerSubaccountId.decode(reader, reader.uint32()); + message.id = reader.uint32(); break; case 2: @@ -2010,19 +3514,39 @@ export const LiquidationOrderV1 = { break; case 3: - message.perpetualId = reader.uint32(); + message.ticker = reader.string(); break; case 4: - message.totalSize = (reader.uint64() as Long); + message.marketId = reader.uint32(); break; case 5: - message.isBuy = reader.bool(); + message.status = (reader.int32() as any); break; case 6: - message.subticks = (reader.uint64() as Long); + message.quantumConversionExponent = reader.sint32(); + break; + + case 7: + message.atomicResolution = reader.sint32(); + break; + + case 8: + message.subticksPerTick = reader.uint32(); + break; + + case 9: + message.stepBaseQuantums = (reader.uint64() as Long); + break; + + case 10: + message.liquidityTier = reader.uint32(); + break; + + case 11: + message.marketType = (reader.int32() as any); break; default: @@ -2034,195 +3558,149 @@ export const LiquidationOrderV1 = { return message; }, - fromPartial(object: DeepPartial): LiquidationOrderV1 { - const message = createBaseLiquidationOrderV1(); - message.liquidated = object.liquidated !== undefined && object.liquidated !== null ? 
IndexerSubaccountId.fromPartial(object.liquidated) : undefined; + fromPartial(object: DeepPartial): PerpetualMarketCreateEventV2 { + const message = createBasePerpetualMarketCreateEventV2(); + message.id = object.id ?? 0; message.clobPairId = object.clobPairId ?? 0; - message.perpetualId = object.perpetualId ?? 0; - message.totalSize = object.totalSize !== undefined && object.totalSize !== null ? Long.fromValue(object.totalSize) : Long.UZERO; - message.isBuy = object.isBuy ?? false; - message.subticks = object.subticks !== undefined && object.subticks !== null ? Long.fromValue(object.subticks) : Long.UZERO; + message.ticker = object.ticker ?? ""; + message.marketId = object.marketId ?? 0; + message.status = object.status ?? 0; + message.quantumConversionExponent = object.quantumConversionExponent ?? 0; + message.atomicResolution = object.atomicResolution ?? 0; + message.subticksPerTick = object.subticksPerTick ?? 0; + message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? Long.fromValue(object.stepBaseQuantums) : Long.UZERO; + message.liquidityTier = object.liquidityTier ?? 0; + message.marketType = object.marketType ?? 
0; return message; } }; -function createBaseSubaccountUpdateEventV1(): SubaccountUpdateEventV1 { +function createBasePerpetualMarketCreateEventV3(): PerpetualMarketCreateEventV3 { return { - subaccountId: undefined, - updatedPerpetualPositions: [], - updatedAssetPositions: [] + id: 0, + clobPairId: 0, + ticker: "", + marketId: 0, + status: 0, + quantumConversionExponent: 0, + atomicResolution: 0, + subticksPerTick: 0, + stepBaseQuantums: Long.UZERO, + liquidityTier: 0, + marketType: 0, + defaultFunding8hrPpm: 0 }; } -export const SubaccountUpdateEventV1 = { - encode(message: SubaccountUpdateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.subaccountId !== undefined) { - IndexerSubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); +export const PerpetualMarketCreateEventV3 = { + encode(message: PerpetualMarketCreateEventV3, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); } - for (const v of message.updatedPerpetualPositions) { - IndexerPerpetualPosition.encode(v!, writer.uint32(26).fork()).ldelim(); + if (message.clobPairId !== 0) { + writer.uint32(16).uint32(message.clobPairId); } - for (const v of message.updatedAssetPositions) { - IndexerAssetPosition.encode(v!, writer.uint32(34).fork()).ldelim(); + if (message.ticker !== "") { + writer.uint32(26).string(message.ticker); } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): SubaccountUpdateEventV1 { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseSubaccountUpdateEventV1(); - - while (reader.pos < end) { - const tag = reader.uint32(); - - switch (tag >>> 3) { - case 1: - message.subaccountId = IndexerSubaccountId.decode(reader, reader.uint32()); - break; - - case 3: - message.updatedPerpetualPositions.push(IndexerPerpetualPosition.decode(reader, reader.uint32())); - break; - - case 4: - message.updatedAssetPositions.push(IndexerAssetPosition.decode(reader, reader.uint32())); - break; - - default: - reader.skipType(tag & 7); - break; - } + if (message.marketId !== 0) { + writer.uint32(32).uint32(message.marketId); } - return message; - }, - - fromPartial(object: DeepPartial): SubaccountUpdateEventV1 { - const message = createBaseSubaccountUpdateEventV1(); - message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? IndexerSubaccountId.fromPartial(object.subaccountId) : undefined; - message.updatedPerpetualPositions = object.updatedPerpetualPositions?.map(e => IndexerPerpetualPosition.fromPartial(e)) || []; - message.updatedAssetPositions = object.updatedAssetPositions?.map(e => IndexerAssetPosition.fromPartial(e)) || []; - return message; - } - -}; - -function createBaseStatefulOrderEventV1(): StatefulOrderEventV1 { - return { - orderPlace: undefined, - orderRemoval: undefined, - conditionalOrderPlacement: undefined, - conditionalOrderTriggered: undefined, - longTermOrderPlacement: undefined - }; -} + if (message.status !== 0) { + writer.uint32(40).int32(message.status); + } -export const StatefulOrderEventV1 = { - encode(message: StatefulOrderEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.orderPlace !== undefined) { - StatefulOrderEventV1_StatefulOrderPlacementV1.encode(message.orderPlace, writer.uint32(10).fork()).ldelim(); + if (message.quantumConversionExponent !== 0) { + writer.uint32(48).sint32(message.quantumConversionExponent); } - if (message.orderRemoval !== undefined) { - 
StatefulOrderEventV1_StatefulOrderRemovalV1.encode(message.orderRemoval, writer.uint32(34).fork()).ldelim(); + if (message.atomicResolution !== 0) { + writer.uint32(56).sint32(message.atomicResolution); } - if (message.conditionalOrderPlacement !== undefined) { - StatefulOrderEventV1_ConditionalOrderPlacementV1.encode(message.conditionalOrderPlacement, writer.uint32(42).fork()).ldelim(); + if (message.subticksPerTick !== 0) { + writer.uint32(64).uint32(message.subticksPerTick); } - if (message.conditionalOrderTriggered !== undefined) { - StatefulOrderEventV1_ConditionalOrderTriggeredV1.encode(message.conditionalOrderTriggered, writer.uint32(50).fork()).ldelim(); + if (!message.stepBaseQuantums.isZero()) { + writer.uint32(72).uint64(message.stepBaseQuantums); } - if (message.longTermOrderPlacement !== undefined) { - StatefulOrderEventV1_LongTermOrderPlacementV1.encode(message.longTermOrderPlacement, writer.uint32(58).fork()).ldelim(); + if (message.liquidityTier !== 0) { + writer.uint32(80).uint32(message.liquidityTier); + } + + if (message.marketType !== 0) { + writer.uint32(88).int32(message.marketType); + } + + if (message.defaultFunding8hrPpm !== 0) { + writer.uint32(96).int32(message.defaultFunding8hrPpm); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): PerpetualMarketCreateEventV3 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1(); + const message = createBasePerpetualMarketCreateEventV3(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.orderPlace = StatefulOrderEventV1_StatefulOrderPlacementV1.decode(reader, reader.uint32()); + message.id = reader.uint32(); + break; + + case 2: + message.clobPairId = reader.uint32(); + break; + + case 3: + message.ticker = reader.string(); break; case 4: - message.orderRemoval = StatefulOrderEventV1_StatefulOrderRemovalV1.decode(reader, reader.uint32()); + message.marketId = reader.uint32(); break; case 5: - message.conditionalOrderPlacement = StatefulOrderEventV1_ConditionalOrderPlacementV1.decode(reader, reader.uint32()); + message.status = (reader.int32() as any); break; case 6: - message.conditionalOrderTriggered = StatefulOrderEventV1_ConditionalOrderTriggeredV1.decode(reader, reader.uint32()); + message.quantumConversionExponent = reader.sint32(); break; case 7: - message.longTermOrderPlacement = StatefulOrderEventV1_LongTermOrderPlacementV1.decode(reader, reader.uint32()); + message.atomicResolution = reader.sint32(); break; - default: - reader.skipType(tag & 7); + case 8: + message.subticksPerTick = reader.uint32(); break; - } - } - - return message; - }, - - fromPartial(object: DeepPartial): StatefulOrderEventV1 { - const message = createBaseStatefulOrderEventV1(); - message.orderPlace = object.orderPlace !== undefined && object.orderPlace !== null ? StatefulOrderEventV1_StatefulOrderPlacementV1.fromPartial(object.orderPlace) : undefined; - message.orderRemoval = object.orderRemoval !== undefined && object.orderRemoval !== null ? StatefulOrderEventV1_StatefulOrderRemovalV1.fromPartial(object.orderRemoval) : undefined; - message.conditionalOrderPlacement = object.conditionalOrderPlacement !== undefined && object.conditionalOrderPlacement !== null ? 
StatefulOrderEventV1_ConditionalOrderPlacementV1.fromPartial(object.conditionalOrderPlacement) : undefined; - message.conditionalOrderTriggered = object.conditionalOrderTriggered !== undefined && object.conditionalOrderTriggered !== null ? StatefulOrderEventV1_ConditionalOrderTriggeredV1.fromPartial(object.conditionalOrderTriggered) : undefined; - message.longTermOrderPlacement = object.longTermOrderPlacement !== undefined && object.longTermOrderPlacement !== null ? StatefulOrderEventV1_LongTermOrderPlacementV1.fromPartial(object.longTermOrderPlacement) : undefined; - return message; - } - -}; - -function createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(): StatefulOrderEventV1_StatefulOrderPlacementV1 { - return { - order: undefined - }; -} - -export const StatefulOrderEventV1_StatefulOrderPlacementV1 = { - encode(message: StatefulOrderEventV1_StatefulOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.order !== undefined) { - IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); - } - return writer; - }, + case 9: + message.stepBaseQuantums = (reader.uint64() as Long); + break; - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_StatefulOrderPlacementV1 { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(); + case 10: + message.liquidityTier = reader.uint32(); + break; - while (reader.pos < end) { - const tag = reader.uint32(); + case 11: + message.marketType = (reader.int32() as any); + break; - switch (tag >>> 3) { - case 1: - message.order = IndexerOrder.decode(reader, reader.uint32()); + case 12: + message.defaultFunding8hrPpm = reader.int32(); break; default: @@ -2234,49 +3712,87 @@ export const StatefulOrderEventV1_StatefulOrderPlacementV1 = { return message; }, - fromPartial(object: DeepPartial): StatefulOrderEventV1_StatefulOrderPlacementV1 { - const message = createBaseStatefulOrderEventV1_StatefulOrderPlacementV1(); - message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; + fromPartial(object: DeepPartial): PerpetualMarketCreateEventV3 { + const message = createBasePerpetualMarketCreateEventV3(); + message.id = object.id ?? 0; + message.clobPairId = object.clobPairId ?? 0; + message.ticker = object.ticker ?? ""; + message.marketId = object.marketId ?? 0; + message.status = object.status ?? 0; + message.quantumConversionExponent = object.quantumConversionExponent ?? 0; + message.atomicResolution = object.atomicResolution ?? 0; + message.subticksPerTick = object.subticksPerTick ?? 0; + message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? Long.fromValue(object.stepBaseQuantums) : Long.UZERO; + message.liquidityTier = object.liquidityTier ?? 0; + message.marketType = object.marketType ?? 0; + message.defaultFunding8hrPpm = object.defaultFunding8hrPpm ?? 
0; return message; } }; -function createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(): StatefulOrderEventV1_StatefulOrderRemovalV1 { +function createBaseLiquidityTierUpsertEventV1(): LiquidityTierUpsertEventV1 { return { - removedOrderId: undefined, - reason: 0 + id: 0, + name: "", + initialMarginPpm: 0, + maintenanceFractionPpm: 0, + basePositionNotional: Long.UZERO }; } -export const StatefulOrderEventV1_StatefulOrderRemovalV1 = { - encode(message: StatefulOrderEventV1_StatefulOrderRemovalV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.removedOrderId !== undefined) { - IndexerOrderId.encode(message.removedOrderId, writer.uint32(10).fork()).ldelim(); +export const LiquidityTierUpsertEventV1 = { + encode(message: LiquidityTierUpsertEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); } - if (message.reason !== 0) { - writer.uint32(16).int32(message.reason); + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + + if (message.initialMarginPpm !== 0) { + writer.uint32(24).uint32(message.initialMarginPpm); + } + + if (message.maintenanceFractionPpm !== 0) { + writer.uint32(32).uint32(message.maintenanceFractionPpm); + } + + if (!message.basePositionNotional.isZero()) { + writer.uint32(40).uint64(message.basePositionNotional); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_StatefulOrderRemovalV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): LiquidityTierUpsertEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(); + const message = createBaseLiquidityTierUpsertEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.removedOrderId = IndexerOrderId.decode(reader, reader.uint32()); + message.id = reader.uint32(); break; case 2: - message.reason = (reader.int32() as any); + message.name = reader.string(); + break; + + case 3: + message.initialMarginPpm = reader.uint32(); + break; + + case 4: + message.maintenanceFractionPpm = reader.uint32(); + break; + + case 5: + message.basePositionNotional = (reader.uint64() as Long); break; default: @@ -2288,86 +3804,80 @@ export const StatefulOrderEventV1_StatefulOrderRemovalV1 = { return message; }, - fromPartial(object: DeepPartial): StatefulOrderEventV1_StatefulOrderRemovalV1 { - const message = createBaseStatefulOrderEventV1_StatefulOrderRemovalV1(); - message.removedOrderId = object.removedOrderId !== undefined && object.removedOrderId !== null ? IndexerOrderId.fromPartial(object.removedOrderId) : undefined; - message.reason = object.reason ?? 0; + fromPartial(object: DeepPartial): LiquidityTierUpsertEventV1 { + const message = createBaseLiquidityTierUpsertEventV1(); + message.id = object.id ?? 0; + message.name = object.name ?? ""; + message.initialMarginPpm = object.initialMarginPpm ?? 0; + message.maintenanceFractionPpm = object.maintenanceFractionPpm ?? 0; + message.basePositionNotional = object.basePositionNotional !== undefined && object.basePositionNotional !== null ? 
Long.fromValue(object.basePositionNotional) : Long.UZERO; return message; } }; -function createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(): StatefulOrderEventV1_ConditionalOrderPlacementV1 { +function createBaseUpdateClobPairEventV1(): UpdateClobPairEventV1 { return { - order: undefined + clobPairId: 0, + status: 0, + quantumConversionExponent: 0, + subticksPerTick: 0, + stepBaseQuantums: Long.UZERO }; } -export const StatefulOrderEventV1_ConditionalOrderPlacementV1 = { - encode(message: StatefulOrderEventV1_ConditionalOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.order !== undefined) { - IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); +export const UpdateClobPairEventV1 = { + encode(message: UpdateClobPairEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clobPairId !== 0) { + writer.uint32(8).uint32(message.clobPairId); + } + + if (message.status !== 0) { + writer.uint32(16).int32(message.status); + } + + if (message.quantumConversionExponent !== 0) { + writer.uint32(24).sint32(message.quantumConversionExponent); + } + + if (message.subticksPerTick !== 0) { + writer.uint32(32).uint32(message.subticksPerTick); + } + + if (!message.stepBaseQuantums.isZero()) { + writer.uint32(40).uint64(message.stepBaseQuantums); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_ConditionalOrderPlacementV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): UpdateClobPairEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(); + const message = createBaseUpdateClobPairEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.order = IndexerOrder.decode(reader, reader.uint32()); + message.clobPairId = reader.uint32(); break; - default: - reader.skipType(tag & 7); + case 2: + message.status = (reader.int32() as any); break; - } - } - - return message; - }, - - fromPartial(object: DeepPartial): StatefulOrderEventV1_ConditionalOrderPlacementV1 { - const message = createBaseStatefulOrderEventV1_ConditionalOrderPlacementV1(); - message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; - return message; - } - -}; - -function createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { - return { - triggeredOrderId: undefined - }; -} - -export const StatefulOrderEventV1_ConditionalOrderTriggeredV1 = { - encode(message: StatefulOrderEventV1_ConditionalOrderTriggeredV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.triggeredOrderId !== undefined) { - IndexerOrderId.encode(message.triggeredOrderId, writer.uint32(10).fork()).ldelim(); - } - - return writer; - }, - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(); + case 3: + message.quantumConversionExponent = reader.sint32(); + break; - while (reader.pos < end) { - const tag = reader.uint32(); + case 4: + message.subticksPerTick = reader.uint32(); + break; - switch (tag >>> 3) { - case 1: - message.triggeredOrderId = IndexerOrderId.decode(reader, reader.uint32()); + case 5: + message.stepBaseQuantums = (reader.uint64() as Long); break; default: @@ -2379,40 +3889,80 @@ export const StatefulOrderEventV1_ConditionalOrderTriggeredV1 = { return message; }, - fromPartial(object: DeepPartial): StatefulOrderEventV1_ConditionalOrderTriggeredV1 { - const message = createBaseStatefulOrderEventV1_ConditionalOrderTriggeredV1(); - message.triggeredOrderId = object.triggeredOrderId !== undefined && object.triggeredOrderId !== null ? IndexerOrderId.fromPartial(object.triggeredOrderId) : undefined; + fromPartial(object: DeepPartial): UpdateClobPairEventV1 { + const message = createBaseUpdateClobPairEventV1(); + message.clobPairId = object.clobPairId ?? 0; + message.status = object.status ?? 0; + message.quantumConversionExponent = object.quantumConversionExponent ?? 0; + message.subticksPerTick = object.subticksPerTick ?? 0; + message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? 
Long.fromValue(object.stepBaseQuantums) : Long.UZERO; return message; } }; -function createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(): StatefulOrderEventV1_LongTermOrderPlacementV1 { +function createBaseUpdatePerpetualEventV1(): UpdatePerpetualEventV1 { return { - order: undefined + id: 0, + ticker: "", + marketId: 0, + atomicResolution: 0, + liquidityTier: 0 }; } -export const StatefulOrderEventV1_LongTermOrderPlacementV1 = { - encode(message: StatefulOrderEventV1_LongTermOrderPlacementV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.order !== undefined) { - IndexerOrder.encode(message.order, writer.uint32(10).fork()).ldelim(); +export const UpdatePerpetualEventV1 = { + encode(message: UpdatePerpetualEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); + } + + if (message.ticker !== "") { + writer.uint32(18).string(message.ticker); + } + + if (message.marketId !== 0) { + writer.uint32(24).uint32(message.marketId); + } + + if (message.atomicResolution !== 0) { + writer.uint32(32).sint32(message.atomicResolution); + } + + if (message.liquidityTier !== 0) { + writer.uint32(40).uint32(message.liquidityTier); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): StatefulOrderEventV1_LongTermOrderPlacementV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): UpdatePerpetualEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(); + const message = createBaseUpdatePerpetualEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.order = IndexerOrder.decode(reader, reader.uint32()); + message.id = reader.uint32(); + break; + + case 2: + message.ticker = reader.string(); + break; + + case 3: + message.marketId = reader.uint32(); + break; + + case 4: + message.atomicResolution = reader.sint32(); + break; + + case 5: + message.liquidityTier = reader.uint32(); break; default: @@ -2424,53 +3974,62 @@ export const StatefulOrderEventV1_LongTermOrderPlacementV1 = { return message; }, - fromPartial(object: DeepPartial): StatefulOrderEventV1_LongTermOrderPlacementV1 { - const message = createBaseStatefulOrderEventV1_LongTermOrderPlacementV1(); - message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; + fromPartial(object: DeepPartial): UpdatePerpetualEventV1 { + const message = createBaseUpdatePerpetualEventV1(); + message.id = object.id ?? 0; + message.ticker = object.ticker ?? ""; + message.marketId = object.marketId ?? 0; + message.atomicResolution = object.atomicResolution ?? 0; + message.liquidityTier = object.liquidityTier ?? 
0; return message; } }; -function createBaseAssetCreateEventV1(): AssetCreateEventV1 { +function createBaseUpdatePerpetualEventV2(): UpdatePerpetualEventV2 { return { id: 0, - symbol: "", - hasMarket: false, + ticker: "", marketId: 0, - atomicResolution: 0 + atomicResolution: 0, + liquidityTier: 0, + marketType: 0 }; } -export const AssetCreateEventV1 = { - encode(message: AssetCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const UpdatePerpetualEventV2 = { + encode(message: UpdatePerpetualEventV2, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { if (message.id !== 0) { writer.uint32(8).uint32(message.id); } - if (message.symbol !== "") { - writer.uint32(18).string(message.symbol); - } - - if (message.hasMarket === true) { - writer.uint32(24).bool(message.hasMarket); + if (message.ticker !== "") { + writer.uint32(18).string(message.ticker); } if (message.marketId !== 0) { - writer.uint32(32).uint32(message.marketId); + writer.uint32(24).uint32(message.marketId); } if (message.atomicResolution !== 0) { - writer.uint32(40).sint32(message.atomicResolution); + writer.uint32(32).sint32(message.atomicResolution); + } + + if (message.liquidityTier !== 0) { + writer.uint32(40).uint32(message.liquidityTier); + } + + if (message.marketType !== 0) { + writer.uint32(48).int32(message.marketType); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): AssetCreateEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): UpdatePerpetualEventV2 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseAssetCreateEventV1(); + const message = createBaseUpdatePerpetualEventV2(); while (reader.pos < end) { const tag = reader.uint32(); @@ -2481,19 +4040,23 @@ export const AssetCreateEventV1 = { break; case 2: - message.symbol = reader.string(); + message.ticker = reader.string(); break; case 3: - message.hasMarket = reader.bool(); + message.marketId = reader.uint32(); break; case 4: - message.marketId = reader.uint32(); + message.atomicResolution = reader.sint32(); break; case 5: - message.atomicResolution = reader.sint32(); + message.liquidityTier = reader.uint32(); + break; + + case 6: + message.marketType = (reader.int32() as any); break; default: @@ -2505,82 +4068,68 @@ export const AssetCreateEventV1 = { return message; }, - fromPartial(object: DeepPartial): AssetCreateEventV1 { - const message = createBaseAssetCreateEventV1(); + fromPartial(object: DeepPartial): UpdatePerpetualEventV2 { + const message = createBaseUpdatePerpetualEventV2(); message.id = object.id ?? 0; - message.symbol = object.symbol ?? ""; - message.hasMarket = object.hasMarket ?? false; + message.ticker = object.ticker ?? ""; message.marketId = object.marketId ?? 0; message.atomicResolution = object.atomicResolution ?? 0; + message.liquidityTier = object.liquidityTier ?? 0; + message.marketType = object.marketType ?? 
0; return message; } }; -function createBasePerpetualMarketCreateEventV1(): PerpetualMarketCreateEventV1 { +function createBaseUpdatePerpetualEventV3(): UpdatePerpetualEventV3 { return { id: 0, - clobPairId: 0, ticker: "", marketId: 0, - status: 0, - quantumConversionExponent: 0, atomicResolution: 0, - subticksPerTick: 0, - stepBaseQuantums: Long.UZERO, - liquidityTier: 0 + liquidityTier: 0, + marketType: 0, + defaultFunding8hrPpm: 0 }; } -export const PerpetualMarketCreateEventV1 = { - encode(message: PerpetualMarketCreateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const UpdatePerpetualEventV3 = { + encode(message: UpdatePerpetualEventV3, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { if (message.id !== 0) { writer.uint32(8).uint32(message.id); } - if (message.clobPairId !== 0) { - writer.uint32(16).uint32(message.clobPairId); - } - if (message.ticker !== "") { - writer.uint32(26).string(message.ticker); + writer.uint32(18).string(message.ticker); } if (message.marketId !== 0) { - writer.uint32(32).uint32(message.marketId); - } - - if (message.status !== 0) { - writer.uint32(40).int32(message.status); - } - - if (message.quantumConversionExponent !== 0) { - writer.uint32(48).sint32(message.quantumConversionExponent); + writer.uint32(24).uint32(message.marketId); } if (message.atomicResolution !== 0) { - writer.uint32(56).sint32(message.atomicResolution); + writer.uint32(32).sint32(message.atomicResolution); } - if (message.subticksPerTick !== 0) { - writer.uint32(64).uint32(message.subticksPerTick); + if (message.liquidityTier !== 0) { + writer.uint32(40).uint32(message.liquidityTier); } - if (!message.stepBaseQuantums.isZero()) { - writer.uint32(72).uint64(message.stepBaseQuantums); + if (message.marketType !== 0) { + writer.uint32(48).int32(message.marketType); } - if (message.liquidityTier !== 0) { - writer.uint32(80).uint32(message.liquidityTier); + if (message.defaultFunding8hrPpm !== 0) { + 
writer.uint32(56).int32(message.defaultFunding8hrPpm); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): PerpetualMarketCreateEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): UpdatePerpetualEventV3 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBasePerpetualMarketCreateEventV1(); + const message = createBaseUpdatePerpetualEventV3(); while (reader.pos < end) { const tag = reader.uint32(); @@ -2591,39 +4140,132 @@ export const PerpetualMarketCreateEventV1 = { break; case 2: - message.clobPairId = reader.uint32(); + message.ticker = reader.string(); break; case 3: - message.ticker = reader.string(); + message.marketId = reader.uint32(); break; case 4: - message.marketId = reader.uint32(); + message.atomicResolution = reader.sint32(); + break; + + case 5: + message.liquidityTier = reader.uint32(); + break; + + case 6: + message.marketType = (reader.int32() as any); + break; + + case 7: + message.defaultFunding8hrPpm = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UpdatePerpetualEventV3 { + const message = createBaseUpdatePerpetualEventV3(); + message.id = object.id ?? 0; + message.ticker = object.ticker ?? ""; + message.marketId = object.marketId ?? 0; + message.atomicResolution = object.atomicResolution ?? 0; + message.liquidityTier = object.liquidityTier ?? 0; + message.marketType = object.marketType ?? 0; + message.defaultFunding8hrPpm = object.defaultFunding8hrPpm ?? 
0; + return message; + } + +}; + +function createBaseTradingRewardsEventV1(): TradingRewardsEventV1 { + return { + tradingRewards: [] + }; +} + +export const TradingRewardsEventV1 = { + encode(message: TradingRewardsEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.tradingRewards) { + AddressTradingReward.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TradingRewardsEventV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTradingRewardsEventV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tradingRewards.push(AddressTradingReward.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TradingRewardsEventV1 { + const message = createBaseTradingRewardsEventV1(); + message.tradingRewards = object.tradingRewards?.map(e => AddressTradingReward.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAddressTradingReward(): AddressTradingReward { + return { + owner: "", + denomAmount: new Uint8Array() + }; +} + +export const AddressTradingReward = { + encode(message: AddressTradingReward, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } - case 5: - message.status = (reader.int32() as any); - break; + if (message.denomAmount.length !== 0) { + writer.uint32(18).bytes(message.denomAmount); + } - case 6: - message.quantumConversionExponent = reader.sint32(); - break; + return writer; + }, - case 7: - message.atomicResolution = reader.sint32(); - break; + decode(input: _m0.Reader | Uint8Array, length?: number): AddressTradingReward { + const reader = input 
instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressTradingReward(); - case 8: - message.subticksPerTick = reader.uint32(); - break; + while (reader.pos < end) { + const tag = reader.uint32(); - case 9: - message.stepBaseQuantums = (reader.uint64() as Long); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); break; - case 10: - message.liquidityTier = reader.uint32(); + case 2: + message.denomAmount = reader.bytes(); break; default: @@ -2635,85 +4277,41 @@ export const PerpetualMarketCreateEventV1 = { return message; }, - fromPartial(object: DeepPartial): PerpetualMarketCreateEventV1 { - const message = createBasePerpetualMarketCreateEventV1(); - message.id = object.id ?? 0; - message.clobPairId = object.clobPairId ?? 0; - message.ticker = object.ticker ?? ""; - message.marketId = object.marketId ?? 0; - message.status = object.status ?? 0; - message.quantumConversionExponent = object.quantumConversionExponent ?? 0; - message.atomicResolution = object.atomicResolution ?? 0; - message.subticksPerTick = object.subticksPerTick ?? 0; - message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? Long.fromValue(object.stepBaseQuantums) : Long.UZERO; - message.liquidityTier = object.liquidityTier ?? 0; + fromPartial(object: DeepPartial): AddressTradingReward { + const message = createBaseAddressTradingReward(); + message.owner = object.owner ?? ""; + message.denomAmount = object.denomAmount ?? 
new Uint8Array(); return message; } }; -function createBaseLiquidityTierUpsertEventV1(): LiquidityTierUpsertEventV1 { +function createBaseOpenInterestUpdateEventV1(): OpenInterestUpdateEventV1 { return { - id: 0, - name: "", - initialMarginPpm: 0, - maintenanceFractionPpm: 0, - basePositionNotional: Long.UZERO + openInterestUpdates: [] }; } -export const LiquidityTierUpsertEventV1 = { - encode(message: LiquidityTierUpsertEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.id !== 0) { - writer.uint32(8).uint32(message.id); - } - - if (message.name !== "") { - writer.uint32(18).string(message.name); - } - - if (message.initialMarginPpm !== 0) { - writer.uint32(24).uint32(message.initialMarginPpm); - } - - if (message.maintenanceFractionPpm !== 0) { - writer.uint32(32).uint32(message.maintenanceFractionPpm); - } - - if (!message.basePositionNotional.isZero()) { - writer.uint32(40).uint64(message.basePositionNotional); +export const OpenInterestUpdateEventV1 = { + encode(message: OpenInterestUpdateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.openInterestUpdates) { + OpenInterestUpdate.encode(v!, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): LiquidityTierUpsertEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): OpenInterestUpdateEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseLiquidityTierUpsertEventV1(); + const message = createBaseOpenInterestUpdateEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.id = reader.uint32(); - break; - - case 2: - message.name = reader.string(); - break; - - case 3: - message.initialMarginPpm = reader.uint32(); - break; - - case 4: - message.maintenanceFractionPpm = reader.uint32(); - break; - - case 5: - message.basePositionNotional = (reader.uint64() as Long); + message.openInterestUpdates.push(OpenInterestUpdate.decode(reader, reader.uint32())); break; default: @@ -2725,80 +4323,49 @@ export const LiquidityTierUpsertEventV1 = { return message; }, - fromPartial(object: DeepPartial): LiquidityTierUpsertEventV1 { - const message = createBaseLiquidityTierUpsertEventV1(); - message.id = object.id ?? 0; - message.name = object.name ?? ""; - message.initialMarginPpm = object.initialMarginPpm ?? 0; - message.maintenanceFractionPpm = object.maintenanceFractionPpm ?? 0; - message.basePositionNotional = object.basePositionNotional !== undefined && object.basePositionNotional !== null ? 
Long.fromValue(object.basePositionNotional) : Long.UZERO; + fromPartial(object: DeepPartial): OpenInterestUpdateEventV1 { + const message = createBaseOpenInterestUpdateEventV1(); + message.openInterestUpdates = object.openInterestUpdates?.map(e => OpenInterestUpdate.fromPartial(e)) || []; return message; } }; -function createBaseUpdateClobPairEventV1(): UpdateClobPairEventV1 { +function createBaseOpenInterestUpdate(): OpenInterestUpdate { return { - clobPairId: 0, - status: 0, - quantumConversionExponent: 0, - subticksPerTick: 0, - stepBaseQuantums: Long.UZERO + perpetualId: 0, + openInterest: new Uint8Array() }; } -export const UpdateClobPairEventV1 = { - encode(message: UpdateClobPairEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.clobPairId !== 0) { - writer.uint32(8).uint32(message.clobPairId); - } - - if (message.status !== 0) { - writer.uint32(16).int32(message.status); - } - - if (message.quantumConversionExponent !== 0) { - writer.uint32(24).sint32(message.quantumConversionExponent); - } - - if (message.subticksPerTick !== 0) { - writer.uint32(32).uint32(message.subticksPerTick); +export const OpenInterestUpdate = { + encode(message: OpenInterestUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.perpetualId !== 0) { + writer.uint32(8).uint32(message.perpetualId); } - if (!message.stepBaseQuantums.isZero()) { - writer.uint32(40).uint64(message.stepBaseQuantums); + if (message.openInterest.length !== 0) { + writer.uint32(18).bytes(message.openInterest); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): UpdateClobPairEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): OpenInterestUpdate { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseUpdateClobPairEventV1(); + const message = createBaseOpenInterestUpdate(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.clobPairId = reader.uint32(); + message.perpetualId = reader.uint32(); break; case 2: - message.status = (reader.int32() as any); - break; - - case 3: - message.quantumConversionExponent = reader.sint32(); - break; - - case 4: - message.subticksPerTick = reader.uint32(); - break; - - case 5: - message.stepBaseQuantums = (reader.uint64() as Long); + message.openInterest = reader.bytes(); break; default: @@ -2810,57 +4377,64 @@ export const UpdateClobPairEventV1 = { return message; }, - fromPartial(object: DeepPartial): UpdateClobPairEventV1 { - const message = createBaseUpdateClobPairEventV1(); - message.clobPairId = object.clobPairId ?? 0; - message.status = object.status ?? 0; - message.quantumConversionExponent = object.quantumConversionExponent ?? 0; - message.subticksPerTick = object.subticksPerTick ?? 0; - message.stepBaseQuantums = object.stepBaseQuantums !== undefined && object.stepBaseQuantums !== null ? Long.fromValue(object.stepBaseQuantums) : Long.UZERO; + fromPartial(object: DeepPartial): OpenInterestUpdate { + const message = createBaseOpenInterestUpdate(); + message.perpetualId = object.perpetualId ?? 0; + message.openInterest = object.openInterest ?? 
new Uint8Array(); return message; } }; -function createBaseUpdatePerpetualEventV1(): UpdatePerpetualEventV1 { +function createBaseLiquidityTierUpsertEventV2(): LiquidityTierUpsertEventV2 { return { id: 0, - ticker: "", - marketId: 0, - atomicResolution: 0, - liquidityTier: 0 + name: "", + initialMarginPpm: 0, + maintenanceFractionPpm: 0, + basePositionNotional: Long.UZERO, + openInterestLowerCap: Long.UZERO, + openInterestUpperCap: Long.UZERO }; } -export const UpdatePerpetualEventV1 = { - encode(message: UpdatePerpetualEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const LiquidityTierUpsertEventV2 = { + encode(message: LiquidityTierUpsertEventV2, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { if (message.id !== 0) { writer.uint32(8).uint32(message.id); } - if (message.ticker !== "") { - writer.uint32(18).string(message.ticker); + if (message.name !== "") { + writer.uint32(18).string(message.name); } - if (message.marketId !== 0) { - writer.uint32(24).uint32(message.marketId); + if (message.initialMarginPpm !== 0) { + writer.uint32(24).uint32(message.initialMarginPpm); } - if (message.atomicResolution !== 0) { - writer.uint32(32).sint32(message.atomicResolution); + if (message.maintenanceFractionPpm !== 0) { + writer.uint32(32).uint32(message.maintenanceFractionPpm); } - if (message.liquidityTier !== 0) { - writer.uint32(40).uint32(message.liquidityTier); + if (!message.basePositionNotional.isZero()) { + writer.uint32(40).uint64(message.basePositionNotional); + } + + if (!message.openInterestLowerCap.isZero()) { + writer.uint32(48).uint64(message.openInterestLowerCap); + } + + if (!message.openInterestUpperCap.isZero()) { + writer.uint32(56).uint64(message.openInterestUpperCap); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): UpdatePerpetualEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): LiquidityTierUpsertEventV2 { const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseUpdatePerpetualEventV1(); + const message = createBaseLiquidityTierUpsertEventV2(); while (reader.pos < end) { const tag = reader.uint32(); @@ -2871,19 +4445,27 @@ export const UpdatePerpetualEventV1 = { break; case 2: - message.ticker = reader.string(); + message.name = reader.string(); break; case 3: - message.marketId = reader.uint32(); + message.initialMarginPpm = reader.uint32(); break; case 4: - message.atomicResolution = reader.sint32(); + message.maintenanceFractionPpm = reader.uint32(); break; case 5: - message.liquidityTier = reader.uint32(); + message.basePositionNotional = (reader.uint64() as Long); + break; + + case 6: + message.openInterestLowerCap = (reader.uint64() as Long); + break; + + case 7: + message.openInterestUpperCap = (reader.uint64() as Long); break; default: @@ -2895,44 +4477,55 @@ export const UpdatePerpetualEventV1 = { return message; }, - fromPartial(object: DeepPartial): UpdatePerpetualEventV1 { - const message = createBaseUpdatePerpetualEventV1(); + fromPartial(object: DeepPartial): LiquidityTierUpsertEventV2 { + const message = createBaseLiquidityTierUpsertEventV2(); message.id = object.id ?? 0; - message.ticker = object.ticker ?? ""; - message.marketId = object.marketId ?? 0; - message.atomicResolution = object.atomicResolution ?? 0; - message.liquidityTier = object.liquidityTier ?? 0; + message.name = object.name ?? ""; + message.initialMarginPpm = object.initialMarginPpm ?? 0; + message.maintenanceFractionPpm = object.maintenanceFractionPpm ?? 0; + message.basePositionNotional = object.basePositionNotional !== undefined && object.basePositionNotional !== null ? Long.fromValue(object.basePositionNotional) : Long.UZERO; + message.openInterestLowerCap = object.openInterestLowerCap !== undefined && object.openInterestLowerCap !== null ? 
Long.fromValue(object.openInterestLowerCap) : Long.UZERO; + message.openInterestUpperCap = object.openInterestUpperCap !== undefined && object.openInterestUpperCap !== null ? Long.fromValue(object.openInterestUpperCap) : Long.UZERO; return message; } }; -function createBaseTradingRewardsEventV1(): TradingRewardsEventV1 { +function createBaseRegisterAffiliateEventV1(): RegisterAffiliateEventV1 { return { - tradingRewards: [] + referee: "", + affiliate: "" }; } -export const TradingRewardsEventV1 = { - encode(message: TradingRewardsEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.tradingRewards) { - AddressTradingReward.encode(v!, writer.uint32(10).fork()).ldelim(); +export const RegisterAffiliateEventV1 = { + encode(message: RegisterAffiliateEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.referee !== "") { + writer.uint32(10).string(message.referee); + } + + if (message.affiliate !== "") { + writer.uint32(18).string(message.affiliate); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TradingRewardsEventV1 { + decode(input: _m0.Reader | Uint8Array, length?: number): RegisterAffiliateEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseTradingRewardsEventV1(); + const message = createBaseRegisterAffiliateEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.tradingRewards.push(AddressTradingReward.decode(reader, reader.uint32())); + message.referee = reader.string(); + break; + + case 2: + message.affiliate = reader.string(); break; default: @@ -2944,49 +4537,59 @@ export const TradingRewardsEventV1 = { return message; }, - fromPartial(object: DeepPartial): TradingRewardsEventV1 { - const message = createBaseTradingRewardsEventV1(); - message.tradingRewards = object.tradingRewards?.map(e => AddressTradingReward.fromPartial(e)) || []; + fromPartial(object: DeepPartial): RegisterAffiliateEventV1 { + const message = createBaseRegisterAffiliateEventV1(); + message.referee = object.referee ?? ""; + message.affiliate = object.affiliate ?? ""; return message; } }; -function createBaseAddressTradingReward(): AddressTradingReward { +function createBaseUpsertVaultEventV1(): UpsertVaultEventV1 { return { - owner: "", - denomAmount: new Uint8Array() + address: "", + clobPairId: 0, + status: 0 }; } -export const AddressTradingReward = { - encode(message: AddressTradingReward, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.owner !== "") { - writer.uint32(10).string(message.owner); +export const UpsertVaultEventV1 = { + encode(message: UpsertVaultEventV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); } - if (message.denomAmount.length !== 0) { - writer.uint32(18).bytes(message.denomAmount); + if (message.clobPairId !== 0) { + writer.uint32(16).uint32(message.clobPairId); + } + + if (message.status !== 0) { + writer.uint32(24).int32(message.status); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): AddressTradingReward { + decode(input: _m0.Reader | Uint8Array, 
length?: number): UpsertVaultEventV1 { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseAddressTradingReward(); + const message = createBaseUpsertVaultEventV1(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.owner = reader.string(); + message.address = reader.string(); break; case 2: - message.denomAmount = reader.bytes(); + message.clobPairId = reader.uint32(); + break; + + case 3: + message.status = (reader.int32() as any); break; default: @@ -2998,10 +4601,11 @@ export const AddressTradingReward = { return message; }, - fromPartial(object: DeepPartial): AddressTradingReward { - const message = createBaseAddressTradingReward(); - message.owner = object.owner ?? ""; - message.denomAmount = object.denomAmount ?? new Uint8Array(); + fromPartial(object: DeepPartial): UpsertVaultEventV1 { + const message = createBaseUpsertVaultEventV1(); + message.address = object.address ?? ""; + message.clobPairId = object.clobPairId ?? 0; + message.status = object.status ?? 
0; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/off_chain_updates/off_chain_updates.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/off_chain_updates/off_chain_updates.ts index e66f9b0e5c7..891d5f3c87d 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/off_chain_updates/off_chain_updates.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/off_chain_updates/off_chain_updates.ts @@ -1,7 +1,8 @@ import { IndexerOrder, IndexerOrderSDKType, IndexerOrderId, IndexerOrderIdSDKType } from "../protocol/v1/clob"; +import { Timestamp } from "../../../google/protobuf/timestamp"; import { OrderRemovalReason, OrderRemovalReasonSDKType } from "../shared/removal_reason"; import * as _m0 from "protobufjs/minimal"; -import { DeepPartial, Long } from "../../../helpers"; +import { toTimestamp, fromTimestamp, DeepPartial, Long } from "../../../helpers"; /** * OrderPlacementStatus is an enum for the resulting status after an order is * placed. @@ -220,12 +221,18 @@ export function orderRemoveV1_OrderRemovalStatusToJSON(object: OrderRemoveV1_Ord export interface OrderPlaceV1 { order?: IndexerOrder; placementStatus: OrderPlaceV1_OrderPlacementStatus; + /** The timestamp of the order placement. */ + + timeStamp?: Date; } /** OrderPlace messages contain the order placed/replaced. */ export interface OrderPlaceV1SDKType { order?: IndexerOrderSDKType; placement_status: OrderPlaceV1_OrderPlacementStatusSDKType; + /** The timestamp of the order placement. */ + + time_stamp?: Date; } /** * OrderRemove messages contain the id of the order removed, the reason for the @@ -236,6 +243,9 @@ export interface OrderRemoveV1 { removedOrderId?: IndexerOrderId; reason: OrderRemovalReason; removalStatus: OrderRemoveV1_OrderRemovalStatus; + /** The timestamp of the order removal. 
*/ + + timeStamp?: Date; } /** * OrderRemove messages contain the id of the order removed, the reason for the @@ -246,6 +256,9 @@ export interface OrderRemoveV1SDKType { removed_order_id?: IndexerOrderIdSDKType; reason: OrderRemovalReasonSDKType; removal_status: OrderRemoveV1_OrderRemovalStatusSDKType; + /** The timestamp of the order removal. */ + + time_stamp?: Date; } /** * OrderUpdate messages contain the id of the order being updated, and the @@ -265,6 +278,24 @@ export interface OrderUpdateV1SDKType { order_id?: IndexerOrderIdSDKType; total_filled_quantums: Long; } +/** OrderReplace messages contain the old order ID and the replacement order. */ + +export interface OrderReplaceV1 { + /** vault replaces orders with a different order ID */ + oldOrderId?: IndexerOrderId; + order?: IndexerOrder; + placementStatus: OrderPlaceV1_OrderPlacementStatus; + timeStamp?: Date; +} +/** OrderReplace messages contain the old order ID and the replacement order. */ + +export interface OrderReplaceV1SDKType { + /** vault replaces orders with a different order ID */ + old_order_id?: IndexerOrderIdSDKType; + order?: IndexerOrderSDKType; + placement_status: OrderPlaceV1_OrderPlacementStatusSDKType; + time_stamp?: Date; +} /** * An OffChainUpdate message is the message type which will be sent on Kafka to * the Indexer. 
@@ -274,6 +305,7 @@ export interface OffChainUpdateV1 { orderPlace?: OrderPlaceV1; orderRemove?: OrderRemoveV1; orderUpdate?: OrderUpdateV1; + orderReplace?: OrderReplaceV1; } /** * An OffChainUpdate message is the message type which will be sent on Kafka to @@ -284,12 +316,14 @@ export interface OffChainUpdateV1SDKType { order_place?: OrderPlaceV1SDKType; order_remove?: OrderRemoveV1SDKType; order_update?: OrderUpdateV1SDKType; + order_replace?: OrderReplaceV1SDKType; } function createBaseOrderPlaceV1(): OrderPlaceV1 { return { order: undefined, - placementStatus: 0 + placementStatus: 0, + timeStamp: undefined }; } @@ -303,6 +337,10 @@ export const OrderPlaceV1 = { writer.uint32(16).int32(message.placementStatus); } + if (message.timeStamp !== undefined) { + Timestamp.encode(toTimestamp(message.timeStamp), writer.uint32(26).fork()).ldelim(); + } + return writer; }, @@ -323,6 +361,10 @@ export const OrderPlaceV1 = { message.placementStatus = (reader.int32() as any); break; + case 3: + message.timeStamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: reader.skipType(tag & 7); break; @@ -336,6 +378,7 @@ export const OrderPlaceV1 = { const message = createBaseOrderPlaceV1(); message.order = object.order !== undefined && object.order !== null ? IndexerOrder.fromPartial(object.order) : undefined; message.placementStatus = object.placementStatus ?? 0; + message.timeStamp = object.timeStamp ?? 
undefined; return message; } @@ -345,7 +388,8 @@ function createBaseOrderRemoveV1(): OrderRemoveV1 { return { removedOrderId: undefined, reason: 0, - removalStatus: 0 + removalStatus: 0, + timeStamp: undefined }; } @@ -363,6 +407,10 @@ export const OrderRemoveV1 = { writer.uint32(24).int32(message.removalStatus); } + if (message.timeStamp !== undefined) { + Timestamp.encode(toTimestamp(message.timeStamp), writer.uint32(34).fork()).ldelim(); + } + return writer; }, @@ -387,6 +435,10 @@ export const OrderRemoveV1 = { message.removalStatus = (reader.int32() as any); break; + case 4: + message.timeStamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: reader.skipType(tag & 7); break; @@ -401,6 +453,7 @@ export const OrderRemoveV1 = { message.removedOrderId = object.removedOrderId !== undefined && object.removedOrderId !== null ? IndexerOrderId.fromPartial(object.removedOrderId) : undefined; message.reason = object.reason ?? 0; message.removalStatus = object.removalStatus ?? 0; + message.timeStamp = object.timeStamp ?? 
undefined; return message; } @@ -461,11 +514,87 @@ export const OrderUpdateV1 = { }; +function createBaseOrderReplaceV1(): OrderReplaceV1 { + return { + oldOrderId: undefined, + order: undefined, + placementStatus: 0, + timeStamp: undefined + }; +} + +export const OrderReplaceV1 = { + encode(message: OrderReplaceV1, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.oldOrderId !== undefined) { + IndexerOrderId.encode(message.oldOrderId, writer.uint32(10).fork()).ldelim(); + } + + if (message.order !== undefined) { + IndexerOrder.encode(message.order, writer.uint32(18).fork()).ldelim(); + } + + if (message.placementStatus !== 0) { + writer.uint32(24).int32(message.placementStatus); + } + + if (message.timeStamp !== undefined) { + Timestamp.encode(toTimestamp(message.timeStamp), writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OrderReplaceV1 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOrderReplaceV1(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.oldOrderId = IndexerOrderId.decode(reader, reader.uint32()); + break; + + case 2: + message.order = IndexerOrder.decode(reader, reader.uint32()); + break; + + case 3: + message.placementStatus = (reader.int32() as any); + break; + + case 4: + message.timeStamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OrderReplaceV1 { + const message = createBaseOrderReplaceV1(); + message.oldOrderId = object.oldOrderId !== undefined && object.oldOrderId !== null ? IndexerOrderId.fromPartial(object.oldOrderId) : undefined; + message.order = object.order !== undefined && object.order !== null ? 
IndexerOrder.fromPartial(object.order) : undefined; + message.placementStatus = object.placementStatus ?? 0; + message.timeStamp = object.timeStamp ?? undefined; + return message; + } + +}; + function createBaseOffChainUpdateV1(): OffChainUpdateV1 { return { orderPlace: undefined, orderRemove: undefined, - orderUpdate: undefined + orderUpdate: undefined, + orderReplace: undefined }; } @@ -483,6 +612,10 @@ export const OffChainUpdateV1 = { OrderUpdateV1.encode(message.orderUpdate, writer.uint32(26).fork()).ldelim(); } + if (message.orderReplace !== undefined) { + OrderReplaceV1.encode(message.orderReplace, writer.uint32(34).fork()).ldelim(); + } + return writer; }, @@ -507,6 +640,10 @@ export const OffChainUpdateV1 = { message.orderUpdate = OrderUpdateV1.decode(reader, reader.uint32()); break; + case 4: + message.orderReplace = OrderReplaceV1.decode(reader, reader.uint32()); + break; + default: reader.skipType(tag & 7); break; @@ -521,6 +658,7 @@ export const OffChainUpdateV1 = { message.orderPlace = object.orderPlace !== undefined && object.orderPlace !== null ? OrderPlaceV1.fromPartial(object.orderPlace) : undefined; message.orderRemove = object.orderRemove !== undefined && object.orderRemove !== null ? OrderRemoveV1.fromPartial(object.orderRemove) : undefined; message.orderUpdate = object.orderUpdate !== undefined && object.orderUpdate !== null ? OrderUpdateV1.fromPartial(object.orderUpdate) : undefined; + message.orderReplace = object.orderReplace !== undefined && object.orderReplace !== null ? OrderReplaceV1.fromPartial(object.orderReplace) : undefined; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/perpetual.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/perpetual.ts new file mode 100644 index 00000000000..829dced6d83 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/perpetual.ts @@ -0,0 +1,67 @@ +/** + * Market type of perpetual. 
+ * Defined in perpetual. + */ +export enum PerpetualMarketType { + /** PERPETUAL_MARKET_TYPE_UNSPECIFIED - Unspecified market type. */ + PERPETUAL_MARKET_TYPE_UNSPECIFIED = 0, + + /** PERPETUAL_MARKET_TYPE_CROSS - Market type for cross margin perpetual markets. */ + PERPETUAL_MARKET_TYPE_CROSS = 1, + + /** PERPETUAL_MARKET_TYPE_ISOLATED - Market type for isolated margin perpetual markets. */ + PERPETUAL_MARKET_TYPE_ISOLATED = 2, + UNRECOGNIZED = -1, +} +/** + * Market type of perpetual. + * Defined in perpetual. + */ + +export enum PerpetualMarketTypeSDKType { + /** PERPETUAL_MARKET_TYPE_UNSPECIFIED - Unspecified market type. */ + PERPETUAL_MARKET_TYPE_UNSPECIFIED = 0, + + /** PERPETUAL_MARKET_TYPE_CROSS - Market type for cross margin perpetual markets. */ + PERPETUAL_MARKET_TYPE_CROSS = 1, + + /** PERPETUAL_MARKET_TYPE_ISOLATED - Market type for isolated margin perpetual markets. */ + PERPETUAL_MARKET_TYPE_ISOLATED = 2, + UNRECOGNIZED = -1, +} +export function perpetualMarketTypeFromJSON(object: any): PerpetualMarketType { + switch (object) { + case 0: + case "PERPETUAL_MARKET_TYPE_UNSPECIFIED": + return PerpetualMarketType.PERPETUAL_MARKET_TYPE_UNSPECIFIED; + + case 1: + case "PERPETUAL_MARKET_TYPE_CROSS": + return PerpetualMarketType.PERPETUAL_MARKET_TYPE_CROSS; + + case 2: + case "PERPETUAL_MARKET_TYPE_ISOLATED": + return PerpetualMarketType.PERPETUAL_MARKET_TYPE_ISOLATED; + + case -1: + case "UNRECOGNIZED": + default: + return PerpetualMarketType.UNRECOGNIZED; + } +} +export function perpetualMarketTypeToJSON(object: PerpetualMarketType): string { + switch (object) { + case PerpetualMarketType.PERPETUAL_MARKET_TYPE_UNSPECIFIED: + return "PERPETUAL_MARKET_TYPE_UNSPECIFIED"; + + case PerpetualMarketType.PERPETUAL_MARKET_TYPE_CROSS: + return "PERPETUAL_MARKET_TYPE_CROSS"; + + case PerpetualMarketType.PERPETUAL_MARKET_TYPE_ISOLATED: + return "PERPETUAL_MARKET_TYPE_ISOLATED"; + + case PerpetualMarketType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} \ 
No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/vault.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/vault.ts new file mode 100644 index 00000000000..93b1f9b2fdb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/protocol/v1/vault.ts @@ -0,0 +1,87 @@ +/** VaultStatus represents the status of a vault. */ +export enum VaultStatus { + /** VAULT_STATUS_UNSPECIFIED - Default value, invalid and unused. */ + VAULT_STATUS_UNSPECIFIED = 0, + + /** VAULT_STATUS_DEACTIVATED - Don’t place orders. Does not count toward global vault balances. */ + VAULT_STATUS_DEACTIVATED = 1, + + /** VAULT_STATUS_STAND_BY - Don’t place orders. Does count towards global vault balances. */ + VAULT_STATUS_STAND_BY = 2, + + /** VAULT_STATUS_QUOTING - Places orders on both sides of the book. */ + VAULT_STATUS_QUOTING = 3, + + /** VAULT_STATUS_CLOSE_ONLY - Only place orders that close the position. */ + VAULT_STATUS_CLOSE_ONLY = 4, + UNRECOGNIZED = -1, +} +/** VaultStatus represents the status of a vault. */ + +export enum VaultStatusSDKType { + /** VAULT_STATUS_UNSPECIFIED - Default value, invalid and unused. */ + VAULT_STATUS_UNSPECIFIED = 0, + + /** VAULT_STATUS_DEACTIVATED - Don’t place orders. Does not count toward global vault balances. */ + VAULT_STATUS_DEACTIVATED = 1, + + /** VAULT_STATUS_STAND_BY - Don’t place orders. Does count towards global vault balances. */ + VAULT_STATUS_STAND_BY = 2, + + /** VAULT_STATUS_QUOTING - Places orders on both sides of the book. */ + VAULT_STATUS_QUOTING = 3, + + /** VAULT_STATUS_CLOSE_ONLY - Only place orders that close the position. 
*/ + VAULT_STATUS_CLOSE_ONLY = 4, + UNRECOGNIZED = -1, +} +export function vaultStatusFromJSON(object: any): VaultStatus { + switch (object) { + case 0: + case "VAULT_STATUS_UNSPECIFIED": + return VaultStatus.VAULT_STATUS_UNSPECIFIED; + + case 1: + case "VAULT_STATUS_DEACTIVATED": + return VaultStatus.VAULT_STATUS_DEACTIVATED; + + case 2: + case "VAULT_STATUS_STAND_BY": + return VaultStatus.VAULT_STATUS_STAND_BY; + + case 3: + case "VAULT_STATUS_QUOTING": + return VaultStatus.VAULT_STATUS_QUOTING; + + case 4: + case "VAULT_STATUS_CLOSE_ONLY": + return VaultStatus.VAULT_STATUS_CLOSE_ONLY; + + case -1: + case "UNRECOGNIZED": + default: + return VaultStatus.UNRECOGNIZED; + } +} +export function vaultStatusToJSON(object: VaultStatus): string { + switch (object) { + case VaultStatus.VAULT_STATUS_UNSPECIFIED: + return "VAULT_STATUS_UNSPECIFIED"; + + case VaultStatus.VAULT_STATUS_DEACTIVATED: + return "VAULT_STATUS_DEACTIVATED"; + + case VaultStatus.VAULT_STATUS_STAND_BY: + return "VAULT_STATUS_STAND_BY"; + + case VaultStatus.VAULT_STATUS_QUOTING: + return "VAULT_STATUS_QUOTING"; + + case VaultStatus.VAULT_STATUS_CLOSE_ONLY: + return "VAULT_STATUS_CLOSE_ONLY"; + + case VaultStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/shared/removal_reason.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/shared/removal_reason.ts index 970a5c8e2c5..1d49cf0805e 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/shared/removal_reason.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/shared/removal_reason.ts @@ -78,6 +78,12 @@ export enum OrderRemovalReason { /** ORDER_REMOVAL_REASON_FINAL_SETTLEMENT - The order has been removed since its ClobPair has entered final settlement. 
*/ ORDER_REMOVAL_REASON_FINAL_SETTLEMENT = 14, + + /** + * ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS - The order has been removed since filling it would lead to the subaccount + * violating isolated subaccount constraints. + */ + ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS = 15, UNRECOGNIZED = -1, } /** OrderRemovalReason is an enum of all the reasons an order was removed. */ @@ -161,6 +167,12 @@ export enum OrderRemovalReasonSDKType { /** ORDER_REMOVAL_REASON_FINAL_SETTLEMENT - The order has been removed since its ClobPair has entered final settlement. */ ORDER_REMOVAL_REASON_FINAL_SETTLEMENT = 14, + + /** + * ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS - The order has been removed since filling it would lead to the subaccount + * violating isolated subaccount constraints. + */ + ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS = 15, UNRECOGNIZED = -1, } export function orderRemovalReasonFromJSON(object: any): OrderRemovalReason { @@ -225,6 +237,10 @@ export function orderRemovalReasonFromJSON(object: any): OrderRemovalReason { case "ORDER_REMOVAL_REASON_FINAL_SETTLEMENT": return OrderRemovalReason.ORDER_REMOVAL_REASON_FINAL_SETTLEMENT; + case 15: + case "ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS": + return OrderRemovalReason.ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS; + case -1: case "UNRECOGNIZED": default: @@ -278,6 +294,9 @@ export function orderRemovalReasonToJSON(object: OrderRemovalReason): string { case OrderRemovalReason.ORDER_REMOVAL_REASON_FINAL_SETTLEMENT: return "ORDER_REMOVAL_REASON_FINAL_SETTLEMENT"; + case OrderRemovalReason.ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS: + return "ORDER_REMOVAL_REASON_VIOLATES_ISOLATED_SUBACCOUNT_CONSTRAINTS"; + case OrderRemovalReason.UNRECOGNIZED: default: return "UNRECOGNIZED"; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/socks/messages.ts 
b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/socks/messages.ts index 6a0f8fd6f53..9563487bd9b 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/socks/messages.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/indexer/socks/messages.ts @@ -265,6 +265,30 @@ export interface CandleMessageSDKType { version: string; } +/** Message to be sent through the 'to-websockets-block-height` kafka topic. */ + +export interface BlockHeightMessage { + /** Block height where the contents occur. */ + blockHeight: string; + /** ISO formatted time of the block height. */ + + time: string; + /** Version of the websocket message. */ + + version: string; +} +/** Message to be sent through the 'to-websockets-block-height` kafka topic. */ + +export interface BlockHeightMessageSDKType { + /** Block height where the contents occur. */ + block_height: string; + /** ISO formatted time of the block height. */ + + time: string; + /** Version of the websocket message. */ + + version: string; +} function createBaseOrderbookMessage(): OrderbookMessage { return { @@ -629,4 +653,69 @@ export const CandleMessage = { return message; } +}; + +function createBaseBlockHeightMessage(): BlockHeightMessage { + return { + blockHeight: "", + time: "", + version: "" + }; +} + +export const BlockHeightMessage = { + encode(message: BlockHeightMessage, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockHeight !== "") { + writer.uint32(10).string(message.blockHeight); + } + + if (message.time !== "") { + writer.uint32(18).string(message.time); + } + + if (message.version !== "") { + writer.uint32(26).string(message.version); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockHeightMessage { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockHeightMessage(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockHeight = reader.string(); + break; + + case 2: + message.time = reader.string(); + break; + + case 3: + message.version = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BlockHeightMessage { + const message = createBaseBlockHeightMessage(); + message.blockHeight = object.blockHeight ?? ""; + message.time = object.time ?? ""; + message.version = object.version ?? ""; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/lcd.ts index 318178b12f4..7a332c4c8e3 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/lcd.ts @@ -9,6 +9,12 @@ export const createLCDClient = async ({ }); return { dydxprotocol: { + accountplus: new (await import("./accountplus/query.lcd")).LCDQueryClient({ + requestClient + }), + affiliates: new (await import("./affiliates/query.lcd")).LCDQueryClient({ + requestClient + }), assets: new (await import("./assets/query.lcd")).LCDQueryClient({ requestClient }), @@ -30,6 +36,9 @@ export const createLCDClient = async ({ feetiers: new (await import("./feetiers/query.lcd")).LCDQueryClient({ requestClient }), + listing: new (await import("./listing/query.lcd")).LCDQueryClient({ + requestClient + }), perpetuals: new (await import("./perpetuals/query.lcd")).LCDQueryClient({ requestClient }), @@ -39,6 +48,9 @@ export const createLCDClient = async ({ ratelimit: new (await import("./ratelimit/query.lcd")).LCDQueryClient({ requestClient }), + revshare: new (await import("./revshare/query.lcd")).LCDQueryClient({ + requestClient + }), rewards: new (await 
import("./rewards/query.lcd")).LCDQueryClient({ requestClient }), @@ -48,6 +60,9 @@ export const createLCDClient = async ({ subaccounts: new (await import("./subaccounts/query.lcd")).LCDQueryClient({ requestClient }), + vault: new (await import("./vault/query.lcd")).LCDQueryClient({ + requestClient + }), vest: new (await import("./vest/query.lcd")).LCDQueryClient({ requestClient }) diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/genesis.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/genesis.ts new file mode 100644 index 00000000000..ec58a89c8cb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/genesis.ts @@ -0,0 +1,82 @@ +import { ListingVaultDepositParams, ListingVaultDepositParamsSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** GenesisState defines `x/listing`'s genesis state. */ + +export interface GenesisState { + /** + * hard_cap_for_markets is the hard cap for the number of markets that can be + * listed + */ + hardCapForMarkets: number; + /** listing_vault_deposit_params is the params for PML megavault deposits */ + + listingVaultDepositParams?: ListingVaultDepositParams; +} +/** GenesisState defines `x/listing`'s genesis state. 
*/ + +export interface GenesisStateSDKType { + /** + * hard_cap_for_markets is the hard cap for the number of markets that can be + * listed + */ + hard_cap_for_markets: number; + /** listing_vault_deposit_params is the params for PML megavault deposits */ + + listing_vault_deposit_params?: ListingVaultDepositParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + hardCapForMarkets: 0, + listingVaultDepositParams: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hardCapForMarkets !== 0) { + writer.uint32(8).uint32(message.hardCapForMarkets); + } + + if (message.listingVaultDepositParams !== undefined) { + ListingVaultDepositParams.encode(message.listingVaultDepositParams, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hardCapForMarkets = reader.uint32(); + break; + + case 2: + message.listingVaultDepositParams = ListingVaultDepositParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.hardCapForMarkets = object.hardCapForMarkets ?? 0; + message.listingVaultDepositParams = object.listingVaultDepositParams !== undefined && object.listingVaultDepositParams !== null ? 
ListingVaultDepositParams.fromPartial(object.listingVaultDepositParams) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/params.ts new file mode 100644 index 00000000000..079889c1496 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/params.ts @@ -0,0 +1,97 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** ListingVaultDepositParams represents the params for PML megavault deposits */ + +export interface ListingVaultDepositParams { + /** Amount that will be deposited into the new market vault exclusively */ + newVaultDepositAmount: Uint8Array; + /** + * Amount deposited into the main vault exclusively. This amount does not + * include the amount deposited into the new vault. + */ + + mainVaultDepositAmount: Uint8Array; + /** Lockup period for this deposit */ + + numBlocksToLockShares: number; +} +/** ListingVaultDepositParams represents the params for PML megavault deposits */ + +export interface ListingVaultDepositParamsSDKType { + /** Amount that will be deposited into the new market vault exclusively */ + new_vault_deposit_amount: Uint8Array; + /** + * Amount deposited into the main vault exclusively. This amount does not + * include the amount deposited into the new vault. 
+ */ + + main_vault_deposit_amount: Uint8Array; + /** Lockup period for this deposit */ + + num_blocks_to_lock_shares: number; +} + +function createBaseListingVaultDepositParams(): ListingVaultDepositParams { + return { + newVaultDepositAmount: new Uint8Array(), + mainVaultDepositAmount: new Uint8Array(), + numBlocksToLockShares: 0 + }; +} + +export const ListingVaultDepositParams = { + encode(message: ListingVaultDepositParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.newVaultDepositAmount.length !== 0) { + writer.uint32(10).bytes(message.newVaultDepositAmount); + } + + if (message.mainVaultDepositAmount.length !== 0) { + writer.uint32(18).bytes(message.mainVaultDepositAmount); + } + + if (message.numBlocksToLockShares !== 0) { + writer.uint32(24).uint32(message.numBlocksToLockShares); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListingVaultDepositParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListingVaultDepositParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.newVaultDepositAmount = reader.bytes(); + break; + + case 2: + message.mainVaultDepositAmount = reader.bytes(); + break; + + case 3: + message.numBlocksToLockShares = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ListingVaultDepositParams { + const message = createBaseListingVaultDepositParams(); + message.newVaultDepositAmount = object.newVaultDepositAmount ?? new Uint8Array(); + message.mainVaultDepositAmount = object.mainVaultDepositAmount ?? new Uint8Array(); + message.numBlocksToLockShares = object.numBlocksToLockShares ?? 
0; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.lcd.ts new file mode 100644 index 00000000000..70ab686dfbb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.lcd.ts @@ -0,0 +1,30 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryMarketsHardCap, QueryMarketsHardCapResponseSDKType, QueryListingVaultDepositParams, QueryListingVaultDepositParamsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.marketsHardCap = this.marketsHardCap.bind(this); + this.listingVaultDepositParams = this.listingVaultDepositParams.bind(this); + } + /* Queries for the hard cap number of listed markets */ + + + async marketsHardCap(_params: QueryMarketsHardCap = {}): Promise { + const endpoint = `dydxprotocol/listing/markets_hard_cap`; + return await this.req.get(endpoint); + } + /* Queries the listing vault deposit params */ + + + async listingVaultDepositParams(_params: QueryListingVaultDepositParams = {}): Promise { + const endpoint = `dydxprotocol/listing/vault_deposit_params`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.rpc.Query.ts new file mode 100644 index 00000000000..f7138c04fc8 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.rpc.Query.ts @@ -0,0 +1,49 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryMarketsHardCap, QueryMarketsHardCapResponse, QueryListingVaultDepositParams, 
QueryListingVaultDepositParamsResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Queries for the hard cap number of listed markets */ + marketsHardCap(request?: QueryMarketsHardCap): Promise; + /** Queries the listing vault deposit params */ + + listingVaultDepositParams(request?: QueryListingVaultDepositParams): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.marketsHardCap = this.marketsHardCap.bind(this); + this.listingVaultDepositParams = this.listingVaultDepositParams.bind(this); + } + + marketsHardCap(request: QueryMarketsHardCap = {}): Promise { + const data = QueryMarketsHardCap.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Query", "MarketsHardCap", data); + return promise.then(data => QueryMarketsHardCapResponse.decode(new _m0.Reader(data))); + } + + listingVaultDepositParams(request: QueryListingVaultDepositParams = {}): Promise { + const data = QueryListingVaultDepositParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Query", "ListingVaultDepositParams", data); + return promise.then(data => QueryListingVaultDepositParamsResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + marketsHardCap(request?: QueryMarketsHardCap): Promise { + return queryService.marketsHardCap(request); + }, + + listingVaultDepositParams(request?: QueryListingVaultDepositParams): Promise { + return queryService.listingVaultDepositParams(request); + } + + }; +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.ts new file mode 100644 index 00000000000..da0efcc04f1 --- 
/dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/query.ts @@ -0,0 +1,195 @@ +import { ListingVaultDepositParams, ListingVaultDepositParamsSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** Queries for the hard cap on listed markets */ + +export interface QueryMarketsHardCap {} +/** Queries for the hard cap on listed markets */ + +export interface QueryMarketsHardCapSDKType {} +/** Response type indicating the hard cap on listed markets */ + +export interface QueryMarketsHardCapResponse { + /** Response type indicating the hard cap on listed markets */ + hardCap: number; +} +/** Response type indicating the hard cap on listed markets */ + +export interface QueryMarketsHardCapResponseSDKType { + /** Response type indicating the hard cap on listed markets */ + hard_cap: number; +} +/** Queries the listing vault deposit params */ + +export interface QueryListingVaultDepositParams {} +/** Queries the listing vault deposit params */ + +export interface QueryListingVaultDepositParamsSDKType {} +/** Response type for QueryListingVaultDepositParams */ + +export interface QueryListingVaultDepositParamsResponse { + params?: ListingVaultDepositParams; +} +/** Response type for QueryListingVaultDepositParams */ + +export interface QueryListingVaultDepositParamsResponseSDKType { + params?: ListingVaultDepositParamsSDKType; +} + +function createBaseQueryMarketsHardCap(): QueryMarketsHardCap { + return {}; +} + +export const QueryMarketsHardCap = { + encode(_: QueryMarketsHardCap, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketsHardCap { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryMarketsHardCap(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryMarketsHardCap { + const message = createBaseQueryMarketsHardCap(); + return message; + } + +}; + +function createBaseQueryMarketsHardCapResponse(): QueryMarketsHardCapResponse { + return { + hardCap: 0 + }; +} + +export const QueryMarketsHardCapResponse = { + encode(message: QueryMarketsHardCapResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hardCap !== 0) { + writer.uint32(8).uint32(message.hardCap); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketsHardCapResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMarketsHardCapResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hardCap = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMarketsHardCapResponse { + const message = createBaseQueryMarketsHardCapResponse(); + message.hardCap = object.hardCap ?? 0; + return message; + } + +}; + +function createBaseQueryListingVaultDepositParams(): QueryListingVaultDepositParams { + return {}; +} + +export const QueryListingVaultDepositParams = { + encode(_: QueryListingVaultDepositParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryListingVaultDepositParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryListingVaultDepositParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryListingVaultDepositParams { + const message = createBaseQueryListingVaultDepositParams(); + return message; + } + +}; + +function createBaseQueryListingVaultDepositParamsResponse(): QueryListingVaultDepositParamsResponse { + return { + params: undefined + }; +} + +export const QueryListingVaultDepositParamsResponse = { + encode(message: QueryListingVaultDepositParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + ListingVaultDepositParams.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryListingVaultDepositParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryListingVaultDepositParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = ListingVaultDepositParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryListingVaultDepositParamsResponse { + const message = createBaseQueryListingVaultDepositParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? 
ListingVaultDepositParams.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.rpc.msg.ts new file mode 100644 index 00000000000..61a0bdf54b4 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.rpc.msg.ts @@ -0,0 +1,57 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSetMarketsHardCap, MsgSetMarketsHardCapResponse, MsgCreateMarketPermissionless, MsgCreateMarketPermissionlessResponse, MsgSetListingVaultDepositParams, MsgSetListingVaultDepositParamsResponse, MsgUpgradeIsolatedPerpetualToCross, MsgUpgradeIsolatedPerpetualToCrossResponse } from "./tx"; +/** Msg defines the Msg service. */ + +export interface Msg { + /** SetMarketsHardCap sets a hard cap on the number of markets listed */ + setMarketsHardCap(request: MsgSetMarketsHardCap): Promise; + /** CreateMarketPermissionless creates a new market without going through x/gov */ + + createMarketPermissionless(request: MsgCreateMarketPermissionless): Promise; + /** SetListingVaultDepositParams sets PML megavault deposit params */ + + setListingVaultDepositParams(request: MsgSetListingVaultDepositParams): Promise; + /** + * UpgradeIsolatedPerpetualToCross upgrades a perpetual from isolated to cross + * margin + */ + + upgradeIsolatedPerpetualToCross(request: MsgUpgradeIsolatedPerpetualToCross): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.setMarketsHardCap = this.setMarketsHardCap.bind(this); + this.createMarketPermissionless = this.createMarketPermissionless.bind(this); + this.setListingVaultDepositParams = this.setListingVaultDepositParams.bind(this); + this.upgradeIsolatedPerpetualToCross = this.upgradeIsolatedPerpetualToCross.bind(this); + } + + 
setMarketsHardCap(request: MsgSetMarketsHardCap): Promise { + const data = MsgSetMarketsHardCap.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Msg", "SetMarketsHardCap", data); + return promise.then(data => MsgSetMarketsHardCapResponse.decode(new _m0.Reader(data))); + } + + createMarketPermissionless(request: MsgCreateMarketPermissionless): Promise { + const data = MsgCreateMarketPermissionless.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Msg", "CreateMarketPermissionless", data); + return promise.then(data => MsgCreateMarketPermissionlessResponse.decode(new _m0.Reader(data))); + } + + setListingVaultDepositParams(request: MsgSetListingVaultDepositParams): Promise { + const data = MsgSetListingVaultDepositParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Msg", "SetListingVaultDepositParams", data); + return promise.then(data => MsgSetListingVaultDepositParamsResponse.decode(new _m0.Reader(data))); + } + + upgradeIsolatedPerpetualToCross(request: MsgUpgradeIsolatedPerpetualToCross): Promise { + const data = MsgUpgradeIsolatedPerpetualToCross.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.listing.Msg", "UpgradeIsolatedPerpetualToCross", data); + return promise.then(data => MsgUpgradeIsolatedPerpetualToCrossResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.ts new file mode 100644 index 00000000000..111d29fc370 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/listing/tx.ts @@ -0,0 +1,492 @@ +import { SubaccountId, SubaccountIdSDKType } from "../subaccounts/subaccount"; +import { ListingVaultDepositParams, ListingVaultDepositParamsSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from 
"../../helpers"; +/** + * MsgSetMarketsHardCap is used to set a hard cap on the number of markets + * listed + */ + +export interface MsgSetMarketsHardCap { + authority: string; + /** Hard cap for the total number of markets listed */ + + hardCapForMarkets: number; +} +/** + * MsgSetMarketsHardCap is used to set a hard cap on the number of markets + * listed + */ + +export interface MsgSetMarketsHardCapSDKType { + authority: string; + /** Hard cap for the total number of markets listed */ + + hard_cap_for_markets: number; +} +/** MsgSetMarketsHardCapResponse defines the MsgSetMarketsHardCap response */ + +export interface MsgSetMarketsHardCapResponse {} +/** MsgSetMarketsHardCapResponse defines the MsgSetMarketsHardCap response */ + +export interface MsgSetMarketsHardCapResponseSDKType {} +/** + * MsgCreateMarketPermissionless is a message used to create new markets without + * going through x/gov + */ + +export interface MsgCreateMarketPermissionless { + /** The name of the `Perpetual` (e.g. `BTC-USD`). */ + ticker: string; + /** The subaccount to deposit from. */ + + subaccountId?: SubaccountId; +} +/** + * MsgCreateMarketPermissionless is a message used to create new markets without + * going through x/gov + */ + +export interface MsgCreateMarketPermissionlessSDKType { + /** The name of the `Perpetual` (e.g. `BTC-USD`). */ + ticker: string; + /** The subaccount to deposit from. 
*/ + + subaccount_id?: SubaccountIdSDKType; +} +/** + * MsgCreateMarketPermissionlessResponse defines the + * MsgCreateMarketPermissionless response + */ + +export interface MsgCreateMarketPermissionlessResponse {} +/** + * MsgCreateMarketPermissionlessResponse defines the + * MsgCreateMarketPermissionless response + */ + +export interface MsgCreateMarketPermissionlessResponseSDKType {} +/** + * MsgSetListingVaultDepositParams is a message used to set PML megavault + * deposit params + */ + +export interface MsgSetListingVaultDepositParams { + authority: string; + /** Params which define the vault deposit for market listing */ + + params?: ListingVaultDepositParams; +} +/** + * MsgSetListingVaultDepositParams is a message used to set PML megavault + * deposit params + */ + +export interface MsgSetListingVaultDepositParamsSDKType { + authority: string; + /** Params which define the vault deposit for market listing */ + + params?: ListingVaultDepositParamsSDKType; +} +/** + * MsgSetListingVaultDepositParamsResponse defines the + * MsgSetListingVaultDepositParams response + */ + +export interface MsgSetListingVaultDepositParamsResponse {} +/** + * MsgSetListingVaultDepositParamsResponse defines the + * MsgSetListingVaultDepositParams response + */ + +export interface MsgSetListingVaultDepositParamsResponseSDKType {} +/** + * MsgUpgradeIsolatedPerpetualToCross is used to upgrade a market from + * isolated margin to cross margin. + */ + +export interface MsgUpgradeIsolatedPerpetualToCross { + authority: string; + /** ID of the perpetual to be upgraded to CROSS */ + + perpetualId: number; +} +/** + * MsgUpgradeIsolatedPerpetualToCross is used to upgrade a market from + * isolated margin to cross margin. 
+ */ + +export interface MsgUpgradeIsolatedPerpetualToCrossSDKType { + authority: string; + /** ID of the perpetual to be upgraded to CROSS */ + + perpetual_id: number; +} +/** + * MsgUpgradeIsolatedPerpetualToCrossResponse defines the + * UpgradeIsolatedPerpetualToCross response type. + */ + +export interface MsgUpgradeIsolatedPerpetualToCrossResponse {} +/** + * MsgUpgradeIsolatedPerpetualToCrossResponse defines the + * UpgradeIsolatedPerpetualToCross response type. + */ + +export interface MsgUpgradeIsolatedPerpetualToCrossResponseSDKType {} + +function createBaseMsgSetMarketsHardCap(): MsgSetMarketsHardCap { + return { + authority: "", + hardCapForMarkets: 0 + }; +} + +export const MsgSetMarketsHardCap = { + encode(message: MsgSetMarketsHardCap, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.hardCapForMarkets !== 0) { + writer.uint32(16).uint32(message.hardCapForMarkets); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketsHardCap { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetMarketsHardCap(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.hardCapForMarkets = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetMarketsHardCap { + const message = createBaseMsgSetMarketsHardCap(); + message.authority = object.authority ?? ""; + message.hardCapForMarkets = object.hardCapForMarkets ?? 
0; + return message; + } + +}; + +function createBaseMsgSetMarketsHardCapResponse(): MsgSetMarketsHardCapResponse { + return {}; +} + +export const MsgSetMarketsHardCapResponse = { + encode(_: MsgSetMarketsHardCapResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketsHardCapResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetMarketsHardCapResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetMarketsHardCapResponse { + const message = createBaseMsgSetMarketsHardCapResponse(); + return message; + } + +}; + +function createBaseMsgCreateMarketPermissionless(): MsgCreateMarketPermissionless { + return { + ticker: "", + subaccountId: undefined + }; +} + +export const MsgCreateMarketPermissionless = { + encode(message: MsgCreateMarketPermissionless, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ticker !== "") { + writer.uint32(10).string(message.ticker); + } + + if (message.subaccountId !== undefined) { + SubaccountId.encode(message.subaccountId, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateMarketPermissionless { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateMarketPermissionless(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ticker = reader.string(); + break; + + case 2: + message.subaccountId = SubaccountId.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateMarketPermissionless { + const message = createBaseMsgCreateMarketPermissionless(); + message.ticker = object.ticker ?? ""; + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? SubaccountId.fromPartial(object.subaccountId) : undefined; + return message; + } + +}; + +function createBaseMsgCreateMarketPermissionlessResponse(): MsgCreateMarketPermissionlessResponse { + return {}; +} + +export const MsgCreateMarketPermissionlessResponse = { + encode(_: MsgCreateMarketPermissionlessResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateMarketPermissionlessResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateMarketPermissionlessResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreateMarketPermissionlessResponse { + const message = createBaseMsgCreateMarketPermissionlessResponse(); + return message; + } + +}; + +function createBaseMsgSetListingVaultDepositParams(): MsgSetListingVaultDepositParams { + return { + authority: "", + params: undefined + }; +} + +export const MsgSetListingVaultDepositParams = { + encode(message: MsgSetListingVaultDepositParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.params !== undefined) { + ListingVaultDepositParams.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetListingVaultDepositParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetListingVaultDepositParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.params = ListingVaultDepositParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetListingVaultDepositParams { + const message = createBaseMsgSetListingVaultDepositParams(); + message.authority = object.authority ?? ""; + message.params = object.params !== undefined && object.params !== null ? 
ListingVaultDepositParams.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgSetListingVaultDepositParamsResponse(): MsgSetListingVaultDepositParamsResponse { + return {}; +} + +export const MsgSetListingVaultDepositParamsResponse = { + encode(_: MsgSetListingVaultDepositParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetListingVaultDepositParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetListingVaultDepositParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetListingVaultDepositParamsResponse { + const message = createBaseMsgSetListingVaultDepositParamsResponse(); + return message; + } + +}; + +function createBaseMsgUpgradeIsolatedPerpetualToCross(): MsgUpgradeIsolatedPerpetualToCross { + return { + authority: "", + perpetualId: 0 + }; +} + +export const MsgUpgradeIsolatedPerpetualToCross = { + encode(message: MsgUpgradeIsolatedPerpetualToCross, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.perpetualId !== 0) { + writer.uint32(16).uint32(message.perpetualId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeIsolatedPerpetualToCross { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpgradeIsolatedPerpetualToCross(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.perpetualId = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpgradeIsolatedPerpetualToCross { + const message = createBaseMsgUpgradeIsolatedPerpetualToCross(); + message.authority = object.authority ?? ""; + message.perpetualId = object.perpetualId ?? 0; + return message; + } + +}; + +function createBaseMsgUpgradeIsolatedPerpetualToCrossResponse(): MsgUpgradeIsolatedPerpetualToCrossResponse { + return {}; +} + +export const MsgUpgradeIsolatedPerpetualToCrossResponse = { + encode(_: MsgUpgradeIsolatedPerpetualToCrossResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeIsolatedPerpetualToCrossResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpgradeIsolatedPerpetualToCrossResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpgradeIsolatedPerpetualToCrossResponse { + const message = createBaseMsgUpgradeIsolatedPerpetualToCrossResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/perpetual.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/perpetual.ts index 720980b76d3..edef9ef967c 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/perpetual.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/perpetual.ts @@ -115,7 +115,7 @@ export interface PerpetualParams { atomicResolution: number; /** - * The default funding payment if there is no price premium. In + * The default (8hr) funding payment if there is no price premium. In * parts-per-million. */ @@ -154,7 +154,7 @@ export interface PerpetualParamsSDKType { atomic_resolution: number; /** - * The default funding payment if there is no price premium. In + * The default (8hr) funding payment if there is no price premium. In * parts-per-million. 
*/ diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.lcd.ts index bf75ee98d49..a0bdc49c4bd 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.lcd.ts @@ -1,6 +1,6 @@ import { setPaginationParams } from "../../helpers"; import { LCDClient } from "@osmonauts/lcd"; -import { QueryPerpetualRequest, QueryPerpetualResponseSDKType, QueryAllPerpetualsRequest, QueryAllPerpetualsResponseSDKType, QueryAllLiquidityTiersRequest, QueryAllLiquidityTiersResponseSDKType, QueryPremiumVotesRequest, QueryPremiumVotesResponseSDKType, QueryPremiumSamplesRequest, QueryPremiumSamplesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from "./query"; +import { QueryPerpetualRequest, QueryPerpetualResponseSDKType, QueryAllPerpetualsRequest, QueryAllPerpetualsResponseSDKType, QueryAllLiquidityTiersRequest, QueryAllLiquidityTiersResponseSDKType, QueryPremiumVotesRequest, QueryPremiumVotesResponseSDKType, QueryPremiumSamplesRequest, QueryPremiumSamplesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryNextPerpetualIdRequest, QueryNextPerpetualIdResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -16,6 +16,7 @@ export class LCDQueryClient { this.premiumVotes = this.premiumVotes.bind(this); this.premiumSamples = this.premiumSamples.bind(this); this.params = this.params.bind(this); + this.nextPerpetualId = this.nextPerpetualId.bind(this); } /* Queries a Perpetual by id. */ @@ -79,5 +80,12 @@ export class LCDQueryClient { const endpoint = `dydxprotocol/perpetuals/params`; return await this.req.get(endpoint); } + /* Queries the next perpetual id. 
*/ + + + async nextPerpetualId(_params: QueryNextPerpetualIdRequest = {}): Promise { + const endpoint = `dydxprotocol/perpetuals/next_perpetual_id`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.rpc.Query.ts index afd777b2df5..64c43f1b96b 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { QueryPerpetualRequest, QueryPerpetualResponse, QueryAllPerpetualsRequest, QueryAllPerpetualsResponse, QueryAllLiquidityTiersRequest, QueryAllLiquidityTiersResponse, QueryPremiumVotesRequest, QueryPremiumVotesResponse, QueryPremiumSamplesRequest, QueryPremiumSamplesResponse, QueryParamsRequest, QueryParamsResponse } from "./query"; +import { QueryPerpetualRequest, QueryPerpetualResponse, QueryAllPerpetualsRequest, QueryAllPerpetualsResponse, QueryAllLiquidityTiersRequest, QueryAllLiquidityTiersResponse, QueryPremiumVotesRequest, QueryPremiumVotesResponse, QueryPremiumSamplesRequest, QueryPremiumSamplesResponse, QueryParamsRequest, QueryParamsResponse, QueryNextPerpetualIdRequest, QueryNextPerpetualIdResponse } from "./query"; /** Query defines the gRPC querier service. */ export interface Query { @@ -22,6 +22,9 @@ export interface Query { /** Queries the perpetual params. */ params(request?: QueryParamsRequest): Promise; + /** Queries the next perpetual id. 
*/ + + nextPerpetualId(request?: QueryNextPerpetualIdRequest): Promise; } export class QueryClientImpl implements Query { private readonly rpc: Rpc; @@ -34,6 +37,7 @@ export class QueryClientImpl implements Query { this.premiumVotes = this.premiumVotes.bind(this); this.premiumSamples = this.premiumSamples.bind(this); this.params = this.params.bind(this); + this.nextPerpetualId = this.nextPerpetualId.bind(this); } perpetual(request: QueryPerpetualRequest): Promise { @@ -76,6 +80,12 @@ export class QueryClientImpl implements Query { return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); } + nextPerpetualId(request: QueryNextPerpetualIdRequest = {}): Promise { + const data = QueryNextPerpetualIdRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.perpetuals.Query", "NextPerpetualId", data); + return promise.then(data => QueryNextPerpetualIdResponse.decode(new _m0.Reader(data))); + } + } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); @@ -103,6 +113,10 @@ export const createRpcQueryExtension = (base: QueryClient) => { params(request?: QueryParamsRequest): Promise { return queryService.params(request); + }, + + nextPerpetualId(request?: QueryNextPerpetualIdRequest): Promise { + return queryService.nextPerpetualId(request); } }; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.ts index 4dda168b08b..8fd1e273c66 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/perpetuals/query.ts @@ -141,6 +141,24 @@ export interface QueryParamsResponse { export interface QueryParamsResponseSDKType { params?: ParamsSDKType; } +/** QueryNextPerpetualIdRequest is the request type for the NextPerpetualId RPC */ + +export interface QueryNextPerpetualIdRequest {} +/** 
QueryNextPerpetualIdRequest is the request type for the NextPerpetualId RPC */ + +export interface QueryNextPerpetualIdRequestSDKType {} +/** QueryNextPerpetualIdResponse is the response type for the NextPerpetualId RPC */ + +export interface QueryNextPerpetualIdResponse { + /** QueryNextPerpetualIdResponse is the response type for the NextPerpetualId RPC */ + nextPerpetualId: number; +} +/** QueryNextPerpetualIdResponse is the response type for the NextPerpetualId RPC */ + +export interface QueryNextPerpetualIdResponseSDKType { + /** QueryNextPerpetualIdResponse is the response type for the NextPerpetualId RPC */ + next_perpetual_id: number; +} function createBaseQueryPerpetualRequest(): QueryPerpetualRequest { return { @@ -667,4 +685,83 @@ export const QueryParamsResponse = { return message; } +}; + +function createBaseQueryNextPerpetualIdRequest(): QueryNextPerpetualIdRequest { + return {}; +} + +export const QueryNextPerpetualIdRequest = { + encode(_: QueryNextPerpetualIdRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextPerpetualIdRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryNextPerpetualIdRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryNextPerpetualIdRequest { + const message = createBaseQueryNextPerpetualIdRequest(); + return message; + } + +}; + +function createBaseQueryNextPerpetualIdResponse(): QueryNextPerpetualIdResponse { + return { + nextPerpetualId: 0 + }; +} + +export const QueryNextPerpetualIdResponse = { + encode(message: QueryNextPerpetualIdResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextPerpetualId !== 0) { + writer.uint32(8).uint32(message.nextPerpetualId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextPerpetualIdResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNextPerpetualIdResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nextPerpetualId = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNextPerpetualIdResponse { + const message = createBaseQueryNextPerpetualIdResponse(); + message.nextPerpetualId = object.nextPerpetualId ?? 
0; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_param.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_param.ts index d945e1c82fb..1bd6ccf4564 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_param.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_param.ts @@ -18,12 +18,18 @@ export interface MarketParam { * For example if `Exponent == -5` then a `Value` of `1,000,000,000` * represents ``$10,000`. Therefore `10 ^ Exponent` represents the smallest * price step (in dollars) that can be recorded. + * + * Deprecated since v8.x. This value is now determined from the marketmap. */ + /** @deprecated */ + exponent: number; /** * The minimum number of exchanges that should be reporting a live price for * a price update to be considered valid. + * + * Deprecated since v8.x. This value is now determined from the marketmap. */ minExchanges: number; @@ -36,6 +42,8 @@ export interface MarketParam { /** * A string of json that encodes the configuration for resolving the price * of this market on various exchanges. + * + * Deprecated since v8.x. This is now determined from the marketmap. */ exchangeConfigJson: string; @@ -58,12 +66,18 @@ export interface MarketParamSDKType { * For example if `Exponent == -5` then a `Value` of `1,000,000,000` * represents ``$10,000`. Therefore `10 ^ Exponent` represents the smallest * price step (in dollars) that can be recorded. + * + * Deprecated since v8.x. This value is now determined from the marketmap. */ + /** @deprecated */ + exponent: number; /** * The minimum number of exchanges that should be reporting a live price for * a price update to be considered valid. + * + * Deprecated since v8.x. This value is now determined from the marketmap. 
*/ min_exchanges: number; @@ -76,6 +90,8 @@ export interface MarketParamSDKType { /** * A string of json that encodes the configuration for resolving the price * of this market on various exchanges. + * + * Deprecated since v8.x. This is now determined from the marketmap. */ exchange_config_json: string; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_price.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_price.ts index e8cd1ae5967..5320c2c8011 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_price.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/market_price.ts @@ -8,6 +8,9 @@ export interface MarketPrice { /** * Static value. The exponent of the price. See the comment on the duplicate * MarketParam field for more information. + * + * As of v7.1.x, this value is determined from the marketmap instead of + * needing to match the MarketParam field. */ exponent: number; @@ -26,6 +29,9 @@ export interface MarketPriceSDKType { /** * Static value. The exponent of the price. See the comment on the duplicate * MarketParam field for more information. + * + * As of v7.1.x, this value is determined from the marketmap instead of + * needing to match the MarketParam field. 
*/ exponent: number; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.lcd.ts index 0f78eab48a6..aa78b042eec 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.lcd.ts @@ -1,6 +1,6 @@ import { setPaginationParams } from "../../helpers"; import { LCDClient } from "@osmonauts/lcd"; -import { QueryMarketPriceRequest, QueryMarketPriceResponseSDKType, QueryAllMarketPricesRequest, QueryAllMarketPricesResponseSDKType, QueryMarketParamRequest, QueryMarketParamResponseSDKType, QueryAllMarketParamsRequest, QueryAllMarketParamsResponseSDKType } from "./query"; +import { QueryMarketPriceRequest, QueryMarketPriceResponseSDKType, QueryAllMarketPricesRequest, QueryAllMarketPricesResponseSDKType, QueryMarketParamRequest, QueryMarketParamResponseSDKType, QueryAllMarketParamsRequest, QueryAllMarketParamsResponseSDKType, QueryNextMarketIdRequest, QueryNextMarketIdResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -14,6 +14,7 @@ export class LCDQueryClient { this.allMarketPrices = this.allMarketPrices.bind(this); this.marketParam = this.marketParam.bind(this); this.allMarketParams = this.allMarketParams.bind(this); + this.nextMarketId = this.nextMarketId.bind(this); } /* Queries a MarketPrice by id. */ @@ -63,5 +64,12 @@ export class LCDQueryClient { const endpoint = `dydxprotocol/prices/params/market`; return await this.req.get(endpoint, options); } + /* Queries the next market id. 
*/ + + + async nextMarketId(_params: QueryNextMarketIdRequest = {}): Promise { + const endpoint = `dydxprotocol/prices/next_market_id`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.rpc.Query.ts index 2c24a6d01a3..e6b955ad291 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { QueryMarketPriceRequest, QueryMarketPriceResponse, QueryAllMarketPricesRequest, QueryAllMarketPricesResponse, QueryMarketParamRequest, QueryMarketParamResponse, QueryAllMarketParamsRequest, QueryAllMarketParamsResponse } from "./query"; +import { QueryMarketPriceRequest, QueryMarketPriceResponse, QueryAllMarketPricesRequest, QueryAllMarketPricesResponse, QueryMarketParamRequest, QueryMarketParamResponse, QueryAllMarketParamsRequest, QueryAllMarketParamsResponse, QueryNextMarketIdRequest, QueryNextMarketIdResponse } from "./query"; /** Query defines the gRPC querier service. */ export interface Query { @@ -16,6 +16,9 @@ export interface Query { /** Queries a list of MarketParam items. */ allMarketParams(request?: QueryAllMarketParamsRequest): Promise; + /** Queries the next market id. 
*/ + + nextMarketId(request?: QueryNextMarketIdRequest): Promise; } export class QueryClientImpl implements Query { private readonly rpc: Rpc; @@ -26,6 +29,7 @@ export class QueryClientImpl implements Query { this.allMarketPrices = this.allMarketPrices.bind(this); this.marketParam = this.marketParam.bind(this); this.allMarketParams = this.allMarketParams.bind(this); + this.nextMarketId = this.nextMarketId.bind(this); } marketPrice(request: QueryMarketPriceRequest): Promise { @@ -56,6 +60,12 @@ export class QueryClientImpl implements Query { return promise.then(data => QueryAllMarketParamsResponse.decode(new _m0.Reader(data))); } + nextMarketId(request: QueryNextMarketIdRequest = {}): Promise { + const data = QueryNextMarketIdRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.prices.Query", "NextMarketId", data); + return promise.then(data => QueryNextMarketIdResponse.decode(new _m0.Reader(data))); + } + } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); @@ -75,6 +85,10 @@ export const createRpcQueryExtension = (base: QueryClient) => { allMarketParams(request?: QueryAllMarketParamsRequest): Promise { return queryService.allMarketParams(request); + }, + + nextMarketId(request?: QueryNextMarketIdRequest): Promise { + return queryService.nextMarketId(request); } }; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.ts index 4c7a38db92f..525f64b6b0a 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/query.ts @@ -151,6 +151,36 @@ export interface QueryAllMarketParamsResponseSDKType { market_params: MarketParamSDKType[]; pagination?: PageResponseSDKType; } +/** QueryNextMarketIdRequest is request type for the Query/Params `NextMarketId` */ + +export interface QueryNextMarketIdRequest {} +/** 
QueryNextMarketIdRequest is request type for the Query/Params `NextMarketId` */ + +export interface QueryNextMarketIdRequestSDKType {} +/** + * QueryNextMarketIdResponse is response type for the Query/Params + * `NextMarketId` + */ + +export interface QueryNextMarketIdResponse { + /** + * QueryNextMarketIdResponse is response type for the Query/Params + * `NextMarketId` + */ + nextMarketId: number; +} +/** + * QueryNextMarketIdResponse is response type for the Query/Params + * `NextMarketId` + */ + +export interface QueryNextMarketIdResponseSDKType { + /** + * QueryNextMarketIdResponse is response type for the Query/Params + * `NextMarketId` + */ + next_market_id: number; +} function createBaseQueryMarketPriceRequest(): QueryMarketPriceRequest { return { @@ -530,4 +560,83 @@ export const QueryAllMarketParamsResponse = { return message; } +}; + +function createBaseQueryNextMarketIdRequest(): QueryNextMarketIdRequest { + return {}; +} + +export const QueryNextMarketIdRequest = { + encode(_: QueryNextMarketIdRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextMarketIdRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryNextMarketIdRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryNextMarketIdRequest { + const message = createBaseQueryNextMarketIdRequest(); + return message; + } + +}; + +function createBaseQueryNextMarketIdResponse(): QueryNextMarketIdResponse { + return { + nextMarketId: 0 + }; +} + +export const QueryNextMarketIdResponse = { + encode(message: QueryNextMarketIdResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextMarketId !== 0) { + writer.uint32(8).uint32(message.nextMarketId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextMarketIdResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNextMarketIdResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nextMarketId = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNextMarketIdResponse { + const message = createBaseQueryNextMarketIdResponse(); + message.nextMarketId = object.nextMarketId ?? 
0; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/streaming.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/streaming.ts new file mode 100644 index 00000000000..185fe8dc4f8 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/prices/streaming.ts @@ -0,0 +1,92 @@ +import { MarketPrice, MarketPriceSDKType } from "./market_price"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** StreamPriceUpdate provides information on a price update. */ + +export interface StreamPriceUpdate { + /** The `Id` of the `Market`. */ + marketId: number; + /** The updated price. */ + + price?: MarketPrice; + /** Snapshot indicates if the response is from a snapshot of the price. */ + + snapshot: boolean; +} +/** StreamPriceUpdate provides information on a price update. */ + +export interface StreamPriceUpdateSDKType { + /** The `Id` of the `Market`. */ + market_id: number; + /** The updated price. */ + + price?: MarketPriceSDKType; + /** Snapshot indicates if the response is from a snapshot of the price. */ + + snapshot: boolean; +} + +function createBaseStreamPriceUpdate(): StreamPriceUpdate { + return { + marketId: 0, + price: undefined, + snapshot: false + }; +} + +export const StreamPriceUpdate = { + encode(message: StreamPriceUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.marketId !== 0) { + writer.uint32(8).uint32(message.marketId); + } + + if (message.price !== undefined) { + MarketPrice.encode(message.price, writer.uint32(18).fork()).ldelim(); + } + + if (message.snapshot === true) { + writer.uint32(24).bool(message.snapshot); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamPriceUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamPriceUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.marketId = reader.uint32(); + break; + + case 2: + message.price = MarketPrice.decode(reader, reader.uint32()); + break; + + case 3: + message.snapshot = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamPriceUpdate { + const message = createBaseStreamPriceUpdate(); + message.marketId = object.marketId ?? 0; + message.price = object.price !== undefined && object.price !== null ? MarketPrice.fromPartial(object.price) : undefined; + message.snapshot = object.snapshot ?? false; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/pending_send_packet.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/pending_send_packet.ts new file mode 100644 index 00000000000..e2b8b10de95 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/pending_send_packet.ts @@ -0,0 +1,75 @@ +import * as _m0 from "protobufjs/minimal"; +import { Long, DeepPartial } from "../../helpers"; +/** + * PendingSendPacket contains the channel_id and sequence pair to identify a + * pending packet + */ + +export interface PendingSendPacket { + channelId: string; + sequence: Long; +} +/** + * PendingSendPacket contains the channel_id and sequence pair to identify a + * pending packet + */ + +export interface PendingSendPacketSDKType { + channel_id: string; + sequence: Long; +} + +function createBasePendingSendPacket(): PendingSendPacket { + return { + channelId: "", + sequence: Long.UZERO + }; +} + +export const PendingSendPacket = { + encode(message: PendingSendPacket, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.channelId !== "") { + 
writer.uint32(10).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(16).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PendingSendPacket { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePendingSendPacket(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.channelId = reader.string(); + break; + + case 2: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PendingSendPacket { + const message = createBasePendingSendPacket(); + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.lcd.ts index 88809acdffc..24d3f27edc0 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.lcd.ts @@ -1,5 +1,5 @@ import { LCDClient } from "@osmonauts/lcd"; -import { ListLimitParamsRequest, ListLimitParamsResponseSDKType, QueryCapacityByDenomRequest, QueryCapacityByDenomResponseSDKType } from "./query"; +import { ListLimitParamsRequest, ListLimitParamsResponseSDKType, QueryCapacityByDenomRequest, QueryCapacityByDenomResponseSDKType, QueryAllPendingSendPacketsRequest, QueryAllPendingSendPacketsResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -11,6 +11,7 @@ export class LCDQueryClient { this.req = requestClient; 
this.listLimitParams = this.listLimitParams.bind(this); this.capacityByDenom = this.capacityByDenom.bind(this); + this.allPendingSendPackets = this.allPendingSendPackets.bind(this); } /* List all limit params. */ @@ -34,5 +35,12 @@ export class LCDQueryClient { const endpoint = `dydxprotocol/v4/ratelimit/capacity_by_denom`; return await this.req.get(endpoint, options); } + /* Get all pending send packets */ + + + async allPendingSendPackets(_params: QueryAllPendingSendPacketsRequest = {}): Promise { + const endpoint = `dydxprotocol/v4/ratelimit/get_all_pending_send_packet`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.rpc.Query.ts index c617aa46f40..70cf931a869 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { ListLimitParamsRequest, ListLimitParamsResponse, QueryCapacityByDenomRequest, QueryCapacityByDenomResponse } from "./query"; +import { ListLimitParamsRequest, ListLimitParamsResponse, QueryCapacityByDenomRequest, QueryCapacityByDenomResponse, QueryAllPendingSendPacketsRequest, QueryAllPendingSendPacketsResponse } from "./query"; /** Query defines the gRPC querier service. */ export interface Query { @@ -10,6 +10,9 @@ export interface Query { /** Query capacity by denom. 
*/ capacityByDenom(request: QueryCapacityByDenomRequest): Promise; + /** Get all pending send packets */ + + allPendingSendPackets(request?: QueryAllPendingSendPacketsRequest): Promise; } export class QueryClientImpl implements Query { private readonly rpc: Rpc; @@ -18,6 +21,7 @@ export class QueryClientImpl implements Query { this.rpc = rpc; this.listLimitParams = this.listLimitParams.bind(this); this.capacityByDenom = this.capacityByDenom.bind(this); + this.allPendingSendPackets = this.allPendingSendPackets.bind(this); } listLimitParams(request: ListLimitParamsRequest = {}): Promise { @@ -32,6 +36,12 @@ export class QueryClientImpl implements Query { return promise.then(data => QueryCapacityByDenomResponse.decode(new _m0.Reader(data))); } + allPendingSendPackets(request: QueryAllPendingSendPacketsRequest = {}): Promise { + const data = QueryAllPendingSendPacketsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.ratelimit.Query", "AllPendingSendPackets", data); + return promise.then(data => QueryAllPendingSendPacketsResponse.decode(new _m0.Reader(data))); + } + } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); @@ -43,6 +53,10 @@ export const createRpcQueryExtension = (base: QueryClient) => { capacityByDenom(request: QueryCapacityByDenomRequest): Promise { return queryService.capacityByDenom(request); + }, + + allPendingSendPackets(request?: QueryAllPendingSendPacketsRequest): Promise { + return queryService.allPendingSendPackets(request); } }; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts index 82743479398..32846a68058 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts @@ -1,5 +1,6 @@ import { LimitParams, LimitParamsSDKType } from "./limit_params"; import { 
LimiterCapacity, LimiterCapacitySDKType } from "./capacity"; +import { PendingSendPacket, PendingSendPacketSDKType } from "./pending_send_packet"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; /** ListLimitParamsRequest is a request type of the ListLimitParams RPC method. */ @@ -58,6 +59,34 @@ export interface QueryCapacityByDenomResponse { export interface QueryCapacityByDenomResponseSDKType { limiter_capacity_list: LimiterCapacitySDKType[]; } +/** + * QueryAllPendingSendPacketsRequest is a request type for the + * AllPendingSendPackets RPC + */ + +export interface QueryAllPendingSendPacketsRequest {} +/** + * QueryAllPendingSendPacketsRequest is a request type for the + * AllPendingSendPackets RPC + */ + +export interface QueryAllPendingSendPacketsRequestSDKType {} +/** + * QueryAllPendingSendPacketsResponse is a response type of the + * AllPendingSendPackets RPC + */ + +export interface QueryAllPendingSendPacketsResponse { + pendingSendPackets: PendingSendPacket[]; +} +/** + * QueryAllPendingSendPacketsResponse is a response type of the + * AllPendingSendPackets RPC + */ + +export interface QueryAllPendingSendPacketsResponseSDKType { + pending_send_packets: PendingSendPacketSDKType[]; +} function createBaseListLimitParamsRequest(): ListLimitParamsRequest { return {}; @@ -226,4 +255,83 @@ export const QueryCapacityByDenomResponse = { return message; } +}; + +function createBaseQueryAllPendingSendPacketsRequest(): QueryAllPendingSendPacketsRequest { + return {}; +} + +export const QueryAllPendingSendPacketsRequest = { + encode(_: QueryAllPendingSendPacketsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingSendPacketsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllPendingSendPacketsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryAllPendingSendPacketsRequest { + const message = createBaseQueryAllPendingSendPacketsRequest(); + return message; + } + +}; + +function createBaseQueryAllPendingSendPacketsResponse(): QueryAllPendingSendPacketsResponse { + return { + pendingSendPackets: [] + }; +} + +export const QueryAllPendingSendPacketsResponse = { + encode(message: QueryAllPendingSendPacketsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pendingSendPackets) { + PendingSendPacket.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingSendPacketsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllPendingSendPacketsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pendingSendPackets.push(PendingSendPacket.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllPendingSendPacketsResponse { + const message = createBaseQueryAllPendingSendPacketsResponse(); + message.pendingSendPackets = object.pendingSendPackets?.map(e => PendingSendPacket.fromPartial(e)) || []; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/genesis.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/genesis.ts new file mode 100644 index 00000000000..68e61a51190 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/genesis.ts @@ -0,0 +1,58 @@ +import { MarketMapperRevenueShareParams, MarketMapperRevenueShareParamsSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** GenesisState defines `x/revshare`'s genesis state. */ + +export interface GenesisState { + params?: MarketMapperRevenueShareParams; +} +/** GenesisState defines `x/revshare`'s genesis state. */ + +export interface GenesisStateSDKType { + params?: MarketMapperRevenueShareParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + MarketMapperRevenueShareParams.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = MarketMapperRevenueShareParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? MarketMapperRevenueShareParams.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/params.ts new file mode 100644 index 00000000000..7c3e8c96cae --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/params.ts @@ -0,0 +1,105 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** MarketMappeRevenueShareParams represents params for the above message */ + +export interface MarketMapperRevenueShareParams { + /** The address which will receive the revenue share payouts */ + address: string; + /** + * The fraction of the fees which will go to the above mentioned address. + * In parts-per-million + */ + + revenueSharePpm: number; + /** + * This parameter defines how many days post market initiation will the + * revenue share be applied for. 
After valid_days from market initiation + * the revenue share goes down to 0 + */ + + validDays: number; +} +/** MarketMappeRevenueShareParams represents params for the above message */ + +export interface MarketMapperRevenueShareParamsSDKType { + /** The address which will receive the revenue share payouts */ + address: string; + /** + * The fraction of the fees which will go to the above mentioned address. + * In parts-per-million + */ + + revenue_share_ppm: number; + /** + * This parameter defines how many days post market initiation will the + * revenue share be applied for. After valid_days from market initiation + * the revenue share goes down to 0 + */ + + valid_days: number; +} + +function createBaseMarketMapperRevenueShareParams(): MarketMapperRevenueShareParams { + return { + address: "", + revenueSharePpm: 0, + validDays: 0 + }; +} + +export const MarketMapperRevenueShareParams = { + encode(message: MarketMapperRevenueShareParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.revenueSharePpm !== 0) { + writer.uint32(16).uint32(message.revenueSharePpm); + } + + if (message.validDays !== 0) { + writer.uint32(24).uint32(message.validDays); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketMapperRevenueShareParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMarketMapperRevenueShareParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.revenueSharePpm = reader.uint32(); + break; + + case 3: + message.validDays = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketMapperRevenueShareParams { + const message = createBaseMarketMapperRevenueShareParams(); + message.address = object.address ?? ""; + message.revenueSharePpm = object.revenueSharePpm ?? 0; + message.validDays = object.validDays ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.lcd.ts new file mode 100644 index 00000000000..7f45b52cc1f --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.lcd.ts @@ -0,0 +1,39 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryMarketMapperRevenueShareParams, QueryMarketMapperRevenueShareParamsResponseSDKType, QueryMarketMapperRevShareDetails, QueryMarketMapperRevShareDetailsResponseSDKType, QueryUnconditionalRevShareConfig, QueryUnconditionalRevShareConfigResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.marketMapperRevenueShareParams = this.marketMapperRevenueShareParams.bind(this); + this.marketMapperRevShareDetails = this.marketMapperRevShareDetails.bind(this); + this.unconditionalRevShareConfig = this.unconditionalRevShareConfig.bind(this); + } + /* MarketMapperRevenueShareParams queries the revenue share params for the + market mapper */ + + + async marketMapperRevenueShareParams(_params: 
QueryMarketMapperRevenueShareParams = {}): Promise { + const endpoint = `dydxprotocol/revshare/market_mapper_rev_share_params`; + return await this.req.get(endpoint); + } + /* Queries market mapper revenue share details for a specific market */ + + + async marketMapperRevShareDetails(params: QueryMarketMapperRevShareDetails): Promise { + const endpoint = `dydxprotocol/revshare/market_mapper_rev_share_details/${params.marketId}`; + return await this.req.get(endpoint); + } + /* Queries unconditional revenue share config */ + + + async unconditionalRevShareConfig(_params: QueryUnconditionalRevShareConfig = {}): Promise { + const endpoint = `dydxprotocol/revshare/unconditional_rev_share`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.rpc.Query.ts new file mode 100644 index 00000000000..633f6fc0151 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.rpc.Query.ts @@ -0,0 +1,66 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryMarketMapperRevenueShareParams, QueryMarketMapperRevenueShareParamsResponse, QueryMarketMapperRevShareDetails, QueryMarketMapperRevShareDetailsResponse, QueryUnconditionalRevShareConfig, QueryUnconditionalRevShareConfigResponse } from "./query"; +/** Query defines the gRPC querier service. 
*/ + +export interface Query { + /** + * MarketMapperRevenueShareParams queries the revenue share params for the + * market mapper + */ + marketMapperRevenueShareParams(request?: QueryMarketMapperRevenueShareParams): Promise; + /** Queries market mapper revenue share details for a specific market */ + + marketMapperRevShareDetails(request: QueryMarketMapperRevShareDetails): Promise; + /** Queries unconditional revenue share config */ + + unconditionalRevShareConfig(request?: QueryUnconditionalRevShareConfig): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.marketMapperRevenueShareParams = this.marketMapperRevenueShareParams.bind(this); + this.marketMapperRevShareDetails = this.marketMapperRevShareDetails.bind(this); + this.unconditionalRevShareConfig = this.unconditionalRevShareConfig.bind(this); + } + + marketMapperRevenueShareParams(request: QueryMarketMapperRevenueShareParams = {}): Promise { + const data = QueryMarketMapperRevenueShareParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Query", "MarketMapperRevenueShareParams", data); + return promise.then(data => QueryMarketMapperRevenueShareParamsResponse.decode(new _m0.Reader(data))); + } + + marketMapperRevShareDetails(request: QueryMarketMapperRevShareDetails): Promise { + const data = QueryMarketMapperRevShareDetails.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Query", "MarketMapperRevShareDetails", data); + return promise.then(data => QueryMarketMapperRevShareDetailsResponse.decode(new _m0.Reader(data))); + } + + unconditionalRevShareConfig(request: QueryUnconditionalRevShareConfig = {}): Promise { + const data = QueryUnconditionalRevShareConfig.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Query", "UnconditionalRevShareConfig", data); + return promise.then(data => 
QueryUnconditionalRevShareConfigResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + marketMapperRevenueShareParams(request?: QueryMarketMapperRevenueShareParams): Promise { + return queryService.marketMapperRevenueShareParams(request); + }, + + marketMapperRevShareDetails(request: QueryMarketMapperRevShareDetails): Promise { + return queryService.marketMapperRevShareDetails(request); + }, + + unconditionalRevShareConfig(request?: QueryUnconditionalRevShareConfig): Promise { + return queryService.unconditionalRevShareConfig(request); + } + + }; +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.ts new file mode 100644 index 00000000000..e729da38cb2 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/query.ts @@ -0,0 +1,306 @@ +import { MarketMapperRevenueShareParams, MarketMapperRevenueShareParamsSDKType } from "./params"; +import { MarketMapperRevShareDetails, MarketMapperRevShareDetailsSDKType, UnconditionalRevShareConfig, UnconditionalRevShareConfigSDKType } from "./revshare"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** Queries for the default market mapper revenue share params */ + +export interface QueryMarketMapperRevenueShareParams {} +/** Queries for the default market mapper revenue share params */ + +export interface QueryMarketMapperRevenueShareParamsSDKType {} +/** Response type for QueryMarketMapperRevenueShareParams */ + +export interface QueryMarketMapperRevenueShareParamsResponse { + params?: MarketMapperRevenueShareParams; +} +/** Response type for QueryMarketMapperRevenueShareParams */ + +export interface QueryMarketMapperRevenueShareParamsResponseSDKType { + params?: 
MarketMapperRevenueShareParamsSDKType; +} +/** Queries market mapper revenue share details for a specific market */ + +export interface QueryMarketMapperRevShareDetails { + /** Queries market mapper revenue share details for a specific market */ + marketId: number; +} +/** Queries market mapper revenue share details for a specific market */ + +export interface QueryMarketMapperRevShareDetailsSDKType { + /** Queries market mapper revenue share details for a specific market */ + market_id: number; +} +/** Response type for QueryMarketMapperRevShareDetails */ + +export interface QueryMarketMapperRevShareDetailsResponse { + details?: MarketMapperRevShareDetails; +} +/** Response type for QueryMarketMapperRevShareDetails */ + +export interface QueryMarketMapperRevShareDetailsResponseSDKType { + details?: MarketMapperRevShareDetailsSDKType; +} +/** Queries unconditional revenue share details */ + +export interface QueryUnconditionalRevShareConfig {} +/** Queries unconditional revenue share details */ + +export interface QueryUnconditionalRevShareConfigSDKType {} +/** Response type for QueryUnconditionalRevShareConfig */ + +export interface QueryUnconditionalRevShareConfigResponse { + config?: UnconditionalRevShareConfig; +} +/** Response type for QueryUnconditionalRevShareConfig */ + +export interface QueryUnconditionalRevShareConfigResponseSDKType { + config?: UnconditionalRevShareConfigSDKType; +} + +function createBaseQueryMarketMapperRevenueShareParams(): QueryMarketMapperRevenueShareParams { + return {}; +} + +export const QueryMarketMapperRevenueShareParams = { + encode(_: QueryMarketMapperRevenueShareParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketMapperRevenueShareParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryMarketMapperRevenueShareParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryMarketMapperRevenueShareParams { + const message = createBaseQueryMarketMapperRevenueShareParams(); + return message; + } + +}; + +function createBaseQueryMarketMapperRevenueShareParamsResponse(): QueryMarketMapperRevenueShareParamsResponse { + return { + params: undefined + }; +} + +export const QueryMarketMapperRevenueShareParamsResponse = { + encode(message: QueryMarketMapperRevenueShareParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + MarketMapperRevenueShareParams.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketMapperRevenueShareParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMarketMapperRevenueShareParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = MarketMapperRevenueShareParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMarketMapperRevenueShareParamsResponse { + const message = createBaseQueryMarketMapperRevenueShareParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? 
MarketMapperRevenueShareParams.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryMarketMapperRevShareDetails(): QueryMarketMapperRevShareDetails { + return { + marketId: 0 + }; +} + +export const QueryMarketMapperRevShareDetails = { + encode(message: QueryMarketMapperRevShareDetails, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.marketId !== 0) { + writer.uint32(8).uint32(message.marketId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketMapperRevShareDetails { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMarketMapperRevShareDetails(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.marketId = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMarketMapperRevShareDetails { + const message = createBaseQueryMarketMapperRevShareDetails(); + message.marketId = object.marketId ?? 0; + return message; + } + +}; + +function createBaseQueryMarketMapperRevShareDetailsResponse(): QueryMarketMapperRevShareDetailsResponse { + return { + details: undefined + }; +} + +export const QueryMarketMapperRevShareDetailsResponse = { + encode(message: QueryMarketMapperRevShareDetailsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.details !== undefined) { + MarketMapperRevShareDetails.encode(message.details, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMarketMapperRevShareDetailsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryMarketMapperRevShareDetailsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.details = MarketMapperRevShareDetails.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMarketMapperRevShareDetailsResponse { + const message = createBaseQueryMarketMapperRevShareDetailsResponse(); + message.details = object.details !== undefined && object.details !== null ? MarketMapperRevShareDetails.fromPartial(object.details) : undefined; + return message; + } + +}; + +function createBaseQueryUnconditionalRevShareConfig(): QueryUnconditionalRevShareConfig { + return {}; +} + +export const QueryUnconditionalRevShareConfig = { + encode(_: QueryUnconditionalRevShareConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnconditionalRevShareConfig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryUnconditionalRevShareConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryUnconditionalRevShareConfig { + const message = createBaseQueryUnconditionalRevShareConfig(); + return message; + } + +}; + +function createBaseQueryUnconditionalRevShareConfigResponse(): QueryUnconditionalRevShareConfigResponse { + return { + config: undefined + }; +} + +export const QueryUnconditionalRevShareConfigResponse = { + encode(message: QueryUnconditionalRevShareConfigResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.config !== undefined) { + UnconditionalRevShareConfig.encode(message.config, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnconditionalRevShareConfigResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnconditionalRevShareConfigResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.config = UnconditionalRevShareConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnconditionalRevShareConfigResponse { + const message = createBaseQueryUnconditionalRevShareConfigResponse(); + message.config = object.config !== undefined && object.config !== null ? 
UnconditionalRevShareConfig.fromPartial(object.config) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/revshare.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/revshare.ts new file mode 100644 index 00000000000..7eb60f47007 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/revshare.ts @@ -0,0 +1,201 @@ +import * as _m0 from "protobufjs/minimal"; +import { Long, DeepPartial } from "../../helpers"; +/** + * MarketMapperRevShareDetails specifies any details associated with the market + * mapper revenue share + */ + +export interface MarketMapperRevShareDetails { + /** Unix timestamp recorded when the market revenue share expires */ + expirationTs: Long; +} +/** + * MarketMapperRevShareDetails specifies any details associated with the market + * mapper revenue share + */ + +export interface MarketMapperRevShareDetailsSDKType { + /** Unix timestamp recorded when the market revenue share expires */ + expiration_ts: Long; +} +/** + * UnconditionalRevShareConfig stores recipients that + * receive a share of net revenue unconditionally. + */ + +export interface UnconditionalRevShareConfig { + /** Configs for each recipient. */ + configs: UnconditionalRevShareConfig_RecipientConfig[]; +} +/** + * UnconditionalRevShareConfig stores recipients that + * receive a share of net revenue unconditionally. + */ + +export interface UnconditionalRevShareConfigSDKType { + /** Configs for each recipient. */ + configs: UnconditionalRevShareConfig_RecipientConfigSDKType[]; +} +/** Describes the config of a recipient */ + +export interface UnconditionalRevShareConfig_RecipientConfig { + /** Address of the recepient. */ + address: string; + /** Percentage of net revenue to share with recipient, in parts-per-million. 
*/ + + sharePpm: number; +} +/** Describes the config of a recipient */ + +export interface UnconditionalRevShareConfig_RecipientConfigSDKType { + /** Address of the recepient. */ + address: string; + /** Percentage of net revenue to share with recipient, in parts-per-million. */ + + share_ppm: number; +} + +function createBaseMarketMapperRevShareDetails(): MarketMapperRevShareDetails { + return { + expirationTs: Long.UZERO + }; +} + +export const MarketMapperRevShareDetails = { + encode(message: MarketMapperRevShareDetails, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.expirationTs.isZero()) { + writer.uint32(8).uint64(message.expirationTs); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MarketMapperRevShareDetails { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMarketMapperRevShareDetails(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.expirationTs = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MarketMapperRevShareDetails { + const message = createBaseMarketMapperRevShareDetails(); + message.expirationTs = object.expirationTs !== undefined && object.expirationTs !== null ? 
Long.fromValue(object.expirationTs) : Long.UZERO; + return message; + } + +}; + +function createBaseUnconditionalRevShareConfig(): UnconditionalRevShareConfig { + return { + configs: [] + }; +} + +export const UnconditionalRevShareConfig = { + encode(message: UnconditionalRevShareConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.configs) { + UnconditionalRevShareConfig_RecipientConfig.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UnconditionalRevShareConfig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUnconditionalRevShareConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.configs.push(UnconditionalRevShareConfig_RecipientConfig.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UnconditionalRevShareConfig { + const message = createBaseUnconditionalRevShareConfig(); + message.configs = object.configs?.map(e => UnconditionalRevShareConfig_RecipientConfig.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseUnconditionalRevShareConfig_RecipientConfig(): UnconditionalRevShareConfig_RecipientConfig { + return { + address: "", + sharePpm: 0 + }; +} + +export const UnconditionalRevShareConfig_RecipientConfig = { + encode(message: UnconditionalRevShareConfig_RecipientConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.sharePpm !== 0) { + writer.uint32(16).uint32(message.sharePpm); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UnconditionalRevShareConfig_RecipientConfig { 
+ const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUnconditionalRevShareConfig_RecipientConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.sharePpm = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UnconditionalRevShareConfig_RecipientConfig { + const message = createBaseUnconditionalRevShareConfig_RecipientConfig(); + message.address = object.address ?? ""; + message.sharePpm = object.sharePpm ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.rpc.msg.ts new file mode 100644 index 00000000000..e530b096bcb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.rpc.msg.ts @@ -0,0 +1,50 @@ +import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSetMarketMapperRevenueShare, MsgSetMarketMapperRevenueShareResponse, MsgSetMarketMapperRevShareDetailsForMarket, MsgSetMarketMapperRevShareDetailsForMarketResponse, MsgUpdateUnconditionalRevShareConfig, MsgUpdateUnconditionalRevShareConfigResponse } from "./tx"; +/** Msg defines the Msg service. */ + +export interface Msg { + /** + * SetMarketMapperRevenueShare sets the revenue share for a market + * mapper. + */ + setMarketMapperRevenueShare(request: MsgSetMarketMapperRevenueShare): Promise; + /** + * SetMarketMapperRevenueShareDetails sets the revenue share details for a + * market mapper. 
+ */ + + setMarketMapperRevShareDetailsForMarket(request: MsgSetMarketMapperRevShareDetailsForMarket): Promise; + /** UpdateUnconditionalRevShareConfig sets the unconditional revshare config */ + + updateUnconditionalRevShareConfig(request: MsgUpdateUnconditionalRevShareConfig): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.setMarketMapperRevenueShare = this.setMarketMapperRevenueShare.bind(this); + this.setMarketMapperRevShareDetailsForMarket = this.setMarketMapperRevShareDetailsForMarket.bind(this); + this.updateUnconditionalRevShareConfig = this.updateUnconditionalRevShareConfig.bind(this); + } + + setMarketMapperRevenueShare(request: MsgSetMarketMapperRevenueShare): Promise { + const data = MsgSetMarketMapperRevenueShare.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Msg", "SetMarketMapperRevenueShare", data); + return promise.then(data => MsgSetMarketMapperRevenueShareResponse.decode(new _m0.Reader(data))); + } + + setMarketMapperRevShareDetailsForMarket(request: MsgSetMarketMapperRevShareDetailsForMarket): Promise { + const data = MsgSetMarketMapperRevShareDetailsForMarket.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Msg", "SetMarketMapperRevShareDetailsForMarket", data); + return promise.then(data => MsgSetMarketMapperRevShareDetailsForMarketResponse.decode(new _m0.Reader(data))); + } + + updateUnconditionalRevShareConfig(request: MsgUpdateUnconditionalRevShareConfig): Promise { + const data = MsgUpdateUnconditionalRevShareConfig.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.revshare.Msg", "UpdateUnconditionalRevShareConfig", data); + return promise.then(data => MsgUpdateUnconditionalRevShareConfigResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.ts 
b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.ts new file mode 100644 index 00000000000..1a0195d71bb --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/revshare/tx.ts @@ -0,0 +1,361 @@ +import { MarketMapperRevenueShareParams, MarketMapperRevenueShareParamsSDKType } from "./params"; +import { MarketMapperRevShareDetails, MarketMapperRevShareDetailsSDKType, UnconditionalRevShareConfig, UnconditionalRevShareConfigSDKType } from "./revshare"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** Message to set the market mapper revenue share */ + +export interface MsgSetMarketMapperRevenueShare { + authority: string; + /** Parameters for the revenue share */ + + params?: MarketMapperRevenueShareParams; +} +/** Message to set the market mapper revenue share */ + +export interface MsgSetMarketMapperRevenueShareSDKType { + authority: string; + /** Parameters for the revenue share */ + + params?: MarketMapperRevenueShareParamsSDKType; +} +/** Response to a MsgSetMarketMapperRevenueShare */ + +export interface MsgSetMarketMapperRevenueShareResponse {} +/** Response to a MsgSetMarketMapperRevenueShare */ + +export interface MsgSetMarketMapperRevenueShareResponseSDKType {} +/** + * Msg to set market mapper revenue share details (e.g. expiration timestamp) + * for a specific market. To be used as an override for existing revenue share + * settings set by the MsgSetMarketMapperRevenueShare msg + */ + +export interface MsgSetMarketMapperRevShareDetailsForMarket { + authority: string; + /** The market ID for which to set the revenue share details */ + + marketId: number; + /** Parameters for the revenue share details */ + + params?: MarketMapperRevShareDetails; +} +/** + * Msg to set market mapper revenue share details (e.g. expiration timestamp) + * for a specific market. 
To be used as an override for existing revenue share + * settings set by the MsgSetMarketMapperRevenueShare msg + */ + +export interface MsgSetMarketMapperRevShareDetailsForMarketSDKType { + authority: string; + /** The market ID for which to set the revenue share details */ + + market_id: number; + /** Parameters for the revenue share details */ + + params?: MarketMapperRevShareDetailsSDKType; +} +/** Response to a MsgSetMarketMapperRevShareDetailsForMarket */ + +export interface MsgSetMarketMapperRevShareDetailsForMarketResponse {} +/** Response to a MsgSetMarketMapperRevShareDetailsForMarket */ + +export interface MsgSetMarketMapperRevShareDetailsForMarketResponseSDKType {} +/** Message to update the unconditional revenue share config. */ + +export interface MsgUpdateUnconditionalRevShareConfig { + authority: string; + /** The config to update. */ + + config?: UnconditionalRevShareConfig; +} +/** Message to update the unconditional revenue share config. */ + +export interface MsgUpdateUnconditionalRevShareConfigSDKType { + authority: string; + /** The config to update. 
*/ + + config?: UnconditionalRevShareConfigSDKType; +} +/** Response to MsgUpdateUnconditionalRevShareConfig */ + +export interface MsgUpdateUnconditionalRevShareConfigResponse {} +/** Response to MsgUpdateUnconditionalRevShareConfig */ + +export interface MsgUpdateUnconditionalRevShareConfigResponseSDKType {} + +function createBaseMsgSetMarketMapperRevenueShare(): MsgSetMarketMapperRevenueShare { + return { + authority: "", + params: undefined + }; +} + +export const MsgSetMarketMapperRevenueShare = { + encode(message: MsgSetMarketMapperRevenueShare, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.params !== undefined) { + MarketMapperRevenueShareParams.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketMapperRevenueShare { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetMarketMapperRevenueShare(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.params = MarketMapperRevenueShareParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetMarketMapperRevenueShare { + const message = createBaseMsgSetMarketMapperRevenueShare(); + message.authority = object.authority ?? ""; + message.params = object.params !== undefined && object.params !== null ? 
MarketMapperRevenueShareParams.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgSetMarketMapperRevenueShareResponse(): MsgSetMarketMapperRevenueShareResponse { + return {}; +} + +export const MsgSetMarketMapperRevenueShareResponse = { + encode(_: MsgSetMarketMapperRevenueShareResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketMapperRevenueShareResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetMarketMapperRevenueShareResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetMarketMapperRevenueShareResponse { + const message = createBaseMsgSetMarketMapperRevenueShareResponse(); + return message; + } + +}; + +function createBaseMsgSetMarketMapperRevShareDetailsForMarket(): MsgSetMarketMapperRevShareDetailsForMarket { + return { + authority: "", + marketId: 0, + params: undefined + }; +} + +export const MsgSetMarketMapperRevShareDetailsForMarket = { + encode(message: MsgSetMarketMapperRevShareDetailsForMarket, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.marketId !== 0) { + writer.uint32(16).uint32(message.marketId); + } + + if (message.params !== undefined) { + MarketMapperRevShareDetails.encode(message.params, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketMapperRevShareDetailsForMarket { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgSetMarketMapperRevShareDetailsForMarket(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.marketId = reader.uint32(); + break; + + case 3: + message.params = MarketMapperRevShareDetails.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetMarketMapperRevShareDetailsForMarket { + const message = createBaseMsgSetMarketMapperRevShareDetailsForMarket(); + message.authority = object.authority ?? ""; + message.marketId = object.marketId ?? 0; + message.params = object.params !== undefined && object.params !== null ? MarketMapperRevShareDetails.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgSetMarketMapperRevShareDetailsForMarketResponse(): MsgSetMarketMapperRevShareDetailsForMarketResponse { + return {}; +} + +export const MsgSetMarketMapperRevShareDetailsForMarketResponse = { + encode(_: MsgSetMarketMapperRevShareDetailsForMarketResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetMarketMapperRevShareDetailsForMarketResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgSetMarketMapperRevShareDetailsForMarketResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetMarketMapperRevShareDetailsForMarketResponse { + const message = createBaseMsgSetMarketMapperRevShareDetailsForMarketResponse(); + return message; + } + +}; + +function createBaseMsgUpdateUnconditionalRevShareConfig(): MsgUpdateUnconditionalRevShareConfig { + return { + authority: "", + config: undefined + }; +} + +export const MsgUpdateUnconditionalRevShareConfig = { + encode(message: MsgUpdateUnconditionalRevShareConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.config !== undefined) { + UnconditionalRevShareConfig.encode(message.config, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateUnconditionalRevShareConfig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateUnconditionalRevShareConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.config = UnconditionalRevShareConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateUnconditionalRevShareConfig { + const message = createBaseMsgUpdateUnconditionalRevShareConfig(); + message.authority = object.authority ?? ""; + message.config = object.config !== undefined && object.config !== null ? 
UnconditionalRevShareConfig.fromPartial(object.config) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateUnconditionalRevShareConfigResponse(): MsgUpdateUnconditionalRevShareConfigResponse { + return {}; +} + +export const MsgUpdateUnconditionalRevShareConfigResponse = { + encode(_: MsgUpdateUnconditionalRevShareConfigResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateUnconditionalRevShareConfigResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateUnconditionalRevShareConfigResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateUnconditionalRevShareConfigResponse { + const message = createBaseMsgUpdateUnconditionalRevShareConfigResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.query.ts index 4c2d8a0de76..dd9037fad18 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.query.ts @@ -9,6 +9,8 @@ export const createRPCQueryClient = async ({ const client = new QueryClient(tmClient); return { dydxprotocol: { + accountplus: (await import("./accountplus/query.rpc.Query")).createRpcQueryExtension(client), + affiliates: (await import("./affiliates/query.rpc.Query")).createRpcQueryExtension(client), assets: (await import("./assets/query.rpc.Query")).createRpcQueryExtension(client), blocktime: (await import("./blocktime/query.rpc.Query")).createRpcQueryExtension(client), bridge: (await 
import("./bridge/query.rpc.Query")).createRpcQueryExtension(client), @@ -17,9 +19,11 @@ export const createRPCQueryClient = async ({ epochs: (await import("./epochs/query.rpc.Query")).createRpcQueryExtension(client), feetiers: (await import("./feetiers/query.rpc.Query")).createRpcQueryExtension(client), govplus: (await import("./govplus/query.rpc.Query")).createRpcQueryExtension(client), + listing: (await import("./listing/query.rpc.Query")).createRpcQueryExtension(client), perpetuals: (await import("./perpetuals/query.rpc.Query")).createRpcQueryExtension(client), prices: (await import("./prices/query.rpc.Query")).createRpcQueryExtension(client), ratelimit: (await import("./ratelimit/query.rpc.Query")).createRpcQueryExtension(client), + revshare: (await import("./revshare/query.rpc.Query")).createRpcQueryExtension(client), rewards: (await import("./rewards/query.rpc.Query")).createRpcQueryExtension(client), sending: (await import("./sending/query.rpc.Query")).createRpcQueryExtension(client), stats: (await import("./stats/query.rpc.Query")).createRpcQueryExtension(client), diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.tx.ts index 2c23915bc52..cfddfb350c8 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.tx.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/rpc.tx.ts @@ -5,15 +5,19 @@ export const createRPCMsgClient = async ({ rpc: Rpc; }) => ({ dydxprotocol: { + accountplus: new (await import("./accountplus/tx.rpc.msg")).MsgClientImpl(rpc), + affiliates: new (await import("./affiliates/tx.rpc.msg")).MsgClientImpl(rpc), blocktime: new (await import("./blocktime/tx.rpc.msg")).MsgClientImpl(rpc), bridge: new (await import("./bridge/tx.rpc.msg")).MsgClientImpl(rpc), clob: new (await import("./clob/tx.rpc.msg")).MsgClientImpl(rpc), delaymsg: new (await import("./delaymsg/tx.rpc.msg")).MsgClientImpl(rpc), feetiers: new (await 
import("./feetiers/tx.rpc.msg")).MsgClientImpl(rpc), govplus: new (await import("./govplus/tx.rpc.msg")).MsgClientImpl(rpc), + listing: new (await import("./listing/tx.rpc.msg")).MsgClientImpl(rpc), perpetuals: new (await import("./perpetuals/tx.rpc.msg")).MsgClientImpl(rpc), prices: new (await import("./prices/tx.rpc.msg")).MsgClientImpl(rpc), ratelimit: new (await import("./ratelimit/tx.rpc.msg")).MsgClientImpl(rpc), + revshare: new (await import("./revshare/tx.rpc.msg")).MsgClientImpl(rpc), rewards: new (await import("./rewards/tx.rpc.msg")).MsgClientImpl(rpc), sending: new (await import("./sending/tx.rpc.msg")).MsgClientImpl(rpc), stats: new (await import("./stats/tx.rpc.msg")).MsgClientImpl(rpc), diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/stats/stats.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/stats/stats.ts index 1aba60a6bb3..1f41aed7f6b 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/stats/stats.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/stats/stats.ts @@ -115,6 +115,30 @@ export interface UserStatsSDKType { maker_notional: Long; } +/** CachedStakeAmount stores the last calculated total staked amount for address */ + +export interface CachedStakeAmount { + /** Last calculated total staked amount by the delegator (in coin amount). */ + stakedAmount: Uint8Array; + /** + * Block time at which the calculation is cached (in Unix Epoch seconds) + * Rounded down to nearest second. + */ + + cachedAt: Long; +} +/** CachedStakeAmount stores the last calculated total staked amount for address */ + +export interface CachedStakeAmountSDKType { + /** Last calculated total staked amount by the delegator (in coin amount). */ + staked_amount: Uint8Array; + /** + * Block time at which the calculation is cached (in Unix Epoch seconds) + * Rounded down to nearest second. 
+ */ + + cached_at: Long; +} function createBaseBlockStats(): BlockStats { return { @@ -479,4 +503,59 @@ export const UserStats = { return message; } +}; + +function createBaseCachedStakeAmount(): CachedStakeAmount { + return { + stakedAmount: new Uint8Array(), + cachedAt: Long.ZERO + }; +} + +export const CachedStakeAmount = { + encode(message: CachedStakeAmount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.stakedAmount.length !== 0) { + writer.uint32(10).bytes(message.stakedAmount); + } + + if (!message.cachedAt.isZero()) { + writer.uint32(16).int64(message.cachedAt); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CachedStakeAmount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCachedStakeAmount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.stakedAmount = reader.bytes(); + break; + + case 2: + message.cachedAt = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CachedStakeAmount { + const message = createBaseCachedStakeAmount(); + message.stakedAmount = object.stakedAmount ?? new Uint8Array(); + message.cachedAt = object.cachedAt !== undefined && object.cachedAt !== null ? 
Long.fromValue(object.cachedAt) : Long.ZERO; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/perpetual_position.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/perpetual_position.ts index 426152be4f2..7ed8b6b5f70 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/perpetual_position.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/perpetual_position.ts @@ -17,6 +17,9 @@ export interface PerpetualPosition { */ fundingIndex: Uint8Array; + /** The quote_balance of the `Perpetual`. */ + + quoteBalance: Uint8Array; } /** * PerpetualPositions are an account’s positions of a `Perpetual`. @@ -35,13 +38,17 @@ export interface PerpetualPositionSDKType { */ funding_index: Uint8Array; + /** The quote_balance of the `Perpetual`. */ + + quote_balance: Uint8Array; } function createBasePerpetualPosition(): PerpetualPosition { return { perpetualId: 0, quantums: new Uint8Array(), - fundingIndex: new Uint8Array() + fundingIndex: new Uint8Array(), + quoteBalance: new Uint8Array() }; } @@ -59,6 +66,10 @@ export const PerpetualPosition = { writer.uint32(26).bytes(message.fundingIndex); } + if (message.quoteBalance.length !== 0) { + writer.uint32(34).bytes(message.quoteBalance); + } + return writer; }, @@ -83,6 +94,10 @@ export const PerpetualPosition = { message.fundingIndex = reader.bytes(); break; + case 4: + message.quoteBalance = reader.bytes(); + break; + default: reader.skipType(tag & 7); break; @@ -97,6 +112,7 @@ export const PerpetualPosition = { message.perpetualId = object.perpetualId ?? 0; message.quantums = object.quantums ?? new Uint8Array(); message.fundingIndex = object.fundingIndex ?? new Uint8Array(); + message.quoteBalance = object.quoteBalance ?? 
new Uint8Array(); return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.lcd.ts index 93d034a1447..8c761428e77 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.lcd.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.lcd.ts @@ -1,6 +1,6 @@ import { setPaginationParams } from "../../helpers"; import { LCDClient } from "@osmonauts/lcd"; -import { QueryGetSubaccountRequest, QuerySubaccountResponseSDKType, QueryAllSubaccountRequest, QuerySubaccountAllResponseSDKType, QueryGetWithdrawalAndTransfersBlockedInfoRequest, QueryGetWithdrawalAndTransfersBlockedInfoResponseSDKType } from "./query"; +import { QueryGetSubaccountRequest, QuerySubaccountResponseSDKType, QueryAllSubaccountRequest, QuerySubaccountAllResponseSDKType, QueryGetWithdrawalAndTransfersBlockedInfoRequest, QueryGetWithdrawalAndTransfersBlockedInfoResponseSDKType, QueryCollateralPoolAddressRequest, QueryCollateralPoolAddressResponseSDKType } from "./query"; export class LCDQueryClient { req: LCDClient; @@ -13,6 +13,7 @@ export class LCDQueryClient { this.subaccount = this.subaccount.bind(this); this.subaccountAll = this.subaccountAll.bind(this); this.getWithdrawalAndTransfersBlockedInfo = this.getWithdrawalAndTransfersBlockedInfo.bind(this); + this.collateralPoolAddress = this.collateralPoolAddress.bind(this); } /* Queries a Subaccount by id */ @@ -42,9 +43,16 @@ export class LCDQueryClient { if so which block they are re-enabled on. 
*/ - async getWithdrawalAndTransfersBlockedInfo(_params: QueryGetWithdrawalAndTransfersBlockedInfoRequest = {}): Promise { - const endpoint = `dydxprotocol/subaccounts/withdrawals_and_transfers_blocked_info`; + async getWithdrawalAndTransfersBlockedInfo(params: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise { + const endpoint = `dydxprotocol/subaccounts/withdrawals_and_transfers_blocked_info/${params.perpetualId}`; return await this.req.get(endpoint); } + /* Queries the collateral pool account address for a perpetual id. */ + + + async collateralPoolAddress(params: QueryCollateralPoolAddressRequest): Promise { + const endpoint = `dydxprotocol/subaccounts/collateral_pool_address/${params.perpetualId}`; + return await this.req.get(endpoint); + } } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.rpc.Query.ts index 41f65d7652e..69ea5b399d0 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.rpc.Query.ts @@ -1,7 +1,7 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; -import { QueryGetSubaccountRequest, QuerySubaccountResponse, QueryAllSubaccountRequest, QuerySubaccountAllResponse, QueryGetWithdrawalAndTransfersBlockedInfoRequest, QueryGetWithdrawalAndTransfersBlockedInfoResponse } from "./query"; +import { QueryGetSubaccountRequest, QuerySubaccountResponse, QueryAllSubaccountRequest, QuerySubaccountAllResponse, QueryGetWithdrawalAndTransfersBlockedInfoRequest, QueryGetWithdrawalAndTransfersBlockedInfoResponse, QueryCollateralPoolAddressRequest, QueryCollateralPoolAddressResponse } from "./query"; /** Query defines the gRPC querier service. 
*/ export interface Query { @@ -15,7 +15,10 @@ export interface Query { * if so which block they are re-enabled on. */ - getWithdrawalAndTransfersBlockedInfo(request?: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise; + getWithdrawalAndTransfersBlockedInfo(request: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise; + /** Queries the collateral pool account address for a perpetual id. */ + + collateralPoolAddress(request: QueryCollateralPoolAddressRequest): Promise; } export class QueryClientImpl implements Query { private readonly rpc: Rpc; @@ -25,6 +28,7 @@ export class QueryClientImpl implements Query { this.subaccount = this.subaccount.bind(this); this.subaccountAll = this.subaccountAll.bind(this); this.getWithdrawalAndTransfersBlockedInfo = this.getWithdrawalAndTransfersBlockedInfo.bind(this); + this.collateralPoolAddress = this.collateralPoolAddress.bind(this); } subaccount(request: QueryGetSubaccountRequest): Promise { @@ -41,12 +45,18 @@ export class QueryClientImpl implements Query { return promise.then(data => QuerySubaccountAllResponse.decode(new _m0.Reader(data))); } - getWithdrawalAndTransfersBlockedInfo(request: QueryGetWithdrawalAndTransfersBlockedInfoRequest = {}): Promise { + getWithdrawalAndTransfersBlockedInfo(request: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise { const data = QueryGetWithdrawalAndTransfersBlockedInfoRequest.encode(request).finish(); const promise = this.rpc.request("dydxprotocol.subaccounts.Query", "GetWithdrawalAndTransfersBlockedInfo", data); return promise.then(data => QueryGetWithdrawalAndTransfersBlockedInfoResponse.decode(new _m0.Reader(data))); } + collateralPoolAddress(request: QueryCollateralPoolAddressRequest): Promise { + const data = QueryCollateralPoolAddressRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.subaccounts.Query", "CollateralPoolAddress", data); + return promise.then(data => QueryCollateralPoolAddressResponse.decode(new 
_m0.Reader(data))); + } + } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); @@ -60,8 +70,12 @@ export const createRpcQueryExtension = (base: QueryClient) => { return queryService.subaccountAll(request); }, - getWithdrawalAndTransfersBlockedInfo(request?: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise { + getWithdrawalAndTransfersBlockedInfo(request: QueryGetWithdrawalAndTransfersBlockedInfoRequest): Promise { return queryService.getWithdrawalAndTransfersBlockedInfo(request); + }, + + collateralPoolAddress(request: QueryCollateralPoolAddressRequest): Promise { + return queryService.collateralPoolAddress(request); } }; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.ts index 0b1d182ed98..d5512b12fe2 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/query.ts @@ -48,16 +48,22 @@ export interface QuerySubaccountAllResponseSDKType { } /** * QueryGetWithdrawalAndTransfersBlockedInfoRequest is a request type for - * fetching information about whether withdrawals and transfers are blocked. + * fetching information about whether withdrawals and transfers are blocked for + * a collateral pool associated with the passed in perpetual id. */ -export interface QueryGetWithdrawalAndTransfersBlockedInfoRequest {} +export interface QueryGetWithdrawalAndTransfersBlockedInfoRequest { + perpetualId: number; +} /** * QueryGetWithdrawalAndTransfersBlockedInfoRequest is a request type for - * fetching information about whether withdrawals and transfers are blocked. + * fetching information about whether withdrawals and transfers are blocked for + * a collateral pool associated with the passed in perpetual id. 
*/ -export interface QueryGetWithdrawalAndTransfersBlockedInfoRequestSDKType {} +export interface QueryGetWithdrawalAndTransfersBlockedInfoRequestSDKType { + perpetual_id: number; +} /** * QueryGetWithdrawalAndTransfersBlockedInfoRequest is a response type for * fetching information about whether withdrawals and transfers are blocked. @@ -78,6 +84,52 @@ export interface QueryGetWithdrawalAndTransfersBlockedInfoResponseSDKType { chain_outage_seen_at_block: number; withdrawals_and_transfers_unblocked_at_block: number; } +/** + * QueryCollateralPoolAddressRequest is the request type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. + */ + +export interface QueryCollateralPoolAddressRequest { + /** + * QueryCollateralPoolAddressRequest is the request type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. + */ + perpetualId: number; +} +/** + * QueryCollateralPoolAddressRequest is the request type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. + */ + +export interface QueryCollateralPoolAddressRequestSDKType { + /** + * QueryCollateralPoolAddressRequest is the request type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. + */ + perpetual_id: number; +} +/** + * QueryCollateralPoolAddressResponse is a response type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. + */ + +export interface QueryCollateralPoolAddressResponse { + collateralPoolAddress: string; +} +/** + * QueryCollateralPoolAddressResponse is a response type for fetching the + * account address of the collateral pool associated with the passed in + * perpetual id. 
+ */ + +export interface QueryCollateralPoolAddressResponseSDKType { + collateral_pool_address: string; +} function createBaseQueryGetSubaccountRequest(): QueryGetSubaccountRequest { return { @@ -280,11 +332,17 @@ export const QuerySubaccountAllResponse = { }; function createBaseQueryGetWithdrawalAndTransfersBlockedInfoRequest(): QueryGetWithdrawalAndTransfersBlockedInfoRequest { - return {}; + return { + perpetualId: 0 + }; } export const QueryGetWithdrawalAndTransfersBlockedInfoRequest = { - encode(_: QueryGetWithdrawalAndTransfersBlockedInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode(message: QueryGetWithdrawalAndTransfersBlockedInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.perpetualId !== 0) { + writer.uint32(8).uint32(message.perpetualId); + } + return writer; }, @@ -297,6 +355,10 @@ export const QueryGetWithdrawalAndTransfersBlockedInfoRequest = { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + message.perpetualId = reader.uint32(); + break; + default: reader.skipType(tag & 7); break; @@ -306,8 +368,9 @@ export const QueryGetWithdrawalAndTransfersBlockedInfoRequest = { return message; }, - fromPartial(_: DeepPartial): QueryGetWithdrawalAndTransfersBlockedInfoRequest { + fromPartial(object: DeepPartial): QueryGetWithdrawalAndTransfersBlockedInfoRequest { const message = createBaseQueryGetWithdrawalAndTransfersBlockedInfoRequest(); + message.perpetualId = object.perpetualId ?? 
0; return message; } @@ -376,4 +439,94 @@ export const QueryGetWithdrawalAndTransfersBlockedInfoResponse = { return message; } +}; + +function createBaseQueryCollateralPoolAddressRequest(): QueryCollateralPoolAddressRequest { + return { + perpetualId: 0 + }; +} + +export const QueryCollateralPoolAddressRequest = { + encode(message: QueryCollateralPoolAddressRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.perpetualId !== 0) { + writer.uint32(8).uint32(message.perpetualId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCollateralPoolAddressRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCollateralPoolAddressRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.perpetualId = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCollateralPoolAddressRequest { + const message = createBaseQueryCollateralPoolAddressRequest(); + message.perpetualId = object.perpetualId ?? 0; + return message; + } + +}; + +function createBaseQueryCollateralPoolAddressResponse(): QueryCollateralPoolAddressResponse { + return { + collateralPoolAddress: "" + }; +} + +export const QueryCollateralPoolAddressResponse = { + encode(message: QueryCollateralPoolAddressResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.collateralPoolAddress !== "") { + writer.uint32(10).string(message.collateralPoolAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCollateralPoolAddressResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryCollateralPoolAddressResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.collateralPoolAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCollateralPoolAddressResponse { + const message = createBaseQueryCollateralPoolAddressResponse(); + message.collateralPoolAddress = object.collateralPoolAddress ?? ""; + return message; + } + }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/streaming.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/streaming.ts new file mode 100644 index 00000000000..ce7dc7b83cd --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/subaccounts/streaming.ts @@ -0,0 +1,286 @@ +import { SubaccountId, SubaccountIdSDKType } from "./subaccount"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** + * StreamSubaccountUpdate provides information on a subaccount update. Used in + * the full node GRPC stream. + */ + +export interface StreamSubaccountUpdate { + subaccountId?: SubaccountId; + /** updated_perpetual_positions will each be for unique perpetuals. */ + + updatedPerpetualPositions: SubaccountPerpetualPosition[]; + /** updated_asset_positions will each be for unique assets. */ + + updatedAssetPositions: SubaccountAssetPosition[]; + /** + * Snapshot indicates if the response is from a snapshot of the subaccount. + * All updates should be ignored until snapshot is received. + * If the snapshot is true, then all previous entries should be + * discarded and the subaccount should be resynced. + * For a snapshot subaccount update, the `updated_perpetual_positions` and + * `updated_asset_positions` fields will contain the full state of the + * subaccount. 
+ */ + + snapshot: boolean; +} +/** + * StreamSubaccountUpdate provides information on a subaccount update. Used in + * the full node GRPC stream. + */ + +export interface StreamSubaccountUpdateSDKType { + subaccount_id?: SubaccountIdSDKType; + /** updated_perpetual_positions will each be for unique perpetuals. */ + + updated_perpetual_positions: SubaccountPerpetualPositionSDKType[]; + /** updated_asset_positions will each be for unique assets. */ + + updated_asset_positions: SubaccountAssetPositionSDKType[]; + /** + * Snapshot indicates if the response is from a snapshot of the subaccount. + * All updates should be ignored until snapshot is received. + * If the snapshot is true, then all previous entries should be + * discarded and the subaccount should be resynced. + * For a snapshot subaccount update, the `updated_perpetual_positions` and + * `updated_asset_positions` fields will contain the full state of the + * subaccount. + */ + + snapshot: boolean; +} +/** + * SubaccountPerpetualPosition provides information on a subaccount's updated + * perpetual positions. + */ + +export interface SubaccountPerpetualPosition { + /** The `Id` of the `Perpetual`. */ + perpetualId: number; + /** The size of the position in base quantums. Negative means short. */ + + quantums: Long; +} +/** + * SubaccountPerpetualPosition provides information on a subaccount's updated + * perpetual positions. + */ + +export interface SubaccountPerpetualPositionSDKType { + /** The `Id` of the `Perpetual`. */ + perpetual_id: number; + /** The size of the position in base quantums. Negative means short. */ + + quantums: Long; +} +/** + * SubaccountAssetPosition provides information on a subaccount's updated asset + * positions. + */ + +export interface SubaccountAssetPosition { + /** The `Id` of the `Asset`. */ + assetId: number; + /** The absolute size of the position in base quantums. 
*/ + + quantums: Long; +} +/** + * SubaccountAssetPosition provides information on a subaccount's updated asset + * positions. + */ + +export interface SubaccountAssetPositionSDKType { + /** The `Id` of the `Asset`. */ + asset_id: number; + /** The absolute size of the position in base quantums. */ + + quantums: Long; +} + +function createBaseStreamSubaccountUpdate(): StreamSubaccountUpdate { + return { + subaccountId: undefined, + updatedPerpetualPositions: [], + updatedAssetPositions: [], + snapshot: false + }; +} + +export const StreamSubaccountUpdate = { + encode(message: StreamSubaccountUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subaccountId !== undefined) { + SubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.updatedPerpetualPositions) { + SubaccountPerpetualPosition.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.updatedAssetPositions) { + SubaccountAssetPosition.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.snapshot === true) { + writer.uint32(32).bool(message.snapshot); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StreamSubaccountUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamSubaccountUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subaccountId = SubaccountId.decode(reader, reader.uint32()); + break; + + case 2: + message.updatedPerpetualPositions.push(SubaccountPerpetualPosition.decode(reader, reader.uint32())); + break; + + case 3: + message.updatedAssetPositions.push(SubaccountAssetPosition.decode(reader, reader.uint32())); + break; + + case 4: + message.snapshot = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StreamSubaccountUpdate { + const message = createBaseStreamSubaccountUpdate(); + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? SubaccountId.fromPartial(object.subaccountId) : undefined; + message.updatedPerpetualPositions = object.updatedPerpetualPositions?.map(e => SubaccountPerpetualPosition.fromPartial(e)) || []; + message.updatedAssetPositions = object.updatedAssetPositions?.map(e => SubaccountAssetPosition.fromPartial(e)) || []; + message.snapshot = object.snapshot ?? false; + return message; + } + +}; + +function createBaseSubaccountPerpetualPosition(): SubaccountPerpetualPosition { + return { + perpetualId: 0, + quantums: Long.ZERO + }; +} + +export const SubaccountPerpetualPosition = { + encode(message: SubaccountPerpetualPosition, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.perpetualId !== 0) { + writer.uint32(8).uint32(message.perpetualId); + } + + if (!message.quantums.isZero()) { + writer.uint32(16).int64(message.quantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SubaccountPerpetualPosition { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSubaccountPerpetualPosition(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.perpetualId = reader.uint32(); + break; + + case 2: + message.quantums = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SubaccountPerpetualPosition { + const message = createBaseSubaccountPerpetualPosition(); + message.perpetualId = object.perpetualId ?? 0; + message.quantums = object.quantums !== undefined && object.quantums !== null ? Long.fromValue(object.quantums) : Long.ZERO; + return message; + } + +}; + +function createBaseSubaccountAssetPosition(): SubaccountAssetPosition { + return { + assetId: 0, + quantums: Long.UZERO + }; +} + +export const SubaccountAssetPosition = { + encode(message: SubaccountAssetPosition, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.assetId !== 0) { + writer.uint32(8).uint32(message.assetId); + } + + if (!message.quantums.isZero()) { + writer.uint32(16).uint64(message.quantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SubaccountAssetPosition { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSubaccountAssetPosition(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.assetId = reader.uint32(); + break; + + case 2: + message.quantums = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SubaccountAssetPosition { + const message = createBaseSubaccountAssetPosition(); + message.assetId = object.assetId ?? 
0; + message.quantums = object.quantums !== undefined && object.quantums !== null ? Long.fromValue(object.quantums) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/genesis.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/genesis.ts index 9cf3de2b6d9..72752037110 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/genesis.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/genesis.ts @@ -1,18 +1,178 @@ +import { NumShares, NumSharesSDKType, OwnerShare, OwnerShareSDKType, OwnerShareUnlocks, OwnerShareUnlocksSDKType } from "./share"; +import { QuotingParams, QuotingParamsSDKType, OperatorParams, OperatorParamsSDKType, VaultParams, VaultParamsSDKType } from "./params"; +import { VaultId, VaultIdSDKType } from "./vault"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; /** GenesisState defines `x/vault`'s genesis state. */ -export interface GenesisState {} +export interface GenesisState { + /** The total number of shares, including any locked ones. */ + totalShares?: NumShares; + /** The shares of each owner, including any locked ones. */ + + ownerShares: OwnerShare[]; + /** The vaults. */ + + vaults: Vault[]; + /** The default quoting parameters for all vaults. */ + + defaultQuotingParams?: QuotingParams; + /** All owner share unlocks. */ + + allOwnerShareUnlocks: OwnerShareUnlocks[]; + /** The parameters regarding megavault operator. */ + + operatorParams?: OperatorParams; +} /** GenesisState defines `x/vault`'s genesis state. */ -export interface GenesisStateSDKType {} +export interface GenesisStateSDKType { + /** The total number of shares, including any locked ones. */ + total_shares?: NumSharesSDKType; + /** The shares of each owner, including any locked ones. */ + + owner_shares: OwnerShareSDKType[]; + /** The vaults. 
*/ + + vaults: VaultSDKType[]; + /** The default quoting parameters for all vaults. */ + + default_quoting_params?: QuotingParamsSDKType; + /** All owner share unlocks. */ + + all_owner_share_unlocks: OwnerShareUnlocksSDKType[]; + /** The parameters regarding megavault operator. */ + + operator_params?: OperatorParamsSDKType; +} +/** Vault defines the state of a vault. */ + +export interface Vault { + /** The ID of the vault. */ + vaultId?: VaultId; + /** The parameters of the vault. */ + + vaultParams?: VaultParams; + /** The client IDs of the most recently placed orders of the vault. */ + + mostRecentClientIds: number[]; +} +/** Vault defines the state of a vault. */ + +export interface VaultSDKType { + /** The ID of the vault. */ + vault_id?: VaultIdSDKType; + /** The parameters of the vault. */ + + vault_params?: VaultParamsSDKType; + /** The client IDs of the most recently placed orders of the vault. */ + + most_recent_client_ids: number[]; +} +/** + * GenesisStateV6 defines `x/vault`'s genesis state in v6.x. + * Deprecated since v7.x in favor of GenesisState. + */ + +export interface GenesisStateV6 { + /** The vaults. */ + vaults: Vault[]; + /** The default quoting parameters for all vaults. */ + + defaultQuotingParams?: QuotingParams; +} +/** + * GenesisStateV6 defines `x/vault`'s genesis state in v6.x. + * Deprecated since v7.x in favor of GenesisState. + */ + +export interface GenesisStateV6SDKType { + /** The vaults. */ + vaults: VaultSDKType[]; + /** The default quoting parameters for all vaults. */ + + default_quoting_params?: QuotingParamsSDKType; +} +/** + * VaultV6 defines the state of a vault. + * Deprecated since v7.x in favor of Vault. + */ + +export interface VaultV6 { + /** The ID of the vault. */ + vaultId?: VaultId; + /** The total number of shares in the vault. */ + + totalShares?: NumShares; + /** The shares of each owner in the vault. */ + + ownerShares: OwnerShare[]; + /** The parameters of the vault. 
*/ + + vaultParams?: VaultParams; + /** The client IDs of the most recently placed orders of the vault. */ + + mostRecentClientIds: number[]; +} +/** + * VaultV6 defines the state of a vault. + * Deprecated since v7.x in favor of Vault. + */ + +export interface VaultV6SDKType { + /** The ID of the vault. */ + vault_id?: VaultIdSDKType; + /** The total number of shares in the vault. */ + + total_shares?: NumSharesSDKType; + /** The shares of each owner in the vault. */ + + owner_shares: OwnerShareSDKType[]; + /** The parameters of the vault. */ + + vault_params?: VaultParamsSDKType; + /** The client IDs of the most recently placed orders of the vault. */ + + most_recent_client_ids: number[]; +} function createBaseGenesisState(): GenesisState { - return {}; + return { + totalShares: undefined, + ownerShares: [], + vaults: [], + defaultQuotingParams: undefined, + allOwnerShareUnlocks: [], + operatorParams: undefined + }; } export const GenesisState = { - encode(_: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.totalShares !== undefined) { + NumShares.encode(message.totalShares, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.ownerShares) { + OwnerShare.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.vaults) { + Vault.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.defaultQuotingParams !== undefined) { + QuotingParams.encode(message.defaultQuotingParams, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.allOwnerShareUnlocks) { + OwnerShareUnlocks.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + if (message.operatorParams !== undefined) { + OperatorParams.encode(message.operatorParams, writer.uint32(50).fork()).ldelim(); + } + return writer; }, @@ -25,6 +185,30 @@ export const GenesisState = { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + 
message.totalShares = NumShares.decode(reader, reader.uint32()); + break; + + case 2: + message.ownerShares.push(OwnerShare.decode(reader, reader.uint32())); + break; + + case 3: + message.vaults.push(Vault.decode(reader, reader.uint32())); + break; + + case 4: + message.defaultQuotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + + case 5: + message.allOwnerShareUnlocks.push(OwnerShareUnlocks.decode(reader, reader.uint32())); + break; + + case 6: + message.operatorParams = OperatorParams.decode(reader, reader.uint32()); + break; + default: reader.skipType(tag & 7); break; @@ -34,8 +218,243 @@ export const GenesisState = { return message; }, - fromPartial(_: DeepPartial): GenesisState { + fromPartial(object: DeepPartial): GenesisState { const message = createBaseGenesisState(); + message.totalShares = object.totalShares !== undefined && object.totalShares !== null ? NumShares.fromPartial(object.totalShares) : undefined; + message.ownerShares = object.ownerShares?.map(e => OwnerShare.fromPartial(e)) || []; + message.vaults = object.vaults?.map(e => Vault.fromPartial(e)) || []; + message.defaultQuotingParams = object.defaultQuotingParams !== undefined && object.defaultQuotingParams !== null ? QuotingParams.fromPartial(object.defaultQuotingParams) : undefined; + message.allOwnerShareUnlocks = object.allOwnerShareUnlocks?.map(e => OwnerShareUnlocks.fromPartial(e)) || []; + message.operatorParams = object.operatorParams !== undefined && object.operatorParams !== null ? 
OperatorParams.fromPartial(object.operatorParams) : undefined; + return message; + } + +}; + +function createBaseVault(): Vault { + return { + vaultId: undefined, + vaultParams: undefined, + mostRecentClientIds: [] + }; +} + +export const Vault = { + encode(message: Vault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(10).fork()).ldelim(); + } + + if (message.vaultParams !== undefined) { + VaultParams.encode(message.vaultParams, writer.uint32(18).fork()).ldelim(); + } + + writer.uint32(26).fork(); + + for (const v of message.mostRecentClientIds) { + writer.uint32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vault { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVault(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 2: + message.vaultParams = VaultParams.decode(reader, reader.uint32()); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.mostRecentClientIds.push(reader.uint32()); + } + } else { + message.mostRecentClientIds.push(reader.uint32()); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Vault { + const message = createBaseVault(); + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.vaultParams = object.vaultParams !== undefined && object.vaultParams !== null ? 
VaultParams.fromPartial(object.vaultParams) : undefined; + message.mostRecentClientIds = object.mostRecentClientIds?.map(e => e) || []; + return message; + } + +}; + +function createBaseGenesisStateV6(): GenesisStateV6 { + return { + vaults: [], + defaultQuotingParams: undefined + }; +} + +export const GenesisStateV6 = { + encode(message: GenesisStateV6, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.vaults) { + Vault.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.defaultQuotingParams !== undefined) { + QuotingParams.encode(message.defaultQuotingParams, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisStateV6 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisStateV6(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.vaults.push(Vault.decode(reader, reader.uint32())); + break; + + case 3: + message.defaultQuotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisStateV6 { + const message = createBaseGenesisStateV6(); + message.vaults = object.vaults?.map(e => Vault.fromPartial(e)) || []; + message.defaultQuotingParams = object.defaultQuotingParams !== undefined && object.defaultQuotingParams !== null ? 
QuotingParams.fromPartial(object.defaultQuotingParams) : undefined; + return message; + } + +}; + +function createBaseVaultV6(): VaultV6 { + return { + vaultId: undefined, + totalShares: undefined, + ownerShares: [], + vaultParams: undefined, + mostRecentClientIds: [] + }; +} + +export const VaultV6 = { + encode(message: VaultV6, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(10).fork()).ldelim(); + } + + if (message.totalShares !== undefined) { + NumShares.encode(message.totalShares, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.ownerShares) { + OwnerShare.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.vaultParams !== undefined) { + VaultParams.encode(message.vaultParams, writer.uint32(34).fork()).ldelim(); + } + + writer.uint32(42).fork(); + + for (const v of message.mostRecentClientIds) { + writer.uint32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VaultV6 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVaultV6(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 2: + message.totalShares = NumShares.decode(reader, reader.uint32()); + break; + + case 3: + message.ownerShares.push(OwnerShare.decode(reader, reader.uint32())); + break; + + case 4: + message.vaultParams = VaultParams.decode(reader, reader.uint32()); + break; + + case 5: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.mostRecentClientIds.push(reader.uint32()); + } + } else { + message.mostRecentClientIds.push(reader.uint32()); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VaultV6 { + const message = createBaseVaultV6(); + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.totalShares = object.totalShares !== undefined && object.totalShares !== null ? NumShares.fromPartial(object.totalShares) : undefined; + message.ownerShares = object.ownerShares?.map(e => OwnerShare.fromPartial(e)) || []; + message.vaultParams = object.vaultParams !== undefined && object.vaultParams !== null ? 
VaultParams.fromPartial(object.vaultParams) : undefined; + message.mostRecentClientIds = object.mostRecentClientIds?.map(e => e) || []; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/params.ts new file mode 100644 index 00000000000..2d3e279da0d --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/params.ts @@ -0,0 +1,582 @@ +import { VaultStatus, VaultStatusSDKType } from "./vault"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** QuotingParams stores vault quoting parameters. */ + +export interface QuotingParams { + /** + * The number of layers of orders a vault places. For example if + * `layers=2`, a vault places 2 asks and 2 bids. + */ + layers: number; + /** The minimum base spread when a vault quotes around reservation price. */ + + spreadMinPpm: number; + /** + * The buffer amount to add to min_price_change_ppm to arrive at `spread` + * according to formula: + * `spread = max(spread_min_ppm, min_price_change_ppm + spread_buffer_ppm)`. + */ + + spreadBufferPpm: number; + /** The factor that determines how aggressive a vault skews its orders. */ + + skewFactorPpm: number; + /** The percentage of vault equity that each order is sized at. */ + + orderSizePctPpm: number; + /** The duration that a vault's orders are valid for. */ + + orderExpirationSeconds: number; + /** + * The number of quote quantums in quote asset that a vault with no perpetual + * positions must have to activate, i.e. if a vault has no perpetual positions + * and has strictly less than this amount of quote asset, it will not + * activate. + */ + + activationThresholdQuoteQuantums: Uint8Array; +} +/** QuotingParams stores vault quoting parameters. */ + +export interface QuotingParamsSDKType { + /** + * The number of layers of orders a vault places. For example if + * `layers=2`, a vault places 2 asks and 2 bids. 
+ */ + layers: number; + /** The minimum base spread when a vault quotes around reservation price. */ + + spread_min_ppm: number; + /** + * The buffer amount to add to min_price_change_ppm to arrive at `spread` + * according to formula: + * `spread = max(spread_min_ppm, min_price_change_ppm + spread_buffer_ppm)`. + */ + + spread_buffer_ppm: number; + /** The factor that determines how aggressive a vault skews its orders. */ + + skew_factor_ppm: number; + /** The percentage of vault equity that each order is sized at. */ + + order_size_pct_ppm: number; + /** The duration that a vault's orders are valid for. */ + + order_expiration_seconds: number; + /** + * The number of quote quantums in quote asset that a vault with no perpetual + * positions must have to activate, i.e. if a vault has no perpetual positions + * and has strictly less than this amount of quote asset, it will not + * activate. + */ + + activation_threshold_quote_quantums: Uint8Array; +} +/** VaultParams stores vault parameters. */ + +export interface VaultParams { + /** Status of the vault. */ + status: VaultStatus; + /** Quoting parameters of the vault. */ + + quotingParams?: QuotingParams; +} +/** VaultParams stores vault parameters. */ + +export interface VaultParamsSDKType { + /** Status of the vault. */ + status: VaultStatusSDKType; + /** Quoting parameters of the vault. */ + + quoting_params?: QuotingParamsSDKType; +} +/** OperatorParams stores parameters regarding megavault operator. */ + +export interface OperatorParams { + /** Address of the operator. */ + operator: string; + /** Metadata of the operator. */ + + metadata?: OperatorMetadata; +} +/** OperatorParams stores parameters regarding megavault operator. */ + +export interface OperatorParamsSDKType { + /** Address of the operator. */ + operator: string; + /** Metadata of the operator. */ + + metadata?: OperatorMetadataSDKType; +} +/** OperatorMetadata stores metadata regarding megavault operator. 
*/ + +export interface OperatorMetadata { + /** Name of the operator. */ + name: string; + /** Description of the operator. */ + + description: string; +} +/** OperatorMetadata stores metadata regarding megavault operator. */ + +export interface OperatorMetadataSDKType { + /** Name of the operator. */ + name: string; + /** Description of the operator. */ + + description: string; +} +/** + * Deprecated: Params stores `x/vault` parameters. + * Deprecated since v6.x as is replaced by QuotingParams. + */ + +export interface Params { + /** + * The number of layers of orders a vault places. For example if + * `layers=2`, a vault places 2 asks and 2 bids. + */ + layers: number; + /** The minimum base spread when a vault quotes around reservation price. */ + + spreadMinPpm: number; + /** + * The buffer amount to add to min_price_change_ppm to arrive at `spread` + * according to formula: + * `spread = max(spread_min_ppm, min_price_change_ppm + spread_buffer_ppm)`. + */ + + spreadBufferPpm: number; + /** The factor that determines how aggressive a vault skews its orders. */ + + skewFactorPpm: number; + /** The percentage of vault equity that each order is sized at. */ + + orderSizePctPpm: number; + /** The duration that a vault's orders are valid for. */ + + orderExpirationSeconds: number; + /** + * The number of quote quantums in quote asset that a vault with no perpetual + * positions must have to activate, i.e. if a vault has no perpetual positions + * and has strictly less than this amount of quote asset, it will not + * activate. + */ + + activationThresholdQuoteQuantums: Uint8Array; +} +/** + * Deprecated: Params stores `x/vault` parameters. + * Deprecated since v6.x as is replaced by QuotingParams. + */ + +export interface ParamsSDKType { + /** + * The number of layers of orders a vault places. For example if + * `layers=2`, a vault places 2 asks and 2 bids. + */ + layers: number; + /** The minimum base spread when a vault quotes around reservation price. 
*/ + + spread_min_ppm: number; + /** + * The buffer amount to add to min_price_change_ppm to arrive at `spread` + * according to formula: + * `spread = max(spread_min_ppm, min_price_change_ppm + spread_buffer_ppm)`. + */ + + spread_buffer_ppm: number; + /** The factor that determines how aggressive a vault skews its orders. */ + + skew_factor_ppm: number; + /** The percentage of vault equity that each order is sized at. */ + + order_size_pct_ppm: number; + /** The duration that a vault's orders are valid for. */ + + order_expiration_seconds: number; + /** + * The number of quote quantums in quote asset that a vault with no perpetual + * positions must have to activate, i.e. if a vault has no perpetual positions + * and has strictly less than this amount of quote asset, it will not + * activate. + */ + + activation_threshold_quote_quantums: Uint8Array; +} + +function createBaseQuotingParams(): QuotingParams { + return { + layers: 0, + spreadMinPpm: 0, + spreadBufferPpm: 0, + skewFactorPpm: 0, + orderSizePctPpm: 0, + orderExpirationSeconds: 0, + activationThresholdQuoteQuantums: new Uint8Array() + }; +} + +export const QuotingParams = { + encode(message: QuotingParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.layers !== 0) { + writer.uint32(8).uint32(message.layers); + } + + if (message.spreadMinPpm !== 0) { + writer.uint32(16).uint32(message.spreadMinPpm); + } + + if (message.spreadBufferPpm !== 0) { + writer.uint32(24).uint32(message.spreadBufferPpm); + } + + if (message.skewFactorPpm !== 0) { + writer.uint32(32).uint32(message.skewFactorPpm); + } + + if (message.orderSizePctPpm !== 0) { + writer.uint32(40).uint32(message.orderSizePctPpm); + } + + if (message.orderExpirationSeconds !== 0) { + writer.uint32(48).uint32(message.orderExpirationSeconds); + } + + if (message.activationThresholdQuoteQuantums.length !== 0) { + writer.uint32(58).bytes(message.activationThresholdQuoteQuantums); + } + + return writer; + }, + + decode(input: 
_m0.Reader | Uint8Array, length?: number): QuotingParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuotingParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.layers = reader.uint32(); + break; + + case 2: + message.spreadMinPpm = reader.uint32(); + break; + + case 3: + message.spreadBufferPpm = reader.uint32(); + break; + + case 4: + message.skewFactorPpm = reader.uint32(); + break; + + case 5: + message.orderSizePctPpm = reader.uint32(); + break; + + case 6: + message.orderExpirationSeconds = reader.uint32(); + break; + + case 7: + message.activationThresholdQuoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuotingParams { + const message = createBaseQuotingParams(); + message.layers = object.layers ?? 0; + message.spreadMinPpm = object.spreadMinPpm ?? 0; + message.spreadBufferPpm = object.spreadBufferPpm ?? 0; + message.skewFactorPpm = object.skewFactorPpm ?? 0; + message.orderSizePctPpm = object.orderSizePctPpm ?? 0; + message.orderExpirationSeconds = object.orderExpirationSeconds ?? 0; + message.activationThresholdQuoteQuantums = object.activationThresholdQuoteQuantums ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseVaultParams(): VaultParams { + return { + status: 0, + quotingParams: undefined + }; +} + +export const VaultParams = { + encode(message: VaultParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== 0) { + writer.uint32(8).int32(message.status); + } + + if (message.quotingParams !== undefined) { + QuotingParams.encode(message.quotingParams, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VaultParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVaultParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.status = (reader.int32() as any); + break; + + case 2: + message.quotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VaultParams { + const message = createBaseVaultParams(); + message.status = object.status ?? 0; + message.quotingParams = object.quotingParams !== undefined && object.quotingParams !== null ? 
QuotingParams.fromPartial(object.quotingParams) : undefined; + return message; + } + +}; + +function createBaseOperatorParams(): OperatorParams { + return { + operator: "", + metadata: undefined + }; +} + +export const OperatorParams = { + encode(message: OperatorParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.operator !== "") { + writer.uint32(10).string(message.operator); + } + + if (message.metadata !== undefined) { + OperatorMetadata.encode(message.metadata, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OperatorParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOperatorParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.operator = reader.string(); + break; + + case 2: + message.metadata = OperatorMetadata.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OperatorParams { + const message = createBaseOperatorParams(); + message.operator = object.operator ?? ""; + message.metadata = object.metadata !== undefined && object.metadata !== null ? 
OperatorMetadata.fromPartial(object.metadata) : undefined; + return message; + } + +}; + +function createBaseOperatorMetadata(): OperatorMetadata { + return { + name: "", + description: "" + }; +} + +export const OperatorMetadata = { + encode(message: OperatorMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OperatorMetadata { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOperatorMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OperatorMetadata { + const message = createBaseOperatorMetadata(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + } + +}; + +function createBaseParams(): Params { + return { + layers: 0, + spreadMinPpm: 0, + spreadBufferPpm: 0, + skewFactorPpm: 0, + orderSizePctPpm: 0, + orderExpirationSeconds: 0, + activationThresholdQuoteQuantums: new Uint8Array() + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.layers !== 0) { + writer.uint32(8).uint32(message.layers); + } + + if (message.spreadMinPpm !== 0) { + writer.uint32(16).uint32(message.spreadMinPpm); + } + + if (message.spreadBufferPpm !== 0) { + writer.uint32(24).uint32(message.spreadBufferPpm); + } + + if (message.skewFactorPpm !== 0) { + writer.uint32(32).uint32(message.skewFactorPpm); + } + + if (message.orderSizePctPpm !== 0) { + writer.uint32(40).uint32(message.orderSizePctPpm); + } + + if (message.orderExpirationSeconds !== 0) { + writer.uint32(48).uint32(message.orderExpirationSeconds); + } + + if (message.activationThresholdQuoteQuantums.length !== 0) { + writer.uint32(58).bytes(message.activationThresholdQuoteQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.layers = reader.uint32(); + break; + + case 2: + message.spreadMinPpm = reader.uint32(); + break; + + case 3: + message.spreadBufferPpm = reader.uint32(); + break; + + case 4: + message.skewFactorPpm = reader.uint32(); + break; + + case 5: + message.orderSizePctPpm = reader.uint32(); + break; + + case 6: + message.orderExpirationSeconds = reader.uint32(); + break; + + case 7: + message.activationThresholdQuoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.layers = object.layers ?? 0; + message.spreadMinPpm = object.spreadMinPpm ?? 0; + message.spreadBufferPpm = object.spreadBufferPpm ?? 0; + message.skewFactorPpm = object.skewFactorPpm ?? 0; + message.orderSizePctPpm = object.orderSizePctPpm ?? 0; + message.orderExpirationSeconds = object.orderExpirationSeconds ?? 0; + message.activationThresholdQuoteQuantums = object.activationThresholdQuoteQuantums ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.lcd.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.lcd.ts new file mode 100644 index 00000000000..afae739e584 --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.lcd.ts @@ -0,0 +1,91 @@ +import { setPaginationParams } from "../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QueryVaultRequest, QueryVaultResponseSDKType, QueryAllVaultsRequest, QueryAllVaultsResponseSDKType, QueryMegavaultTotalSharesRequest, QueryMegavaultTotalSharesResponseSDKType, QueryMegavaultOwnerSharesRequest, QueryMegavaultOwnerSharesResponseSDKType, QueryMegavaultAllOwnerSharesRequest, QueryMegavaultAllOwnerSharesResponseSDKType, QueryVaultParamsRequest, QueryVaultParamsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.vault = this.vault.bind(this); + this.allVaults = this.allVaults.bind(this); + this.megavaultTotalShares = this.megavaultTotalShares.bind(this); + this.megavaultOwnerShares = this.megavaultOwnerShares.bind(this); + this.megavaultAllOwnerShares = this.megavaultAllOwnerShares.bind(this); + this.vaultParams = this.vaultParams.bind(this); + } + /* Queries the Params. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `dydxprotocol/vault/params`; + return await this.req.get(endpoint); + } + /* Queries a Vault by type and number. */ + + + async vault(params: QueryVaultRequest): Promise { + const endpoint = `dydxprotocol/vault/vault/${params.type}/${params.number}`; + return await this.req.get(endpoint); + } + /* Queries all vaults. 
*/ + + + async allVaults(params: QueryAllVaultsRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `dydxprotocol/vault/vault`; + return await this.req.get(endpoint, options); + } + /* Queries total shares of megavault. */ + + + async megavaultTotalShares(_params: QueryMegavaultTotalSharesRequest = {}): Promise { + const endpoint = `dydxprotocol/vault/megavault/total_shares`; + return await this.req.get(endpoint); + } + /* Queries owner shares of megavault. */ + + + async megavaultOwnerShares(params: QueryMegavaultOwnerSharesRequest): Promise { + const endpoint = `dydxprotocol/vault/megavault/owner_shares/${params.address}`; + return await this.req.get(endpoint); + } + /* Queries all owner shares of megavault. */ + + + async megavaultAllOwnerShares(params: QueryMegavaultAllOwnerSharesRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `dydxprotocol/vault/megavault/all_owner_shares`; + return await this.req.get(endpoint, options); + } + /* Queries vault params of a vault. 
*/ + + + async vaultParams(params: QueryVaultParamsRequest): Promise { + const endpoint = `dydxprotocol/vault/params/${params.type}/${params.number}`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.rpc.Query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.rpc.Query.ts index ab81adee85c..78897009758 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.rpc.Query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.rpc.Query.ts @@ -1,18 +1,137 @@ import { Rpc } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QueryVaultRequest, QueryVaultResponse, QueryAllVaultsRequest, QueryAllVaultsResponse, QueryMegavaultTotalSharesRequest, QueryMegavaultTotalSharesResponse, QueryMegavaultOwnerSharesRequest, QueryMegavaultOwnerSharesResponse, QueryMegavaultAllOwnerSharesRequest, QueryMegavaultAllOwnerSharesResponse, QueryVaultParamsRequest, QueryVaultParamsResponse, QueryMegavaultWithdrawalInfoRequest, QueryMegavaultWithdrawalInfoResponse } from "./query"; /** Query defines the gRPC querier service. */ -export interface Query {} +export interface Query { + /** Queries the Params. */ + params(request?: QueryParamsRequest): Promise; + /** Queries a Vault by type and number. */ + + vault(request: QueryVaultRequest): Promise; + /** Queries all vaults. */ + + allVaults(request?: QueryAllVaultsRequest): Promise; + /** Queries total shares of megavault. */ + + megavaultTotalShares(request?: QueryMegavaultTotalSharesRequest): Promise; + /** Queries owner shares of megavault. */ + + megavaultOwnerShares(request: QueryMegavaultOwnerSharesRequest): Promise; + /** Queries all owner shares of megavault. 
*/ + + megavaultAllOwnerShares(request?: QueryMegavaultAllOwnerSharesRequest): Promise; + /** Queries vault params of a vault. */ + + vaultParams(request: QueryVaultParamsRequest): Promise; + /** Queries withdrawal info for megavault. */ + + megavaultWithdrawalInfo(request: QueryMegavaultWithdrawalInfoRequest): Promise; +} export class QueryClientImpl implements Query { private readonly rpc: Rpc; constructor(rpc: Rpc) { this.rpc = rpc; + this.params = this.params.bind(this); + this.vault = this.vault.bind(this); + this.allVaults = this.allVaults.bind(this); + this.megavaultTotalShares = this.megavaultTotalShares.bind(this); + this.megavaultOwnerShares = this.megavaultOwnerShares.bind(this); + this.megavaultAllOwnerShares = this.megavaultAllOwnerShares.bind(this); + this.vaultParams = this.vaultParams.bind(this); + this.megavaultWithdrawalInfo = this.megavaultWithdrawalInfo.bind(this); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + vault(request: QueryVaultRequest): Promise { + const data = QueryVaultRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "Vault", data); + return promise.then(data => QueryVaultResponse.decode(new _m0.Reader(data))); + } + + allVaults(request: QueryAllVaultsRequest = { + pagination: undefined + }): Promise { + const data = QueryAllVaultsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "AllVaults", data); + return promise.then(data => QueryAllVaultsResponse.decode(new _m0.Reader(data))); + } + + megavaultTotalShares(request: QueryMegavaultTotalSharesRequest = {}): Promise { + const data = QueryMegavaultTotalSharesRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", 
"MegavaultTotalShares", data); + return promise.then(data => QueryMegavaultTotalSharesResponse.decode(new _m0.Reader(data))); + } + + megavaultOwnerShares(request: QueryMegavaultOwnerSharesRequest): Promise { + const data = QueryMegavaultOwnerSharesRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "MegavaultOwnerShares", data); + return promise.then(data => QueryMegavaultOwnerSharesResponse.decode(new _m0.Reader(data))); + } + + megavaultAllOwnerShares(request: QueryMegavaultAllOwnerSharesRequest = { + pagination: undefined + }): Promise { + const data = QueryMegavaultAllOwnerSharesRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "MegavaultAllOwnerShares", data); + return promise.then(data => QueryMegavaultAllOwnerSharesResponse.decode(new _m0.Reader(data))); + } + + vaultParams(request: QueryVaultParamsRequest): Promise { + const data = QueryVaultParamsRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "VaultParams", data); + return promise.then(data => QueryVaultParamsResponse.decode(new _m0.Reader(data))); + } + + megavaultWithdrawalInfo(request: QueryMegavaultWithdrawalInfoRequest): Promise { + const data = QueryMegavaultWithdrawalInfoRequest.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Query", "MegavaultWithdrawalInfo", data); + return promise.then(data => QueryMegavaultWithdrawalInfoResponse.decode(new _m0.Reader(data))); } } export const createRpcQueryExtension = (base: QueryClient) => { const rpc = createProtobufRpcClient(base); const queryService = new QueryClientImpl(rpc); - return {}; + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + vault(request: QueryVaultRequest): Promise { + return queryService.vault(request); + }, + + allVaults(request?: QueryAllVaultsRequest): Promise { + return queryService.allVaults(request); + }, 
+ + megavaultTotalShares(request?: QueryMegavaultTotalSharesRequest): Promise { + return queryService.megavaultTotalShares(request); + }, + + megavaultOwnerShares(request: QueryMegavaultOwnerSharesRequest): Promise { + return queryService.megavaultOwnerShares(request); + }, + + megavaultAllOwnerShares(request?: QueryMegavaultAllOwnerSharesRequest): Promise { + return queryService.megavaultAllOwnerShares(request); + }, + + vaultParams(request: QueryVaultParamsRequest): Promise { + return queryService.vaultParams(request); + }, + + megavaultWithdrawalInfo(request: QueryMegavaultWithdrawalInfoRequest): Promise { + return queryService.megavaultWithdrawalInfo(request); + } + + }; }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.ts index 693da49fc40..0d2f16b855f 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/query.ts @@ -1 +1,1191 @@ -export {} \ No newline at end of file +import { VaultType, VaultTypeSDKType, VaultId, VaultIdSDKType } from "./vault"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../cosmos/base/query/v1beta1/pagination"; +import { NumShares, NumSharesSDKType, ShareUnlock, ShareUnlockSDKType, OwnerShare, OwnerShareSDKType } from "./share"; +import { QuotingParams, QuotingParamsSDKType, OperatorParams, OperatorParamsSDKType, VaultParams, VaultParamsSDKType } from "./params"; +import { SubaccountId, SubaccountIdSDKType } from "../subaccounts/subaccount"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** QueryParamsRequest is a request type for the Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is a request type for the Params RPC method. 
*/ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is a response type for the Params RPC method. */ + +export interface QueryParamsResponse { + defaultQuotingParams?: QuotingParams; + operatorParams?: OperatorParams; +} +/** QueryParamsResponse is a response type for the Params RPC method. */ + +export interface QueryParamsResponseSDKType { + default_quoting_params?: QuotingParamsSDKType; + operator_params?: OperatorParamsSDKType; +} +/** QueryVaultRequest is a request type for the Vault RPC method. */ + +export interface QueryVaultRequest { + type: VaultType; + number: number; +} +/** QueryVaultRequest is a request type for the Vault RPC method. */ + +export interface QueryVaultRequestSDKType { + type: VaultTypeSDKType; + number: number; +} +/** QueryVaultResponse is a response type for the Vault RPC method. */ + +export interface QueryVaultResponse { + vaultId?: VaultId; + subaccountId?: SubaccountId; + equity: Uint8Array; + inventory: Uint8Array; + vaultParams?: VaultParams; + mostRecentClientIds: number[]; +} +/** QueryVaultResponse is a response type for the Vault RPC method. */ + +export interface QueryVaultResponseSDKType { + vault_id?: VaultIdSDKType; + subaccount_id?: SubaccountIdSDKType; + equity: Uint8Array; + inventory: Uint8Array; + vault_params?: VaultParamsSDKType; + most_recent_client_ids: number[]; +} +/** QueryAllVaultsRequest is a request type for the AllVaults RPC method. */ + +export interface QueryAllVaultsRequest { + pagination?: PageRequest; +} +/** QueryAllVaultsRequest is a request type for the AllVaults RPC method. */ + +export interface QueryAllVaultsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryAllVaultsResponse is a response type for the AllVaults RPC method. */ + +export interface QueryAllVaultsResponse { + vaults: QueryVaultResponse[]; + pagination?: PageResponse; +} +/** QueryAllVaultsResponse is a response type for the AllVaults RPC method. 
*/ + +export interface QueryAllVaultsResponseSDKType { + vaults: QueryVaultResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryMegavaultTotalSharesRequest is a request type for the + * MegavaultTotalShares RPC method. + */ + +export interface QueryMegavaultTotalSharesRequest {} +/** + * QueryMegavaultTotalSharesRequest is a request type for the + * MegavaultTotalShares RPC method. + */ + +export interface QueryMegavaultTotalSharesRequestSDKType {} +/** + * QueryMegavaultTotalSharesResponse is a response type for the + * MegavaultTotalShares RPC method. + */ + +export interface QueryMegavaultTotalSharesResponse { + /** + * QueryMegavaultTotalSharesResponse is a response type for the + * MegavaultTotalShares RPC method. + */ + totalShares?: NumShares; +} +/** + * QueryMegavaultTotalSharesResponse is a response type for the + * MegavaultTotalShares RPC method. + */ + +export interface QueryMegavaultTotalSharesResponseSDKType { + /** + * QueryMegavaultTotalSharesResponse is a response type for the + * MegavaultTotalShares RPC method. + */ + total_shares?: NumSharesSDKType; +} +/** + * QueryMegavaultOwnerSharesRequest is a request type for the + * MegavaultOwnerShares RPC method. + */ + +export interface QueryMegavaultOwnerSharesRequest { + address: string; +} +/** + * QueryMegavaultOwnerSharesRequest is a request type for the + * MegavaultOwnerShares RPC method. + */ + +export interface QueryMegavaultOwnerSharesRequestSDKType { + address: string; +} +/** + * QueryMegavaultOwnerSharesResponse is a response type for the + * MegavaultOwnerShares RPC method. + */ + +export interface QueryMegavaultOwnerSharesResponse { + /** Owner address. */ + address: string; + /** Total number of shares that belong to the owner. */ + + shares?: NumShares; + /** All share unlocks. */ + + shareUnlocks: ShareUnlock[]; + /** Owner equity in megavault (in quote quantums). 
*/ + + equity: Uint8Array; + /** + * Equity that owner can withdraw in quote quantums (as one cannot + * withdraw locked shares). + */ + + withdrawableEquity: Uint8Array; +} +/** + * QueryMegavaultOwnerSharesResponse is a response type for the + * MegavaultOwnerShares RPC method. + */ + +export interface QueryMegavaultOwnerSharesResponseSDKType { + /** Owner address. */ + address: string; + /** Total number of shares that belong to the owner. */ + + shares?: NumSharesSDKType; + /** All share unlocks. */ + + share_unlocks: ShareUnlockSDKType[]; + /** Owner equity in megavault (in quote quantums). */ + + equity: Uint8Array; + /** + * Equity that owner can withdraw in quote quantums (as one cannot + * withdraw locked shares). + */ + + withdrawable_equity: Uint8Array; +} +/** + * QueryMegavaultAllOwnerSharesRequest is a request type for the + * MegavaultAllOwnerShares RPC method. + */ + +export interface QueryMegavaultAllOwnerSharesRequest { + pagination?: PageRequest; +} +/** + * QueryMegavaultAllOwnerSharesRequest is a request type for the + * MegavaultAllOwnerShares RPC method. + */ + +export interface QueryMegavaultAllOwnerSharesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryMegavaultAllOwnerSharesResponse is a response type for the + * MegavaultAllOwnerShares RPC method. + */ + +export interface QueryMegavaultAllOwnerSharesResponse { + ownerShares: OwnerShare[]; + pagination?: PageResponse; +} +/** + * QueryMegavaultAllOwnerSharesResponse is a response type for the + * MegavaultAllOwnerShares RPC method. + */ + +export interface QueryMegavaultAllOwnerSharesResponseSDKType { + owner_shares: OwnerShareSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVaultParamsRequest is a request for the VaultParams RPC method. */ + +export interface QueryVaultParamsRequest { + type: VaultType; + number: number; +} +/** QueryVaultParamsRequest is a request for the VaultParams RPC method. 
*/ + +export interface QueryVaultParamsRequestSDKType { + type: VaultTypeSDKType; + number: number; +} +/** QueryVaultParamsResponse is a response for the VaultParams RPC method. */ + +export interface QueryVaultParamsResponse { + vaultId?: VaultId; + vaultParams?: VaultParams; +} +/** QueryVaultParamsResponse is a response for the VaultParams RPC method. */ + +export interface QueryVaultParamsResponseSDKType { + vault_id?: VaultIdSDKType; + vault_params?: VaultParamsSDKType; +} +/** + * QueryMegavaultWithdrawalInfoRequest is a request type for the + * MegavaultWithdrawalInfo RPC method. + */ + +export interface QueryMegavaultWithdrawalInfoRequest { + /** Number of shares to withdraw. */ + sharesToWithdraw?: NumShares; +} +/** + * QueryMegavaultWithdrawalInfoRequest is a request type for the + * MegavaultWithdrawalInfo RPC method. + */ + +export interface QueryMegavaultWithdrawalInfoRequestSDKType { + /** Number of shares to withdraw. */ + shares_to_withdraw?: NumSharesSDKType; +} +/** + * QueryMegavaultWithdrawalInfoResponse is a response type for the + * MegavaultWithdrawalInfo RPC method. + */ + +export interface QueryMegavaultWithdrawalInfoResponse { + /** Number of shares to withdraw. */ + sharesToWithdraw?: NumShares; + /** + * Number of quote quantums above `shares` are expected to redeem. + * Withdrawl slippage can be calculated by comparing + * `expected_quote_quantums` with + * `megavault_equity * shares_to_withdraw / total_shares` + */ + + expectedQuoteQuantums: Uint8Array; + /** Equity of megavault (in quote quantums). */ + + megavaultEquity: Uint8Array; + /** Total shares in megavault. */ + + totalShares?: NumShares; +} +/** + * QueryMegavaultWithdrawalInfoResponse is a response type for the + * MegavaultWithdrawalInfo RPC method. + */ + +export interface QueryMegavaultWithdrawalInfoResponseSDKType { + /** Number of shares to withdraw. */ + shares_to_withdraw?: NumSharesSDKType; + /** + * Number of quote quantums above `shares` are expected to redeem. 
+ * Withdrawl slippage can be calculated by comparing + * `expected_quote_quantums` with + * `megavault_equity * shares_to_withdraw / total_shares` + */ + + expected_quote_quantums: Uint8Array; + /** Equity of megavault (in quote quantums). */ + + megavault_equity: Uint8Array; + /** Total shares in megavault. */ + + total_shares?: NumSharesSDKType; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + defaultQuotingParams: undefined, + operatorParams: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.defaultQuotingParams !== undefined) { + QuotingParams.encode(message.defaultQuotingParams, writer.uint32(10).fork()).ldelim(); + } + + if (message.operatorParams !== undefined) { + OperatorParams.encode(message.operatorParams, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.defaultQuotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + + case 2: + message.operatorParams = OperatorParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.defaultQuotingParams = object.defaultQuotingParams !== undefined && object.defaultQuotingParams !== null ? QuotingParams.fromPartial(object.defaultQuotingParams) : undefined; + message.operatorParams = object.operatorParams !== undefined && object.operatorParams !== null ? OperatorParams.fromPartial(object.operatorParams) : undefined; + return message; + } + +}; + +function createBaseQueryVaultRequest(): QueryVaultRequest { + return { + type: 0, + number: 0 + }; +} + +export const QueryVaultRequest = { + encode(message: QueryVaultRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + + if (message.number !== 0) { + writer.uint32(16).uint32(message.number); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVaultRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryVaultRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = (reader.int32() as any); + break; + + case 2: + message.number = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVaultRequest { + const message = createBaseQueryVaultRequest(); + message.type = object.type ?? 0; + message.number = object.number ?? 0; + return message; + } + +}; + +function createBaseQueryVaultResponse(): QueryVaultResponse { + return { + vaultId: undefined, + subaccountId: undefined, + equity: new Uint8Array(), + inventory: new Uint8Array(), + vaultParams: undefined, + mostRecentClientIds: [] + }; +} + +export const QueryVaultResponse = { + encode(message: QueryVaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(10).fork()).ldelim(); + } + + if (message.subaccountId !== undefined) { + SubaccountId.encode(message.subaccountId, writer.uint32(18).fork()).ldelim(); + } + + if (message.equity.length !== 0) { + writer.uint32(26).bytes(message.equity); + } + + if (message.inventory.length !== 0) { + writer.uint32(34).bytes(message.inventory); + } + + if (message.vaultParams !== undefined) { + VaultParams.encode(message.vaultParams, writer.uint32(42).fork()).ldelim(); + } + + writer.uint32(50).fork(); + + for (const v of message.mostRecentClientIds) { + writer.uint32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVaultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryVaultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 2: + message.subaccountId = SubaccountId.decode(reader, reader.uint32()); + break; + + case 3: + message.equity = reader.bytes(); + break; + + case 4: + message.inventory = reader.bytes(); + break; + + case 5: + message.vaultParams = VaultParams.decode(reader, reader.uint32()); + break; + + case 6: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.mostRecentClientIds.push(reader.uint32()); + } + } else { + message.mostRecentClientIds.push(reader.uint32()); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVaultResponse { + const message = createBaseQueryVaultResponse(); + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? SubaccountId.fromPartial(object.subaccountId) : undefined; + message.equity = object.equity ?? new Uint8Array(); + message.inventory = object.inventory ?? new Uint8Array(); + message.vaultParams = object.vaultParams !== undefined && object.vaultParams !== null ? 
VaultParams.fromPartial(object.vaultParams) : undefined; + message.mostRecentClientIds = object.mostRecentClientIds?.map(e => e) || []; + return message; + } + +}; + +function createBaseQueryAllVaultsRequest(): QueryAllVaultsRequest { + return { + pagination: undefined + }; +} + +export const QueryAllVaultsRequest = { + encode(message: QueryAllVaultsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllVaultsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllVaultsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllVaultsRequest { + const message = createBaseQueryAllVaultsRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllVaultsResponse(): QueryAllVaultsResponse { + return { + vaults: [], + pagination: undefined + }; +} + +export const QueryAllVaultsResponse = { + encode(message: QueryAllVaultsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.vaults) { + QueryVaultResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllVaultsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllVaultsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vaults.push(QueryVaultResponse.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllVaultsResponse { + const message = createBaseQueryAllVaultsResponse(); + message.vaults = object.vaults?.map(e => QueryVaultResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryMegavaultTotalSharesRequest(): QueryMegavaultTotalSharesRequest { + return {}; +} + +export const QueryMegavaultTotalSharesRequest = { + encode(_: QueryMegavaultTotalSharesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultTotalSharesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultTotalSharesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryMegavaultTotalSharesRequest { + const message = createBaseQueryMegavaultTotalSharesRequest(); + return message; + } + +}; + +function createBaseQueryMegavaultTotalSharesResponse(): QueryMegavaultTotalSharesResponse { + return { + totalShares: undefined + }; +} + +export const QueryMegavaultTotalSharesResponse = { + encode(message: QueryMegavaultTotalSharesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.totalShares !== undefined) { + NumShares.encode(message.totalShares, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultTotalSharesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryMegavaultTotalSharesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.totalShares = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultTotalSharesResponse { + const message = createBaseQueryMegavaultTotalSharesResponse(); + message.totalShares = object.totalShares !== undefined && object.totalShares !== null ? NumShares.fromPartial(object.totalShares) : undefined; + return message; + } + +}; + +function createBaseQueryMegavaultOwnerSharesRequest(): QueryMegavaultOwnerSharesRequest { + return { + address: "" + }; +} + +export const QueryMegavaultOwnerSharesRequest = { + encode(message: QueryMegavaultOwnerSharesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultOwnerSharesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultOwnerSharesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultOwnerSharesRequest { + const message = createBaseQueryMegavaultOwnerSharesRequest(); + message.address = object.address ?? 
""; + return message; + } + +}; + +function createBaseQueryMegavaultOwnerSharesResponse(): QueryMegavaultOwnerSharesResponse { + return { + address: "", + shares: undefined, + shareUnlocks: [], + equity: new Uint8Array(), + withdrawableEquity: new Uint8Array() + }; +} + +export const QueryMegavaultOwnerSharesResponse = { + encode(message: QueryMegavaultOwnerSharesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.shares !== undefined) { + NumShares.encode(message.shares, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.shareUnlocks) { + ShareUnlock.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.equity.length !== 0) { + writer.uint32(34).bytes(message.equity); + } + + if (message.withdrawableEquity.length !== 0) { + writer.uint32(42).bytes(message.withdrawableEquity); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultOwnerSharesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultOwnerSharesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.shares = NumShares.decode(reader, reader.uint32()); + break; + + case 3: + message.shareUnlocks.push(ShareUnlock.decode(reader, reader.uint32())); + break; + + case 4: + message.equity = reader.bytes(); + break; + + case 5: + message.withdrawableEquity = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultOwnerSharesResponse { + const message = createBaseQueryMegavaultOwnerSharesResponse(); + message.address = object.address ?? 
""; + message.shares = object.shares !== undefined && object.shares !== null ? NumShares.fromPartial(object.shares) : undefined; + message.shareUnlocks = object.shareUnlocks?.map(e => ShareUnlock.fromPartial(e)) || []; + message.equity = object.equity ?? new Uint8Array(); + message.withdrawableEquity = object.withdrawableEquity ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryMegavaultAllOwnerSharesRequest(): QueryMegavaultAllOwnerSharesRequest { + return { + pagination: undefined + }; +} + +export const QueryMegavaultAllOwnerSharesRequest = { + encode(message: QueryMegavaultAllOwnerSharesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultAllOwnerSharesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultAllOwnerSharesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultAllOwnerSharesRequest { + const message = createBaseQueryMegavaultAllOwnerSharesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryMegavaultAllOwnerSharesResponse(): QueryMegavaultAllOwnerSharesResponse { + return { + ownerShares: [], + pagination: undefined + }; +} + +export const QueryMegavaultAllOwnerSharesResponse = { + encode(message: QueryMegavaultAllOwnerSharesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.ownerShares) { + OwnerShare.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultAllOwnerSharesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultAllOwnerSharesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ownerShares.push(OwnerShare.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultAllOwnerSharesResponse { + const message = createBaseQueryMegavaultAllOwnerSharesResponse(); + message.ownerShares = object.ownerShares?.map(e => OwnerShare.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVaultParamsRequest(): QueryVaultParamsRequest { + return { + type: 0, + number: 0 + }; +} + +export const QueryVaultParamsRequest = { + encode(message: QueryVaultParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + + if (message.number !== 0) { + writer.uint32(16).uint32(message.number); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVaultParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVaultParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = (reader.int32() as any); + break; + + case 2: + message.number = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVaultParamsRequest { + const message = createBaseQueryVaultParamsRequest(); + message.type = object.type ?? 0; + message.number = object.number ?? 
0; + return message; + } + +}; + +function createBaseQueryVaultParamsResponse(): QueryVaultParamsResponse { + return { + vaultId: undefined, + vaultParams: undefined + }; +} + +export const QueryVaultParamsResponse = { + encode(message: QueryVaultParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(10).fork()).ldelim(); + } + + if (message.vaultParams !== undefined) { + VaultParams.encode(message.vaultParams, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVaultParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVaultParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 2: + message.vaultParams = VaultParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVaultParamsResponse { + const message = createBaseQueryVaultParamsResponse(); + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.vaultParams = object.vaultParams !== undefined && object.vaultParams !== null ? 
VaultParams.fromPartial(object.vaultParams) : undefined; + return message; + } + +}; + +function createBaseQueryMegavaultWithdrawalInfoRequest(): QueryMegavaultWithdrawalInfoRequest { + return { + sharesToWithdraw: undefined + }; +} + +export const QueryMegavaultWithdrawalInfoRequest = { + encode(message: QueryMegavaultWithdrawalInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sharesToWithdraw !== undefined) { + NumShares.encode(message.sharesToWithdraw, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultWithdrawalInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMegavaultWithdrawalInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sharesToWithdraw = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultWithdrawalInfoRequest { + const message = createBaseQueryMegavaultWithdrawalInfoRequest(); + message.sharesToWithdraw = object.sharesToWithdraw !== undefined && object.sharesToWithdraw !== null ? 
NumShares.fromPartial(object.sharesToWithdraw) : undefined; + return message; + } + +}; + +function createBaseQueryMegavaultWithdrawalInfoResponse(): QueryMegavaultWithdrawalInfoResponse { + return { + sharesToWithdraw: undefined, + expectedQuoteQuantums: new Uint8Array(), + megavaultEquity: new Uint8Array(), + totalShares: undefined + }; +} + +export const QueryMegavaultWithdrawalInfoResponse = { + encode(message: QueryMegavaultWithdrawalInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sharesToWithdraw !== undefined) { + NumShares.encode(message.sharesToWithdraw, writer.uint32(10).fork()).ldelim(); + } + + if (message.expectedQuoteQuantums.length !== 0) { + writer.uint32(18).bytes(message.expectedQuoteQuantums); + } + + if (message.megavaultEquity.length !== 0) { + writer.uint32(26).bytes(message.megavaultEquity); + } + + if (message.totalShares !== undefined) { + NumShares.encode(message.totalShares, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMegavaultWithdrawalInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryMegavaultWithdrawalInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sharesToWithdraw = NumShares.decode(reader, reader.uint32()); + break; + + case 2: + message.expectedQuoteQuantums = reader.bytes(); + break; + + case 3: + message.megavaultEquity = reader.bytes(); + break; + + case 4: + message.totalShares = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMegavaultWithdrawalInfoResponse { + const message = createBaseQueryMegavaultWithdrawalInfoResponse(); + message.sharesToWithdraw = object.sharesToWithdraw !== undefined && object.sharesToWithdraw !== null ? NumShares.fromPartial(object.sharesToWithdraw) : undefined; + message.expectedQuoteQuantums = object.expectedQuoteQuantums ?? new Uint8Array(); + message.megavaultEquity = object.megavaultEquity ?? new Uint8Array(); + message.totalShares = object.totalShares !== undefined && object.totalShares !== null ? NumShares.fromPartial(object.totalShares) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/share.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/share.ts new file mode 100644 index 00000000000..1fca9d5cf3d --- /dev/null +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/share.ts @@ -0,0 +1,278 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** NumShares represents the number of shares. */ + +export interface NumShares { + /** Number of shares. */ + numShares: Uint8Array; +} +/** NumShares represents the number of shares. */ + +export interface NumSharesSDKType { + /** Number of shares. */ + num_shares: Uint8Array; +} +/** OwnerShare is a type for owner shares. 
*/ + +export interface OwnerShare { + owner: string; + shares?: NumShares; +} +/** OwnerShare is a type for owner shares. */ + +export interface OwnerShareSDKType { + owner: string; + shares?: NumSharesSDKType; +} +/** OwnerShareUnlocks stores share unlocks for an owner. */ + +export interface OwnerShareUnlocks { + /** Address of the owner of below shares. */ + ownerAddress: string; + /** All share unlocks. */ + + shareUnlocks: ShareUnlock[]; +} +/** OwnerShareUnlocks stores share unlocks for an owner. */ + +export interface OwnerShareUnlocksSDKType { + /** Address of the owner of below shares. */ + owner_address: string; + /** All share unlocks. */ + + share_unlocks: ShareUnlockSDKType[]; +} +/** + * ShareUnlock stores a single instance of `shares` number of shares + * unlocking at block height `unlock_block_height`. + */ + +export interface ShareUnlock { + /** Number of shares to unlock. */ + shares?: NumShares; + /** Block height at which above shares unlock. */ + + unlockBlockHeight: number; +} +/** + * ShareUnlock stores a single instance of `shares` number of shares + * unlocking at block height `unlock_block_height`. + */ + +export interface ShareUnlockSDKType { + /** Number of shares to unlock. */ + shares?: NumSharesSDKType; + /** Block height at which above shares unlock. */ + + unlock_block_height: number; +} + +function createBaseNumShares(): NumShares { + return { + numShares: new Uint8Array() + }; +} + +export const NumShares = { + encode(message: NumShares, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.numShares.length !== 0) { + writer.uint32(18).bytes(message.numShares); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NumShares { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNumShares(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.numShares = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NumShares { + const message = createBaseNumShares(); + message.numShares = object.numShares ?? new Uint8Array(); + return message; + } + +}; + +function createBaseOwnerShare(): OwnerShare { + return { + owner: "", + shares: undefined + }; +} + +export const OwnerShare = { + encode(message: OwnerShare, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + + if (message.shares !== undefined) { + NumShares.encode(message.shares, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OwnerShare { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOwnerShare(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + + case 2: + message.shares = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OwnerShare { + const message = createBaseOwnerShare(); + message.owner = object.owner ?? ""; + message.shares = object.shares !== undefined && object.shares !== null ? 
NumShares.fromPartial(object.shares) : undefined; + return message; + } + +}; + +function createBaseOwnerShareUnlocks(): OwnerShareUnlocks { + return { + ownerAddress: "", + shareUnlocks: [] + }; +} + +export const OwnerShareUnlocks = { + encode(message: OwnerShareUnlocks, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ownerAddress !== "") { + writer.uint32(10).string(message.ownerAddress); + } + + for (const v of message.shareUnlocks) { + ShareUnlock.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OwnerShareUnlocks { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOwnerShareUnlocks(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ownerAddress = reader.string(); + break; + + case 2: + message.shareUnlocks.push(ShareUnlock.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OwnerShareUnlocks { + const message = createBaseOwnerShareUnlocks(); + message.ownerAddress = object.ownerAddress ?? 
""; + message.shareUnlocks = object.shareUnlocks?.map(e => ShareUnlock.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseShareUnlock(): ShareUnlock { + return { + shares: undefined, + unlockBlockHeight: 0 + }; +} + +export const ShareUnlock = { + encode(message: ShareUnlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.shares !== undefined) { + NumShares.encode(message.shares, writer.uint32(10).fork()).ldelim(); + } + + if (message.unlockBlockHeight !== 0) { + writer.uint32(16).uint32(message.unlockBlockHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ShareUnlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseShareUnlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.shares = NumShares.decode(reader, reader.uint32()); + break; + + case 2: + message.unlockBlockHeight = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ShareUnlock { + const message = createBaseShareUnlock(); + message.shares = object.shares !== undefined && object.shares !== null ? NumShares.fromPartial(object.shares) : undefined; + message.unlockBlockHeight = object.unlockBlockHeight ?? 
0; + return message; + } + +}; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.rpc.msg.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.rpc.msg.ts index e4028957d6e..fea36faaa43 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.rpc.msg.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.rpc.msg.ts @@ -1,24 +1,97 @@ import { Rpc } from "../../helpers"; import * as _m0 from "protobufjs/minimal"; -import { MsgDepositToVault, MsgDepositToVaultResponse } from "./tx"; +import { MsgDepositToMegavault, MsgDepositToMegavaultResponse, MsgWithdrawFromMegavault, MsgWithdrawFromMegavaultResponse, MsgUpdateDefaultQuotingParams, MsgUpdateDefaultQuotingParamsResponse, MsgUpdateOperatorParams, MsgUpdateOperatorParamsResponse, MsgSetVaultParams, MsgSetVaultParamsResponse, MsgUnlockShares, MsgUnlockSharesResponse, MsgAllocateToVault, MsgAllocateToVaultResponse, MsgRetrieveFromVault, MsgRetrieveFromVaultResponse } from "./tx"; /** Msg defines the Msg service. */ export interface Msg { - /** DepositToVault deposits funds into a vault. */ - depositToVault(request: MsgDepositToVault): Promise; + /** DepositToMegavault deposits funds into megavault. */ + depositToMegavault(request: MsgDepositToMegavault): Promise; + /** WithdrawFromMegavault withdraws shares from megavault. */ + + withdrawFromMegavault(request: MsgWithdrawFromMegavault): Promise; + /** UpdateDefaultQuotingParams updates the default quoting params in state. */ + + updateDefaultQuotingParams(request: MsgUpdateDefaultQuotingParams): Promise; + /** UpdateOperatorParams sets the parameters regarding megavault operator. */ + + updateOperatorParams(request: MsgUpdateOperatorParams): Promise; + /** SetVaultParams sets the parameters of a specific vault. 
*/ + + setVaultParams(request: MsgSetVaultParams): Promise; + /** + * UnlockShares unlocks an owner's shares that are due to unlock by the block + * height that this transaction is included in. + */ + + unlockShares(request: MsgUnlockShares): Promise; + /** AllocateToVault allocates funds from main vault to a vault. */ + + allocateToVault(request: MsgAllocateToVault): Promise; + /** RetrieveFromVault retrieves funds from a vault to main vault. */ + + retrieveFromVault(request: MsgRetrieveFromVault): Promise; } export class MsgClientImpl implements Msg { private readonly rpc: Rpc; constructor(rpc: Rpc) { this.rpc = rpc; - this.depositToVault = this.depositToVault.bind(this); + this.depositToMegavault = this.depositToMegavault.bind(this); + this.withdrawFromMegavault = this.withdrawFromMegavault.bind(this); + this.updateDefaultQuotingParams = this.updateDefaultQuotingParams.bind(this); + this.updateOperatorParams = this.updateOperatorParams.bind(this); + this.setVaultParams = this.setVaultParams.bind(this); + this.unlockShares = this.unlockShares.bind(this); + this.allocateToVault = this.allocateToVault.bind(this); + this.retrieveFromVault = this.retrieveFromVault.bind(this); + } + + depositToMegavault(request: MsgDepositToMegavault): Promise { + const data = MsgDepositToMegavault.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "DepositToMegavault", data); + return promise.then(data => MsgDepositToMegavaultResponse.decode(new _m0.Reader(data))); + } + + withdrawFromMegavault(request: MsgWithdrawFromMegavault): Promise { + const data = MsgWithdrawFromMegavault.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "WithdrawFromMegavault", data); + return promise.then(data => MsgWithdrawFromMegavaultResponse.decode(new _m0.Reader(data))); + } + + updateDefaultQuotingParams(request: MsgUpdateDefaultQuotingParams): Promise { + const data = MsgUpdateDefaultQuotingParams.encode(request).finish(); + 
const promise = this.rpc.request("dydxprotocol.vault.Msg", "UpdateDefaultQuotingParams", data); + return promise.then(data => MsgUpdateDefaultQuotingParamsResponse.decode(new _m0.Reader(data))); + } + + updateOperatorParams(request: MsgUpdateOperatorParams): Promise { + const data = MsgUpdateOperatorParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "UpdateOperatorParams", data); + return promise.then(data => MsgUpdateOperatorParamsResponse.decode(new _m0.Reader(data))); + } + + setVaultParams(request: MsgSetVaultParams): Promise { + const data = MsgSetVaultParams.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "SetVaultParams", data); + return promise.then(data => MsgSetVaultParamsResponse.decode(new _m0.Reader(data))); + } + + unlockShares(request: MsgUnlockShares): Promise { + const data = MsgUnlockShares.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "UnlockShares", data); + return promise.then(data => MsgUnlockSharesResponse.decode(new _m0.Reader(data))); + } + + allocateToVault(request: MsgAllocateToVault): Promise { + const data = MsgAllocateToVault.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "AllocateToVault", data); + return promise.then(data => MsgAllocateToVaultResponse.decode(new _m0.Reader(data))); } - depositToVault(request: MsgDepositToVault): Promise { - const data = MsgDepositToVault.encode(request).finish(); - const promise = this.rpc.request("dydxprotocol.vault.Msg", "DepositToVault", data); - return promise.then(data => MsgDepositToVaultResponse.decode(new _m0.Reader(data))); + retrieveFromVault(request: MsgRetrieveFromVault): Promise { + const data = MsgRetrieveFromVault.encode(request).finish(); + const promise = this.rpc.request("dydxprotocol.vault.Msg", "RetrieveFromVault", data); + return promise.then(data => MsgRetrieveFromVaultResponse.decode(new _m0.Reader(data))); } } \ No 
newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.ts index b3587f335bb..3a8212755b9 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/tx.ts @@ -1,82 +1,470 @@ -import { VaultId, VaultIdSDKType } from "./vault"; import { SubaccountId, SubaccountIdSDKType } from "../subaccounts/subaccount"; +import { NumShares, NumSharesSDKType } from "./share"; +import { QuotingParams, QuotingParamsSDKType, OperatorParams, OperatorParamsSDKType, VaultParams, VaultParamsSDKType, Params, ParamsSDKType } from "./params"; +import { VaultId, VaultIdSDKType } from "./vault"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; -/** MsgDepositToVault is the Msg/DepositToVault request type. */ +/** + * MsgDepositToMegavault deposits the specified asset from the subaccount to + * megavault. + */ -export interface MsgDepositToVault { - /** The vault to deposit into. */ - vaultId?: VaultId; +export interface MsgDepositToMegavault { /** The subaccount to deposit from. */ - subaccountId?: SubaccountId; /** Number of quote quantums to deposit. */ quoteQuantums: Uint8Array; } -/** MsgDepositToVault is the Msg/DepositToVault request type. */ +/** + * MsgDepositToMegavault deposits the specified asset from the subaccount to + * megavault. + */ -export interface MsgDepositToVaultSDKType { - /** The vault to deposit into. */ - vault_id?: VaultIdSDKType; +export interface MsgDepositToMegavaultSDKType { /** The subaccount to deposit from. */ - subaccount_id?: SubaccountIdSDKType; /** Number of quote quantums to deposit. */ quote_quantums: Uint8Array; } -/** MsgDepositToVaultResponse is the Msg/DepositToVault response type. */ +/** MsgDepositToMegavaultResponse is the Msg/DepositToMegavault response type. 
*/ + +export interface MsgDepositToMegavaultResponse { + /** The number of shares minted from the deposit. */ + mintedShares?: NumShares; +} +/** MsgDepositToMegavaultResponse is the Msg/DepositToMegavault response type. */ + +export interface MsgDepositToMegavaultResponseSDKType { + /** The number of shares minted from the deposit. */ + minted_shares?: NumSharesSDKType; +} +/** + * MsgWithdrawFromMegavault withdraws the specified shares from megavault to + * a subaccount. + */ + +export interface MsgWithdrawFromMegavault { + /** The subaccount to withdraw to. */ + subaccountId?: SubaccountId; + /** Number of shares to withdraw. */ + + shares?: NumShares; + /** + * The minimum number of quote quantums above shares should redeem, i.e. + * transaction fails if above shares redeem less than min_quote_quantums. + */ + + minQuoteQuantums: Uint8Array; +} +/** + * MsgWithdrawFromMegavault withdraws the specified shares from megavault to + * a subaccount. + */ + +export interface MsgWithdrawFromMegavaultSDKType { + /** The subaccount to withdraw to. */ + subaccount_id?: SubaccountIdSDKType; + /** Number of shares to withdraw. */ + + shares?: NumSharesSDKType; + /** + * The minimum number of quote quantums above shares should redeem, i.e. + * transaction fails if above shares redeem less than min_quote_quantums. + */ + + min_quote_quantums: Uint8Array; +} +/** + * MsgWithdrawFromMegavaultResponse is the Msg/WithdrawFromMegavault response + * type. + */ + +export interface MsgWithdrawFromMegavaultResponse { + /** The number of quote quantums redeemed from the withdrawal. */ + quoteQuantums: Uint8Array; +} +/** + * MsgWithdrawFromMegavaultResponse is the Msg/WithdrawFromMegavault response + * type. + */ + +export interface MsgWithdrawFromMegavaultResponseSDKType { + /** The number of quote quantums redeemed from the withdrawal. */ + quote_quantums: Uint8Array; +} +/** + * MsgUpdateDefaultQuotingParams is the Msg/UpdateDefaultQuotingParams request + * type. 
+ */ + +export interface MsgUpdateDefaultQuotingParams { + authority: string; + /** The quoting parameters to update to. Every field must be set. */ + + defaultQuotingParams?: QuotingParams; +} +/** + * MsgUpdateDefaultQuotingParams is the Msg/UpdateDefaultQuotingParams request + * type. + */ + +export interface MsgUpdateDefaultQuotingParamsSDKType { + authority: string; + /** The quoting parameters to update to. Every field must be set. */ + + default_quoting_params?: QuotingParamsSDKType; +} +/** + * MsgUpdateDefaultQuotingParamsResponse is the Msg/UpdateDefaultQuotingParams + * response type. + */ + +export interface MsgUpdateDefaultQuotingParamsResponse {} +/** + * MsgUpdateDefaultQuotingParamsResponse is the Msg/UpdateDefaultQuotingParams + * response type. + */ + +export interface MsgUpdateDefaultQuotingParamsResponseSDKType {} +/** MsgSetVaultParams is the Msg/SetVaultParams request type. */ + +export interface MsgSetVaultParams { + authority: string; + /** The vault to set params of. */ + + vaultId?: VaultId; + /** The parameters to set. */ + + vaultParams?: VaultParams; +} +/** MsgSetVaultParams is the Msg/SetVaultParams request type. */ + +export interface MsgSetVaultParamsSDKType { + authority: string; + /** The vault to set params of. */ + + vault_id?: VaultIdSDKType; + /** The parameters to set. */ + + vault_params?: VaultParamsSDKType; +} +/** MsgSetVaultParamsResponse is the Msg/SetVaultParams response type. */ + +export interface MsgSetVaultParamsResponse {} +/** MsgSetVaultParamsResponse is the Msg/SetVaultParams response type. */ + +export interface MsgSetVaultParamsResponseSDKType {} +/** MsgUnlockShares is the Msg/UnlockShares request type. */ + +export interface MsgUnlockShares { + authority: string; + /** Address of the owner to unlock shares of. */ + + ownerAddress: string; +} +/** MsgUnlockShares is the Msg/UnlockShares request type. 
*/ + +export interface MsgUnlockSharesSDKType { + authority: string; + /** Address of the owner to unlock shares of. */ + + owner_address: string; +} +/** MsgUnlockSharesResponse is the Msg/UnlockShares response type. */ + +export interface MsgUnlockSharesResponse { + /** The number of shares unlocked. */ + unlockedShares?: NumShares; +} +/** MsgUnlockSharesResponse is the Msg/UnlockShares response type. */ + +export interface MsgUnlockSharesResponseSDKType { + /** The number of shares unlocked. */ + unlocked_shares?: NumSharesSDKType; +} +/** MsgUpdateOperatorParams is the Msg/UpdateOperatorParams request type. */ + +export interface MsgUpdateOperatorParams { + authority: string; + /** Operator parameters to set. */ + + params?: OperatorParams; +} +/** MsgUpdateOperatorParams is the Msg/UpdateOperatorParams request type. */ + +export interface MsgUpdateOperatorParamsSDKType { + authority: string; + /** Operator parameters to set. */ + + params?: OperatorParamsSDKType; +} +/** MsgUpdateVaultParamsResponse is the Msg/UpdateOperatorParams response type. */ + +export interface MsgUpdateOperatorParamsResponse {} +/** MsgUpdateVaultParamsResponse is the Msg/UpdateOperatorParams response type. */ + +export interface MsgUpdateOperatorParamsResponseSDKType {} +/** MsgAllocateToVault is the Msg/AllocateToVault request type. */ + +export interface MsgAllocateToVault { + authority: string; + /** The vault to allocate to. */ + + vaultId?: VaultId; + /** Number of quote quantums to allocate. */ + + quoteQuantums: Uint8Array; +} +/** MsgAllocateToVault is the Msg/AllocateToVault request type. */ + +export interface MsgAllocateToVaultSDKType { + authority: string; + /** The vault to allocate to. */ + + vault_id?: VaultIdSDKType; + /** Number of quote quantums to allocate. */ + + quote_quantums: Uint8Array; +} +/** MsgAllocateToVaultResponse is the Msg/AllocateToVault response type. 
*/ + +export interface MsgAllocateToVaultResponse {} +/** MsgAllocateToVaultResponse is the Msg/AllocateToVault response type. */ + +export interface MsgAllocateToVaultResponseSDKType {} +/** MsgRetrieveFromVault is the Msg/RetrieveFromVault request type. */ + +export interface MsgRetrieveFromVault { + authority: string; + /** The vault to retrieve from. */ + + vaultId?: VaultId; + /** Number of quote quantums to retrieve. */ + + quoteQuantums: Uint8Array; +} +/** MsgRetrieveFromVault is the Msg/RetrieveFromVault request type. */ + +export interface MsgRetrieveFromVaultSDKType { + authority: string; + /** The vault to retrieve from. */ + + vault_id?: VaultIdSDKType; + /** Number of quote quantums to retrieve. */ + + quote_quantums: Uint8Array; +} +/** MsgRetrieveFromVaultResponse is the Msg/RetrieveFromVault response type. */ + +export interface MsgRetrieveFromVaultResponse {} +/** MsgRetrieveFromVaultResponse is the Msg/RetrieveFromVault response type. */ + +export interface MsgRetrieveFromVaultResponseSDKType {} +/** + * MsgUpdateParams is the Msg/UpdateParams request type. + * Deprecated since v6.x as is replaced by MsgUpdateDefaultQuotingParams. + */ + +/** @deprecated */ + +export interface MsgUpdateParams { + authority: string; + /** The parameters to update. Each field must be set. */ + + params?: Params; +} +/** + * MsgUpdateParams is the Msg/UpdateParams request type. + * Deprecated since v6.x as is replaced by MsgUpdateDefaultQuotingParams. + */ -export interface MsgDepositToVaultResponse {} -/** MsgDepositToVaultResponse is the Msg/DepositToVault response type. */ +/** @deprecated */ -export interface MsgDepositToVaultResponseSDKType {} +export interface MsgUpdateParamsSDKType { + authority: string; + /** The parameters to update. Each field must be set. */ -function createBaseMsgDepositToVault(): MsgDepositToVault { + params?: ParamsSDKType; +} +/** + * MsgSetVaultQuotingParams is the Msg/SetVaultQuotingParams request type. 
+ * Deprecated since v6.x as is replaced by MsgSetVaultParams. + */ + +/** @deprecated */ + +export interface MsgSetVaultQuotingParams { + authority: string; + /** The vault to set quoting params of. */ + + vaultId?: VaultId; + /** The quoting parameters to set. Each field must be set. */ + + quotingParams?: QuotingParams; +} +/** + * MsgSetVaultQuotingParams is the Msg/SetVaultQuotingParams request type. + * Deprecated since v6.x as is replaced by MsgSetVaultParams. + */ + +/** @deprecated */ + +export interface MsgSetVaultQuotingParamsSDKType { + authority: string; + /** The vault to set quoting params of. */ + + vault_id?: VaultIdSDKType; + /** The quoting parameters to set. Each field must be set. */ + + quoting_params?: QuotingParamsSDKType; +} + +function createBaseMsgDepositToMegavault(): MsgDepositToMegavault { return { - vaultId: undefined, subaccountId: undefined, quoteQuantums: new Uint8Array() }; } -export const MsgDepositToVault = { - encode(message: MsgDepositToVault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.vaultId !== undefined) { - VaultId.encode(message.vaultId, writer.uint32(10).fork()).ldelim(); - } - +export const MsgDepositToMegavault = { + encode(message: MsgDepositToMegavault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { if (message.subaccountId !== undefined) { - SubaccountId.encode(message.subaccountId, writer.uint32(18).fork()).ldelim(); + SubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); } if (message.quoteQuantums.length !== 0) { - writer.uint32(26).bytes(message.quoteQuantums); + writer.uint32(18).bytes(message.quoteQuantums); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositToVault { + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositToMegavault { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMsgDepositToVault(); + const message = createBaseMsgDepositToMegavault(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.vaultId = VaultId.decode(reader, reader.uint32()); + message.subaccountId = SubaccountId.decode(reader, reader.uint32()); break; case 2: + message.quoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDepositToMegavault { + const message = createBaseMsgDepositToMegavault(); + message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? SubaccountId.fromPartial(object.subaccountId) : undefined; + message.quoteQuantums = object.quoteQuantums ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgDepositToMegavaultResponse(): MsgDepositToMegavaultResponse { + return { + mintedShares: undefined + }; +} + +export const MsgDepositToMegavaultResponse = { + encode(message: MsgDepositToMegavaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mintedShares !== undefined) { + NumShares.encode(message.mintedShares, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositToMegavaultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDepositToMegavaultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.mintedShares = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDepositToMegavaultResponse { + const message = createBaseMsgDepositToMegavaultResponse(); + message.mintedShares = object.mintedShares !== undefined && object.mintedShares !== null ? NumShares.fromPartial(object.mintedShares) : undefined; + return message; + } + +}; + +function createBaseMsgWithdrawFromMegavault(): MsgWithdrawFromMegavault { + return { + subaccountId: undefined, + shares: undefined, + minQuoteQuantums: new Uint8Array() + }; +} + +export const MsgWithdrawFromMegavault = { + encode(message: MsgWithdrawFromMegavault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subaccountId !== undefined) { + SubaccountId.encode(message.subaccountId, writer.uint32(10).fork()).ldelim(); + } + + if (message.shares !== undefined) { + NumShares.encode(message.shares, writer.uint32(18).fork()).ldelim(); + } + + if (message.minQuoteQuantums.length !== 0) { + writer.uint32(26).bytes(message.minQuoteQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawFromMegavault { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgWithdrawFromMegavault(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: message.subaccountId = SubaccountId.decode(reader, reader.uint32()); break; + case 2: + message.shares = NumShares.decode(reader, reader.uint32()); + break; + case 3: - message.quoteQuantums = reader.bytes(); + message.minQuoteQuantums = reader.bytes(); break; default: @@ -88,34 +476,98 @@ export const MsgDepositToVault = { return message; }, - fromPartial(object: DeepPartial): MsgDepositToVault { - const message = createBaseMsgDepositToVault(); - message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + fromPartial(object: DeepPartial): MsgWithdrawFromMegavault { + const message = createBaseMsgWithdrawFromMegavault(); message.subaccountId = object.subaccountId !== undefined && object.subaccountId !== null ? SubaccountId.fromPartial(object.subaccountId) : undefined; + message.shares = object.shares !== undefined && object.shares !== null ? NumShares.fromPartial(object.shares) : undefined; + message.minQuoteQuantums = object.minQuoteQuantums ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgWithdrawFromMegavaultResponse(): MsgWithdrawFromMegavaultResponse { + return { + quoteQuantums: new Uint8Array() + }; +} + +export const MsgWithdrawFromMegavaultResponse = { + encode(message: MsgWithdrawFromMegavaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.quoteQuantums.length !== 0) { + writer.uint32(10).bytes(message.quoteQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawFromMegavaultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgWithdrawFromMegavaultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.quoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawFromMegavaultResponse { + const message = createBaseMsgWithdrawFromMegavaultResponse(); message.quoteQuantums = object.quoteQuantums ?? new Uint8Array(); return message; } }; -function createBaseMsgDepositToVaultResponse(): MsgDepositToVaultResponse { - return {}; +function createBaseMsgUpdateDefaultQuotingParams(): MsgUpdateDefaultQuotingParams { + return { + authority: "", + defaultQuotingParams: undefined + }; } -export const MsgDepositToVaultResponse = { - encode(_: MsgDepositToVaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const MsgUpdateDefaultQuotingParams = { + encode(message: MsgUpdateDefaultQuotingParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.defaultQuotingParams !== undefined) { + QuotingParams.encode(message.defaultQuotingParams, writer.uint32(18).fork()).ldelim(); + } + return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositToVaultResponse { + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateDefaultQuotingParams { const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMsgDepositToVaultResponse(); + const message = createBaseMsgUpdateDefaultQuotingParams(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.defaultQuotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + default: reader.skipType(tag & 7); break; @@ -125,8 +577,650 @@ export const MsgDepositToVaultResponse = { return message; }, - fromPartial(_: DeepPartial): MsgDepositToVaultResponse { - const message = createBaseMsgDepositToVaultResponse(); + fromPartial(object: DeepPartial): MsgUpdateDefaultQuotingParams { + const message = createBaseMsgUpdateDefaultQuotingParams(); + message.authority = object.authority ?? ""; + message.defaultQuotingParams = object.defaultQuotingParams !== undefined && object.defaultQuotingParams !== null ? QuotingParams.fromPartial(object.defaultQuotingParams) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateDefaultQuotingParamsResponse(): MsgUpdateDefaultQuotingParamsResponse { + return {}; +} + +export const MsgUpdateDefaultQuotingParamsResponse = { + encode(_: MsgUpdateDefaultQuotingParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateDefaultQuotingParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateDefaultQuotingParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateDefaultQuotingParamsResponse { + const message = createBaseMsgUpdateDefaultQuotingParamsResponse(); + return message; + } + +}; + +function createBaseMsgSetVaultParams(): MsgSetVaultParams { + return { + authority: "", + vaultId: undefined, + vaultParams: undefined + }; +} + +export const MsgSetVaultParams = { + encode(message: MsgSetVaultParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(18).fork()).ldelim(); + } + + if (message.vaultParams !== undefined) { + VaultParams.encode(message.vaultParams, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetVaultParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetVaultParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 3: + message.vaultParams = VaultParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetVaultParams { + const message = createBaseMsgSetVaultParams(); + message.authority = object.authority ?? ""; + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? 
VaultId.fromPartial(object.vaultId) : undefined; + message.vaultParams = object.vaultParams !== undefined && object.vaultParams !== null ? VaultParams.fromPartial(object.vaultParams) : undefined; + return message; + } + +}; + +function createBaseMsgSetVaultParamsResponse(): MsgSetVaultParamsResponse { + return {}; +} + +export const MsgSetVaultParamsResponse = { + encode(_: MsgSetVaultParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetVaultParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetVaultParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetVaultParamsResponse { + const message = createBaseMsgSetVaultParamsResponse(); + return message; + } + +}; + +function createBaseMsgUnlockShares(): MsgUnlockShares { + return { + authority: "", + ownerAddress: "" + }; +} + +export const MsgUnlockShares = { + encode(message: MsgUnlockShares, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.ownerAddress !== "") { + writer.uint32(18).string(message.ownerAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnlockShares { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUnlockShares(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.ownerAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUnlockShares { + const message = createBaseMsgUnlockShares(); + message.authority = object.authority ?? ""; + message.ownerAddress = object.ownerAddress ?? ""; + return message; + } + +}; + +function createBaseMsgUnlockSharesResponse(): MsgUnlockSharesResponse { + return { + unlockedShares: undefined + }; +} + +export const MsgUnlockSharesResponse = { + encode(message: MsgUnlockSharesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.unlockedShares !== undefined) { + NumShares.encode(message.unlockedShares, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnlockSharesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUnlockSharesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.unlockedShares = NumShares.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUnlockSharesResponse { + const message = createBaseMsgUnlockSharesResponse(); + message.unlockedShares = object.unlockedShares !== undefined && object.unlockedShares !== null ? 
NumShares.fromPartial(object.unlockedShares) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateOperatorParams(): MsgUpdateOperatorParams { + return { + authority: "", + params: undefined + }; +} + +export const MsgUpdateOperatorParams = { + encode(message: MsgUpdateOperatorParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.params !== undefined) { + OperatorParams.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateOperatorParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateOperatorParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.params = OperatorParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateOperatorParams { + const message = createBaseMsgUpdateOperatorParams(); + message.authority = object.authority ?? ""; + message.params = object.params !== undefined && object.params !== null ? OperatorParams.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateOperatorParamsResponse(): MsgUpdateOperatorParamsResponse { + return {}; +} + +export const MsgUpdateOperatorParamsResponse = { + encode(_: MsgUpdateOperatorParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateOperatorParamsResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateOperatorParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateOperatorParamsResponse { + const message = createBaseMsgUpdateOperatorParamsResponse(); + return message; + } + +}; + +function createBaseMsgAllocateToVault(): MsgAllocateToVault { + return { + authority: "", + vaultId: undefined, + quoteQuantums: new Uint8Array() + }; +} + +export const MsgAllocateToVault = { + encode(message: MsgAllocateToVault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(18).fork()).ldelim(); + } + + if (message.quoteQuantums.length !== 0) { + writer.uint32(26).bytes(message.quoteQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAllocateToVault { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAllocateToVault(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 3: + message.quoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgAllocateToVault { + const message = createBaseMsgAllocateToVault(); + message.authority = object.authority ?? ""; + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? 
VaultId.fromPartial(object.vaultId) : undefined; + message.quoteQuantums = object.quoteQuantums ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgAllocateToVaultResponse(): MsgAllocateToVaultResponse { + return {}; +} + +export const MsgAllocateToVaultResponse = { + encode(_: MsgAllocateToVaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAllocateToVaultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAllocateToVaultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgAllocateToVaultResponse { + const message = createBaseMsgAllocateToVaultResponse(); + return message; + } + +}; + +function createBaseMsgRetrieveFromVault(): MsgRetrieveFromVault { + return { + authority: "", + vaultId: undefined, + quoteQuantums: new Uint8Array() + }; +} + +export const MsgRetrieveFromVault = { + encode(message: MsgRetrieveFromVault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(18).fork()).ldelim(); + } + + if (message.quoteQuantums.length !== 0) { + writer.uint32(26).bytes(message.quoteQuantums); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRetrieveFromVault { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRetrieveFromVault(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 3: + message.quoteQuantums = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRetrieveFromVault { + const message = createBaseMsgRetrieveFromVault(); + message.authority = object.authority ?? ""; + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.quoteQuantums = object.quoteQuantums ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgRetrieveFromVaultResponse(): MsgRetrieveFromVaultResponse { + return {}; +} + +export const MsgRetrieveFromVaultResponse = { + encode(_: MsgRetrieveFromVaultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRetrieveFromVaultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRetrieveFromVaultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgRetrieveFromVaultResponse { + const message = createBaseMsgRetrieveFromVaultResponse(); + return message; + } + +}; + +function createBaseMsgUpdateParams(): MsgUpdateParams { + return { + authority: "", + params: undefined + }; +} + +export const MsgUpdateParams = { + encode(message: MsgUpdateParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateParams { + const message = createBaseMsgUpdateParams(); + message.authority = object.authority ?? ""; + message.params = object.params !== undefined && object.params !== null ? 
Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseMsgSetVaultQuotingParams(): MsgSetVaultQuotingParams { + return { + authority: "", + vaultId: undefined, + quotingParams: undefined + }; +} + +export const MsgSetVaultQuotingParams = { + encode(message: MsgSetVaultQuotingParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.vaultId !== undefined) { + VaultId.encode(message.vaultId, writer.uint32(18).fork()).ldelim(); + } + + if (message.quotingParams !== undefined) { + QuotingParams.encode(message.quotingParams, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetVaultQuotingParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetVaultQuotingParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.vaultId = VaultId.decode(reader, reader.uint32()); + break; + + case 3: + message.quotingParams = QuotingParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetVaultQuotingParams { + const message = createBaseMsgSetVaultQuotingParams(); + message.authority = object.authority ?? ""; + message.vaultId = object.vaultId !== undefined && object.vaultId !== null ? VaultId.fromPartial(object.vaultId) : undefined; + message.quotingParams = object.quotingParams !== undefined && object.quotingParams !== null ? 
QuotingParams.fromPartial(object.quotingParams) : undefined; return message; } diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/vault.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/vault.ts index cc93d62a580..0992b3373a1 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/vault.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/vault/vault.ts @@ -49,6 +49,100 @@ export function vaultTypeToJSON(object: VaultType): string { return "UNRECOGNIZED"; } } +/** VaultStatus represents the status of a vault. */ + +export enum VaultStatus { + /** VAULT_STATUS_UNSPECIFIED - Default value, invalid and unused. */ + VAULT_STATUS_UNSPECIFIED = 0, + + /** + * VAULT_STATUS_DEACTIVATED - Don’t place orders. Does not count toward global vault balances. + * A vault can only be set to this status if its equity is non-positive. + */ + VAULT_STATUS_DEACTIVATED = 1, + + /** VAULT_STATUS_STAND_BY - Don’t place orders. Does count towards global vault balances. */ + VAULT_STATUS_STAND_BY = 2, + + /** VAULT_STATUS_QUOTING - Places orders on both sides of the book. */ + VAULT_STATUS_QUOTING = 3, + + /** VAULT_STATUS_CLOSE_ONLY - Only place orders that close the position. */ + VAULT_STATUS_CLOSE_ONLY = 4, + UNRECOGNIZED = -1, +} +/** VaultStatus represents the status of a vault. */ + +export enum VaultStatusSDKType { + /** VAULT_STATUS_UNSPECIFIED - Default value, invalid and unused. */ + VAULT_STATUS_UNSPECIFIED = 0, + + /** + * VAULT_STATUS_DEACTIVATED - Don’t place orders. Does not count toward global vault balances. + * A vault can only be set to this status if its equity is non-positive. + */ + VAULT_STATUS_DEACTIVATED = 1, + + /** VAULT_STATUS_STAND_BY - Don’t place orders. Does count towards global vault balances. */ + VAULT_STATUS_STAND_BY = 2, + + /** VAULT_STATUS_QUOTING - Places orders on both sides of the book. */ + VAULT_STATUS_QUOTING = 3, + + /** VAULT_STATUS_CLOSE_ONLY - Only place orders that close the position. 
*/ + VAULT_STATUS_CLOSE_ONLY = 4, + UNRECOGNIZED = -1, +} +export function vaultStatusFromJSON(object: any): VaultStatus { + switch (object) { + case 0: + case "VAULT_STATUS_UNSPECIFIED": + return VaultStatus.VAULT_STATUS_UNSPECIFIED; + + case 1: + case "VAULT_STATUS_DEACTIVATED": + return VaultStatus.VAULT_STATUS_DEACTIVATED; + + case 2: + case "VAULT_STATUS_STAND_BY": + return VaultStatus.VAULT_STATUS_STAND_BY; + + case 3: + case "VAULT_STATUS_QUOTING": + return VaultStatus.VAULT_STATUS_QUOTING; + + case 4: + case "VAULT_STATUS_CLOSE_ONLY": + return VaultStatus.VAULT_STATUS_CLOSE_ONLY; + + case -1: + case "UNRECOGNIZED": + default: + return VaultStatus.UNRECOGNIZED; + } +} +export function vaultStatusToJSON(object: VaultStatus): string { + switch (object) { + case VaultStatus.VAULT_STATUS_UNSPECIFIED: + return "VAULT_STATUS_UNSPECIFIED"; + + case VaultStatus.VAULT_STATUS_DEACTIVATED: + return "VAULT_STATUS_DEACTIVATED"; + + case VaultStatus.VAULT_STATUS_STAND_BY: + return "VAULT_STATUS_STAND_BY"; + + case VaultStatus.VAULT_STATUS_QUOTING: + return "VAULT_STATUS_QUOTING"; + + case VaultStatus.VAULT_STATUS_CLOSE_ONLY: + return "VAULT_STATUS_CLOSE_ONLY"; + + case VaultStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} /** VaultId uniquely identifies a vault by its type and number. */ export interface VaultId { @@ -67,18 +161,6 @@ export interface VaultIdSDKType { number: number; } -/** NumShares represents the number of shares in a vault. */ - -export interface NumShares { - /** Number of shares. */ - numShares: Uint8Array; -} -/** NumShares represents the number of shares in a vault. */ - -export interface NumSharesSDKType { - /** Number of shares. 
*/ - num_shares: Uint8Array; -} function createBaseVaultId(): VaultId { return { @@ -133,49 +215,4 @@ export const VaultId = { return message; } -}; - -function createBaseNumShares(): NumShares { - return { - numShares: new Uint8Array() - }; -} - -export const NumShares = { - encode(message: NumShares, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.numShares.length !== 0) { - writer.uint32(10).bytes(message.numShares); - } - - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): NumShares { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseNumShares(); - - while (reader.pos < end) { - const tag = reader.uint32(); - - switch (tag >>> 3) { - case 1: - message.numShares = reader.bytes(); - break; - - default: - reader.skipType(tag & 7); - break; - } - } - - return message; - }, - - fromPartial(object: DeepPartial): NumShares { - const message = createBaseNumShares(); - message.numShares = object.numShares ?? 
new Uint8Array(); - return message; - } - }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/gogoproto/bundle.ts b/indexer/packages/v4-protos/src/codegen/gogoproto/bundle.ts index 376d487cb4d..7268721d904 100644 --- a/indexer/packages/v4-protos/src/codegen/gogoproto/bundle.ts +++ b/indexer/packages/v4-protos/src/codegen/gogoproto/bundle.ts @@ -1,3 +1,3 @@ -import * as _102 from "./gogo"; -export const gogoproto = { ..._102 +import * as _130 from "./gogo"; +export const gogoproto = { ..._130 }; \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/codegen/google/bundle.ts b/indexer/packages/v4-protos/src/codegen/google/bundle.ts index bb317adb1e7..c730dc4e520 100644 --- a/indexer/packages/v4-protos/src/codegen/google/bundle.ts +++ b/indexer/packages/v4-protos/src/codegen/google/bundle.ts @@ -1,16 +1,16 @@ -import * as _103 from "./api/annotations"; -import * as _104 from "./api/http"; -import * as _105 from "./protobuf/descriptor"; -import * as _106 from "./protobuf/duration"; -import * as _107 from "./protobuf/timestamp"; -import * as _108 from "./protobuf/any"; +import * as _131 from "./api/annotations"; +import * as _132 from "./api/http"; +import * as _133 from "./protobuf/descriptor"; +import * as _134 from "./protobuf/duration"; +import * as _135 from "./protobuf/timestamp"; +import * as _136 from "./protobuf/any"; export namespace google { - export const api = { ..._103, - ..._104 + export const api = { ..._131, + ..._132 }; - export const protobuf = { ..._105, - ..._106, - ..._107, - ..._108 + export const protobuf = { ..._133, + ..._134, + ..._135, + ..._136 }; } \ No newline at end of file diff --git a/indexer/packages/v4-protos/src/index.ts b/indexer/packages/v4-protos/src/index.ts index 6d05ae5fc48..81c6df94f5a 100644 --- a/indexer/packages/v4-protos/src/index.ts +++ b/indexer/packages/v4-protos/src/index.ts @@ -16,3 +16,5 @@ export * from './codegen/google/protobuf/timestamp'; export * from 
'./codegen/dydxprotocol/indexer/protocol/v1/clob'; export * from './codegen/dydxprotocol/indexer/protocol/v1/subaccount'; export * from './codegen/dydxprotocol/indexer/shared/removal_reason'; +export * from './codegen/dydxprotocol/vault/vault'; +export * from './utils'; diff --git a/indexer/packages/v4-protos/src/utils.ts b/indexer/packages/v4-protos/src/utils.ts new file mode 100644 index 00000000000..dfbd71f77da --- /dev/null +++ b/indexer/packages/v4-protos/src/utils.ts @@ -0,0 +1,12 @@ +import { Timestamp } from './codegen/google/protobuf/timestamp'; + +export const MILLIS_IN_NANOS: number = 1_000_000; +export const SECONDS_IN_MILLIS: number = 1_000; +export function protoTimestampToDate( + protoTime: Timestamp, +): Date { + const timeInMillis: number = Number(protoTime.seconds) * SECONDS_IN_MILLIS + + Math.floor(protoTime.nanos / MILLIS_IN_NANOS); + + return new Date(timeInMillis); +} diff --git a/indexer/patches/@types+ws+8.5.10.patch b/indexer/patches/@types+ws+8.5.10.patch new file mode 100644 index 00000000000..ca5d3f68472 --- /dev/null +++ b/indexer/patches/@types+ws+8.5.10.patch @@ -0,0 +1,22 @@ +# generated by patch-package 6.4.14 +# +# declared package: +# @types/ws: 8.5.10 +# +diff --git a/node_modules/@types/ws/index.d.ts b/node_modules/@types/ws/index.d.ts +index 94d1dca..d9abf0b 100644 +--- a/node_modules/@types/ws/index.d.ts ++++ b/node_modules/@types/ws/index.d.ts +@@ -318,7 +318,11 @@ declare namespace WebSocket { + host?: string | undefined; + port?: number | undefined; + backlog?: number | undefined; +- server?: HTTPServer | HTTPSServer | undefined; ++ // LOCAL DYDX PATCH ++ allowSynchronousEvents?: boolean; ++ autoPong?: boolean; ++ server?: HTTPServer | HTTPSServer | undefined; ++ // END LOCAL DYDX PATCH + verifyClient?: + | VerifyClientCallbackAsync> + | VerifyClientCallbackSync> diff --git a/indexer/pnpm-lock.yaml b/indexer/pnpm-lock.yaml index 83b3739a358..5d1e3613d20 100644 --- a/indexer/pnpm-lock.yaml +++ b/indexer/pnpm-lock.yaml @@ 
-3,10 +3,17 @@ lockfileVersion: 5.3 importers: .: - specifiers: {} + specifiers: + '@milahu/patch-package': 6.4.14 + '@types/ws': 8.5.10 + dependencies: + '@milahu/patch-package': 6.4.14 + devDependencies: + '@types/ws': 8.5.10 packages/base: specifiers: + '@aws-sdk/client-ec2': ^3.354.0 '@bugsnag/core': ^7.18.0 '@bugsnag/js': ^7.18.0 '@bugsnag/node': ^7.18.0 @@ -22,12 +29,15 @@ importers: dotenv-flow: ^3.2.0 express: ^4.18.1 hot-shots: ^9.1.0 + jest: ^28.1.2 lodash: ^4.17.21 traverse: ^0.6.6 typescript: ^4.7.4 + uuid: ^8.3.2 winston: ^3.8.1 winston-transport: ^4.5.0 dependencies: + '@aws-sdk/client-ec2': 3.658.1 '@bugsnag/core': 7.18.0 '@bugsnag/js': 7.18.0 '@bugsnag/node': 7.18.0 @@ -39,6 +49,7 @@ importers: hot-shots: 9.1.0 lodash: 4.17.21 traverse: 0.6.6 + uuid: 8.3.2 winston: 3.8.1 winston-transport: 4.5.0 devDependencies: @@ -48,6 +59,7 @@ importers: '@types/lodash': 4.14.182 '@types/traverse': 0.6.32 express: 4.18.1 + jest: 28.1.2 typescript: 4.7.4 packages/compliance: @@ -121,20 +133,24 @@ importers: specifiers: '@dydxprotocol-indexer/base': workspace:^0.0.1 '@dydxprotocol-indexer/dev': workspace:^0.0.1 + '@dydxprotocol-indexer/postgres': workspace:^0.0.1 + '@dydxprotocol-indexer/v4-protos': workspace:^0.0.1 '@types/jest': ^28.1.4 '@types/lodash': ^4.14.182 '@types/node': ^18.0.3 '@types/uuid': ^8.3.4 dotenv-flow: ^3.2.0 jest: ^28.1.2 - kafkajs: ^2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 typescript: ^4.7.4 uuid: ^8.3.2 dependencies: '@dydxprotocol-indexer/base': link:../base + '@dydxprotocol-indexer/postgres': link:../postgres + '@dydxprotocol-indexer/v4-protos': link:../v4-protos dotenv-flow: 3.2.0 - kafkajs: 2.1.0 + kafkajs: 2.2.4 lodash: 4.17.21 uuid: 8.3.2 devDependencies: @@ -146,6 +162,29 @@ importers: jest: 28.1.2_@types+node@18.0.3 typescript: 4.7.4 + packages/notifications: + specifiers: + '@dydxprotocol-indexer/base': workspace:^0.0.1 + '@dydxprotocol-indexer/dev': workspace:^0.0.1 + '@dydxprotocol-indexer/postgres': workspace:^0.0.1 + 
'@types/jest': ^28.1.4 + dotenv-flow: ^3.2.0 + firebase-admin: ^12.4.0 + jest: ^28.1.2 + ts-node: ^10.8.2 + typescript: ^4.7.4 + dependencies: + '@dydxprotocol-indexer/base': link:../base + '@dydxprotocol-indexer/postgres': link:../postgres + dotenv-flow: 3.2.0 + firebase-admin: 12.4.0 + devDependencies: + '@dydxprotocol-indexer/dev': link:../dev + '@types/jest': 28.1.4 + jest: 28.1.2_ts-node@10.8.2 + ts-node: 10.8.2_typescript@4.7.4 + typescript: 4.7.4 + packages/postgres: specifiers: '@dydxprotocol-indexer/base': workspace:^0.0.1 @@ -321,7 +360,7 @@ importers: dotenv-flow: ^3.2.0 esbuild: ^0.15.11 jest: ^28.1.2 - kafkajs: 2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 redis: 2.8.0 ts-node: ^10.8.2 @@ -333,7 +372,7 @@ importers: '@aws-sdk/client-lambda': 3.353.0 '@dydxprotocol-indexer/base': link:../../packages/base dotenv-flow: 3.2.0 - kafkajs: 2.1.0 + kafkajs: 2.2.4 lodash: 4.17.21 redis: 2.8.0 devDependencies: @@ -367,7 +406,7 @@ importers: dotenv-flow: ^3.2.0 esbuild: ^0.15.11 jest: ^28.1.2 - kafkajs: 2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 long: ^5.2.1 redis: 2.8.0 @@ -383,7 +422,7 @@ importers: '@dydxprotocol-indexer/v4-protos': link:../../packages/v4-protos big.js: 6.2.1 dotenv-flow: 3.2.0 - kafkajs: 2.1.0 + kafkajs: 2.2.4 lodash: 4.17.21 long: 5.2.1 redis: 2.8.0 @@ -407,13 +446,16 @@ importers: '@bugsnag/node': ^7.18.0 '@bugsnag/plugin-express': ^7.18.0 '@cosmjs/crypto': 0.32.1 + '@cosmjs/encoding': ^0.32.3 '@dydxprotocol-indexer/base': workspace:^0.0.1 '@dydxprotocol-indexer/compliance': workspace:^0.0.1 '@dydxprotocol-indexer/dev': workspace:^0.0.1 + '@dydxprotocol-indexer/notifications': workspace:^0.0.1 '@dydxprotocol-indexer/postgres': workspace:^0.0.1 '@dydxprotocol-indexer/redis': workspace:^0.0.1 '@dydxprotocol-indexer/v4-proto-parser': workspace:^0.0.1 '@dydxprotocol-indexer/v4-protos': workspace:^0.0.1 + '@keplr-wallet/cosmos': ^0.12.122 '@tsoa/runtime': ^5.0.0 '@types/big.js': ^6.1.5 '@types/body-parser': ^1.19.2 @@ -428,13 +470,15 @@ importers: 
'@types/response-time': ^2.3.5 '@types/supertest': ^2.0.12 '@types/swagger-ui-express': ^4.1.3 + bech32: 1.1.4 big.js: ^6.2.1 + binary-searching: ^2.0.5 body-parser: ^1.20.0 concurrently: ^7.6.0 cors: ^2.8.5 dd-trace: ^3.32.1 dotenv-flow: ^3.2.0 - dydx-widdershins: ^4.0.1 + dydx-widdershins: ^4.0.8 express: ^4.18.1 express-request-id: ^1.4.0 express-validator: ^6.14.2 @@ -457,19 +501,24 @@ importers: '@bugsnag/node': 7.18.0 '@bugsnag/plugin-express': 7.18.0_@bugsnag+core@7.18.0 '@cosmjs/crypto': 0.32.1 + '@cosmjs/encoding': 0.32.3 '@dydxprotocol-indexer/base': link:../../packages/base '@dydxprotocol-indexer/compliance': link:../../packages/compliance + '@dydxprotocol-indexer/notifications': link:../../packages/notifications '@dydxprotocol-indexer/postgres': link:../../packages/postgres '@dydxprotocol-indexer/redis': link:../../packages/redis '@dydxprotocol-indexer/v4-proto-parser': link:../../packages/v4-proto-parser '@dydxprotocol-indexer/v4-protos': link:../../packages/v4-protos + '@keplr-wallet/cosmos': 0.12.122 '@tsoa/runtime': 5.0.0 + bech32: 1.1.4 big.js: 6.2.1 + binary-searching: 2.0.5 body-parser: 1.20.0 cors: 2.8.5 dd-trace: 3.32.1 dotenv-flow: 3.2.0 - dydx-widdershins: 4.0.1 + dydx-widdershins: 4.0.8 express: 4.18.1 express-request-id: 1.4.1 express-validator: 6.14.2 @@ -508,6 +557,7 @@ importers: '@dydxprotocol-indexer/base': workspace:^0.0.1 '@dydxprotocol-indexer/dev': workspace:^0.0.1 '@dydxprotocol-indexer/kafka': workspace:^0.0.1 + '@dydxprotocol-indexer/notifications': workspace:^0.0.1 '@dydxprotocol-indexer/postgres': workspace:^0.0.1 '@dydxprotocol-indexer/redis': workspace:^0.0.1 '@dydxprotocol-indexer/v4-proto-parser': workspace:^0.0.1 @@ -523,7 +573,7 @@ importers: dotenv-flow: ^3.2.0 jest: ^28.1.2 jest-mock-extended: ^3.0.5 - kafkajs: ^2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 long: ^5.2.1 luxon: ^3.0.1 @@ -534,6 +584,7 @@ importers: dependencies: '@dydxprotocol-indexer/base': link:../../packages/base '@dydxprotocol-indexer/kafka': 
link:../../packages/kafka + '@dydxprotocol-indexer/notifications': link:../../packages/notifications '@dydxprotocol-indexer/postgres': link:../../packages/postgres '@dydxprotocol-indexer/redis': link:../../packages/redis '@dydxprotocol-indexer/v4-proto-parser': link:../../packages/v4-proto-parser @@ -542,7 +593,7 @@ importers: dd-trace: 3.32.1 dotenv-flow: 3.2.0 jest-mock-extended: 3.0.5_jest@28.1.2+typescript@4.7.4 - kafkajs: 2.1.0 + kafkajs: 2.2.4 lodash: 4.17.21 long: 5.2.1 luxon: 3.0.1 @@ -597,16 +648,18 @@ importers: '@types/luxon': ^3.0.0 '@types/node': ^18.0.3 '@types/redis': 2.8.27 + '@types/seedrandom': ^3.0.8 '@types/uuid': ^8.3.4 aws-sdk: ^2.1399.0 big.js: ^6.2.1 dd-trace: ^3.32.1 dotenv-flow: ^3.2.0 jest: ^28.1.2 - kafkajs: ^2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 luxon: ^3.0.1 redis: 2.8.0 + seedrandom: ^3.0.5 ts-node: ^10.8.2 tsconfig-paths: ^4.0.0 typescript: ^4.7.4 @@ -623,10 +676,11 @@ importers: big.js: 6.2.1 dd-trace: 3.32.1 dotenv-flow: 3.2.0 - kafkajs: 2.2.3 + kafkajs: 2.2.4 lodash: 4.17.21 luxon: 3.0.1 redis: 2.8.0 + seedrandom: 3.0.5 uuid: 8.3.2 devDependencies: '@dydxprotocol-indexer/dev': link:../../packages/dev @@ -636,6 +690,7 @@ importers: '@types/luxon': 3.0.0 '@types/node': 18.0.3 '@types/redis': 2.8.27 + '@types/seedrandom': 3.0.8 '@types/uuid': 8.3.4 jest: 28.1.2_250642e41d506bccecc9f35ad915bcb5 ts-node: 10.8.2_2dd5d46eecda2aef953638919121af58 @@ -656,7 +711,7 @@ importers: big.js: ^6.0.2 dotenv-flow: ^3.2.0 jest: ^28.1.2 - kafkajs: ^2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 long: ^5.2.1 ts-node: ^10.8.2 @@ -671,7 +726,7 @@ importers: '@dydxprotocol-indexer/v4-protos': link:../../packages/v4-protos big.js: 6.2.1 dotenv-flow: 3.2.0 - kafkajs: 2.2.3 + kafkajs: 2.2.4 lodash: 4.17.21 long: 5.2.1 yargs: 13.3.2 @@ -699,10 +754,10 @@ importers: '@types/express-request-id': ^1.4.3 '@types/jest': ^28.1.4 '@types/lodash': ^4.14.182 - '@types/node': ^18.0.3 + '@types/node': ^18.19.31 '@types/response-time': ^2.3.5 '@types/uuid': ^8.3.4 - 
'@types/ws': ^8.5.3 + '@types/ws': ^8.5.10 axios: ^1.2.1 body-parser: ^1.20.0 cors: ^2.8.5 @@ -711,7 +766,7 @@ importers: express: ^4.18.1 express-request-id: ^1.4.0 jest: ^28.1.2 - kafkajs: ^2.1.0 + kafkajs: ^2.2.4 lodash: ^4.17.21 nocache: ^3.0.4 response-time: ^2.3.2 @@ -719,7 +774,7 @@ importers: tsconfig-paths: ^4.0.0 typescript: ^4.7.4 uuid: ^8.3.2 - ws: ^8.8.1 + ws: ^8.16.0 dependencies: '@dydxprotocol-indexer/base': link:../../packages/base '@dydxprotocol-indexer/compliance': link:../../packages/compliance @@ -733,12 +788,12 @@ importers: dotenv-flow: 3.2.0 express: 4.18.1 express-request-id: 1.4.1 - kafkajs: 2.1.0 + kafkajs: 2.2.4 lodash: 4.17.21 nocache: 3.0.4 response-time: 2.3.2 uuid: 8.3.2 - ws: 8.8.1 + ws: 8.16.0 devDependencies: '@dydxprotocol-indexer/dev': link:../../packages/dev '@types/body-parser': 1.19.2 @@ -747,12 +802,12 @@ importers: '@types/express-request-id': 1.4.3 '@types/jest': 28.1.4 '@types/lodash': 4.14.182 - '@types/node': 18.0.3 + '@types/node': 18.19.31 '@types/response-time': 2.3.5 '@types/uuid': 8.3.4 - '@types/ws': 8.5.3 - jest: 28.1.2_250642e41d506bccecc9f35ad915bcb5 - ts-node: 10.8.2_2dd5d46eecda2aef953638919121af58 + '@types/ws': 8.5.10 + jest: 28.1.2_e1489a60da1bfeaddb37cf23d6a3b371 + ts-node: 10.8.2_4ea55324100c26d4019c6e6bcc89fac6 tsconfig-paths: 4.0.0 typescript: 4.7.4 @@ -774,7 +829,7 @@ importers: dd-trace: ^3.32.1 dotenv-flow: ^3.2.0 jest: ^28.1.2 - kafkajs: ^2.2.3 + kafkajs: ^2.2.4 long: ^5.2.1 luxon: ^3.0.1 redis: 2.8.0 @@ -791,7 +846,7 @@ importers: big.js: 6.2.1 dd-trace: 3.32.1 dotenv-flow: 3.2.0 - kafkajs: 2.2.3 + kafkajs: 2.2.4 long: 5.2.1 luxon: 3.0.1 redis: 2.8.0 @@ -814,7 +869,7 @@ packages: engines: {node: '>=6.0.0'} dependencies: '@jridgewell/gen-mapping': 0.1.1 - '@jridgewell/trace-mapping': 0.3.14 + '@jridgewell/trace-mapping': 0.3.17 /@aws-crypto/crc32/3.0.0: resolution: {integrity: sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==} @@ -843,6 +898,18 @@ 
packages: tslib: 1.14.1 dev: false + /@aws-crypto/sha256-browser/5.2.0: + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-locate-window': 3.310.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.7.0 + dev: false + /@aws-crypto/sha256-js/3.0.0: resolution: {integrity: sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==} dependencies: @@ -851,12 +918,27 @@ packages: tslib: 1.14.1 dev: false + /@aws-crypto/sha256-js/5.2.0: + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.654.0 + tslib: 2.7.0 + dev: false + /@aws-crypto/supports-web-crypto/3.0.0: resolution: {integrity: sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==} dependencies: tslib: 1.14.1 dev: false + /@aws-crypto/supports-web-crypto/5.2.0: + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + dependencies: + tslib: 2.7.0 + dev: false + /@aws-crypto/util/3.0.0: resolution: {integrity: sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==} dependencies: @@ -865,6 +947,14 @@ packages: tslib: 1.14.1 dev: false + /@aws-crypto/util/5.2.0: + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.7.0 + dev: false + /@aws-sdk/abort-controller/3.347.0: resolution: {integrity: sha512-P/2qE6ntYEmYG4Ez535nJWZbXqgbkJx8CMz7ChEuEg3Gp3dvVYEKg+iEUEvlqQ2U5dWP5J3ehw5po9t86IsVPQ==} engines: 
{node: '>=14.0.0'} @@ -873,6 +963,58 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/client-ec2/3.658.1: + resolution: {integrity: sha512-J/TdGg7Z8pwIL826QKwaX/EgND5Tst5N5hKcjwnj0jGfsJOkRTMdZTwOgvShYWgs6BplFFZqkl3t2dKsNfsVcg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.658.1_@aws-sdk+client-sts@3.658.1 + '@aws-sdk/client-sts': 3.658.1 + '@aws-sdk/core': 3.658.1 + '@aws-sdk/credential-provider-node': 3.658.1_48432e9e1d4872afb099a0b2260c0550 + '@aws-sdk/middleware-host-header': 3.654.0 + '@aws-sdk/middleware-logger': 3.654.0 + '@aws-sdk/middleware-recursion-detection': 3.654.0 + '@aws-sdk/middleware-sdk-ec2': 3.658.1 + '@aws-sdk/middleware-user-agent': 3.654.0 + '@aws-sdk/region-config-resolver': 3.654.0 + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-endpoints': 3.654.0 + '@aws-sdk/util-user-agent-browser': 3.654.0 + '@aws-sdk/util-user-agent-node': 3.654.0 + '@smithy/config-resolver': 3.0.8 + '@smithy/core': 2.4.6 + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/hash-node': 3.0.6 + '@smithy/invalid-dependency': 3.0.6 + '@smithy/middleware-content-length': 3.0.8 + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-retry': 3.0.21 + '@smithy/middleware-serde': 3.0.6 + '@smithy/middleware-stack': 3.0.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/node-http-handler': 3.2.3 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.21 + '@smithy/util-defaults-mode-node': 3.0.21 + '@smithy/util-endpoints': 2.1.2 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-retry': 3.0.6 + '@smithy/util-utf8': 3.0.0 + '@smithy/util-waiter': 3.1.5 + tslib: 2.7.0 + uuid: 9.0.1 + transitivePeerDependencies: + - aws-crt + dev: false + 
/@aws-sdk/client-ecr/3.354.0: resolution: {integrity: sha512-gy6cNm2y4TatqCoGkUspAgPfEGT2fsMIZGrfyPcx7cLsOiq+L5Wbs5AWFw6jywaRC88c6raUrpFLkPeVZjDZiQ==} engines: {node: '>=14.0.0'} @@ -1093,6 +1235,56 @@ packages: - aws-crt dev: false + /@aws-sdk/client-sso-oidc/3.658.1_@aws-sdk+client-sts@3.658.1: + resolution: {integrity: sha512-RGcZAI3qEA05JszPKwa0cAyp8rnS1nUvs0Sqw4hqLNQ1kD7b7V6CPjRXe7EFQqCOMvM4kGqx0+cEEVTOmBsFLw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.658.1 + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sts': 3.658.1 + '@aws-sdk/core': 3.658.1 + '@aws-sdk/credential-provider-node': 3.658.1_48432e9e1d4872afb099a0b2260c0550 + '@aws-sdk/middleware-host-header': 3.654.0 + '@aws-sdk/middleware-logger': 3.654.0 + '@aws-sdk/middleware-recursion-detection': 3.654.0 + '@aws-sdk/middleware-user-agent': 3.654.0 + '@aws-sdk/region-config-resolver': 3.654.0 + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-endpoints': 3.654.0 + '@aws-sdk/util-user-agent-browser': 3.654.0 + '@aws-sdk/util-user-agent-node': 3.654.0 + '@smithy/config-resolver': 3.0.8 + '@smithy/core': 2.4.6 + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/hash-node': 3.0.6 + '@smithy/invalid-dependency': 3.0.6 + '@smithy/middleware-content-length': 3.0.8 + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-retry': 3.0.21 + '@smithy/middleware-serde': 3.0.6 + '@smithy/middleware-stack': 3.0.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/node-http-handler': 3.2.3 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.21 + '@smithy/util-defaults-mode-node': 3.0.21 + '@smithy/util-endpoints': 2.1.2 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-retry': 3.0.6 + '@smithy/util-utf8': 
3.0.0 + tslib: 2.7.0 + transitivePeerDependencies: + - aws-crt + dev: false + /@aws-sdk/client-sso/3.353.0: resolution: {integrity: sha512-/dP5jLvZYskk6eVxI/5uaC1AVEbE7B2yuQ+9O3Z9plPIlZXyZxzXHf06s4gwsS4hAc7TDs3DaB+AnfMVLOPHbQ==} engines: {node: '>=14.0.0'} @@ -1175,6 +1367,52 @@ packages: - aws-crt dev: false + /@aws-sdk/client-sso/3.658.1: + resolution: {integrity: sha512-lOuaBtqPTYGn6xpXlQF4LsNDsQ8Ij2kOdnk+i69Kp6yS76TYvtUuukyLL5kx8zE1c8WbYtxj9y8VNw9/6uKl7Q==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.658.1 + '@aws-sdk/middleware-host-header': 3.654.0 + '@aws-sdk/middleware-logger': 3.654.0 + '@aws-sdk/middleware-recursion-detection': 3.654.0 + '@aws-sdk/middleware-user-agent': 3.654.0 + '@aws-sdk/region-config-resolver': 3.654.0 + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-endpoints': 3.654.0 + '@aws-sdk/util-user-agent-browser': 3.654.0 + '@aws-sdk/util-user-agent-node': 3.654.0 + '@smithy/config-resolver': 3.0.8 + '@smithy/core': 2.4.6 + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/hash-node': 3.0.6 + '@smithy/invalid-dependency': 3.0.6 + '@smithy/middleware-content-length': 3.0.8 + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-retry': 3.0.21 + '@smithy/middleware-serde': 3.0.6 + '@smithy/middleware-stack': 3.0.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/node-http-handler': 3.2.3 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.21 + '@smithy/util-defaults-mode-node': 3.0.21 + '@smithy/util-endpoints': 2.1.2 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-retry': 3.0.6 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + transitivePeerDependencies: + - aws-crt + dev: false + /@aws-sdk/client-sts/3.353.0: resolution: 
{integrity: sha512-jOnh242TtxG6st60AxLSav0MTgYlJn4c8ZDxk4Wk4+n5bypnXRrqgVXob99lyVnCRfP3OsDl1eilcVp94EXzVw==} engines: {node: '>=14.0.0'} @@ -1265,6 +1503,54 @@ packages: - aws-crt dev: false + /@aws-sdk/client-sts/3.658.1: + resolution: {integrity: sha512-yw9hc5blTnbT1V6mR7Cx9HGc9KQpcLQ1QXj8rntiJi6tIYu3aFNVEyy81JHL7NsuBSeQulJTvHO3y6r3O0sfRg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.658.1_@aws-sdk+client-sts@3.658.1 + '@aws-sdk/core': 3.658.1 + '@aws-sdk/credential-provider-node': 3.658.1_48432e9e1d4872afb099a0b2260c0550 + '@aws-sdk/middleware-host-header': 3.654.0 + '@aws-sdk/middleware-logger': 3.654.0 + '@aws-sdk/middleware-recursion-detection': 3.654.0 + '@aws-sdk/middleware-user-agent': 3.654.0 + '@aws-sdk/region-config-resolver': 3.654.0 + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-endpoints': 3.654.0 + '@aws-sdk/util-user-agent-browser': 3.654.0 + '@aws-sdk/util-user-agent-node': 3.654.0 + '@smithy/config-resolver': 3.0.8 + '@smithy/core': 2.4.6 + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/hash-node': 3.0.6 + '@smithy/invalid-dependency': 3.0.6 + '@smithy/middleware-content-length': 3.0.8 + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-retry': 3.0.21 + '@smithy/middleware-serde': 3.0.6 + '@smithy/middleware-stack': 3.0.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/node-http-handler': 3.2.3 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.21 + '@smithy/util-defaults-mode-node': 3.0.21 + '@smithy/util-endpoints': 2.1.2 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-retry': 3.0.6 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + transitivePeerDependencies: + - aws-crt + dev: false + 
/@aws-sdk/config-resolver/3.353.0: resolution: {integrity: sha512-rJJ1ebb8E4vfdGWym6jql1vodV+NUEATI1QqlwxQ0AZ8MGPIsT3uR52VyX7gp+yIrLZBJZdGYVNwrWSJgZ3B3w==} engines: {node: '>=14.0.0'} @@ -1285,6 +1571,22 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/core/3.658.1: + resolution: {integrity: sha512-vJVMoMcSKXK2gBRSu9Ywwv6wQ7tXH8VL1fqB1uVxgCqBZ3IHfqNn4zvpMPWrwgO2/3wv7XFyikGQ5ypPTCw4jA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/core': 2.4.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/property-provider': 3.1.6 + '@smithy/protocol-http': 4.1.3 + '@smithy/signature-v4': 4.1.4 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/util-middleware': 3.0.6 + fast-xml-parser: 4.4.1 + tslib: 2.7.0 + dev: false + /@aws-sdk/credential-provider-env/3.353.0: resolution: {integrity: sha512-Y4VsNS8O1FAD5J7S5itOhnOghQ5LIXlZ44t35nF8cbcF+JPvY3ToKzYpjYN1jM7DXKqU4shtqgYpzSqxlvEgKQ==} engines: {node: '>=14.0.0'} @@ -1294,6 +1596,31 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/credential-provider-env/3.654.0: + resolution: {integrity: sha512-kogsx3Ql81JouHS7DkheCDU9MYAvK0AokxjcshDveGmf7BbgbWCA8Fnb9wjQyNDaOXNvkZu8Z8rgkX91z324/w==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/property-provider': 3.1.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@aws-sdk/credential-provider-http/3.658.1: + resolution: {integrity: sha512-4ubkJjEVCZflxkZnV1JDQv8P2pburxk1LrEp55telfJRzXrnowzBKwuV2ED0QMNC448g2B3VCaffS+Ct7c4IWQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/node-http-handler': 3.2.3 + '@smithy/property-provider': 3.1.6 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/util-stream': 3.1.8 + tslib: 2.7.0 + dev: false + /@aws-sdk/credential-provider-imds/3.353.0: resolution: {integrity: 
sha512-n70yvXBN7E6NX7vA/wLTqyVayu/QKYsPvVn8Y+0A/j5oXXlVY+hQvjjEaNo0Zq1U8Z0L/kj3mutDpe57nTLKSg==} engines: {node: '>=14.0.0'} @@ -1350,6 +1677,29 @@ packages: - aws-crt dev: false + /@aws-sdk/credential-provider-ini/3.658.1_48432e9e1d4872afb099a0b2260c0550: + resolution: {integrity: sha512-2uwOamQg5ppwfegwen1ddPu5HM3/IBSnaGlaKLFhltkdtZ0jiqTZWUtX2V+4Q+buLnT0hQvLS/frQ+7QUam+0Q==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.658.1 + dependencies: + '@aws-sdk/client-sts': 3.658.1 + '@aws-sdk/credential-provider-env': 3.654.0 + '@aws-sdk/credential-provider-http': 3.658.1 + '@aws-sdk/credential-provider-process': 3.654.0 + '@aws-sdk/credential-provider-sso': 3.658.1_@aws-sdk+client-sso-oidc@3.658.1 + '@aws-sdk/credential-provider-web-identity': 3.654.0_@aws-sdk+client-sts@3.658.1 + '@aws-sdk/types': 3.654.0 + '@smithy/credential-provider-imds': 3.2.3 + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + dev: false + /@aws-sdk/credential-provider-node/3.353.0: resolution: {integrity: sha512-OIyZ7OG1OQJ1aQGAu78hggSkK4jiWO1/Sm6wj5wvwylbST8NnR+dHjikZGFB3hoYt1uEe2O2LeGW67bI54VIEQ==} engines: {node: '>=14.0.0'} @@ -1386,6 +1736,28 @@ packages: - aws-crt dev: false + /@aws-sdk/credential-provider-node/3.658.1_48432e9e1d4872afb099a0b2260c0550: + resolution: {integrity: sha512-XwxW6N+uPXPYAuyq+GfOEdfL/MZGAlCSfB5gEWtLBFmFbikhmEuqfWtI6CD60OwudCUOh6argd21BsJf8o1SJA==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/credential-provider-env': 3.654.0 + '@aws-sdk/credential-provider-http': 3.658.1 + '@aws-sdk/credential-provider-ini': 3.658.1_48432e9e1d4872afb099a0b2260c0550 + '@aws-sdk/credential-provider-process': 3.654.0 + '@aws-sdk/credential-provider-sso': 3.658.1_@aws-sdk+client-sso-oidc@3.658.1 + '@aws-sdk/credential-provider-web-identity': 3.654.0_@aws-sdk+client-sts@3.658.1 + '@aws-sdk/types': 
3.654.0 + '@smithy/credential-provider-imds': 3.2.3 + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + dev: false + /@aws-sdk/credential-provider-process/3.353.0: resolution: {integrity: sha512-IBkuxj3pCdmnTzIcRXhq+5sp1hsWACQLi9fHLK+mDEgaiaO+u2r3Th5tV3rJUfNhZY4qa62QNGsHwsVstVxGvw==} engines: {node: '>=14.0.0'} @@ -1406,6 +1778,17 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/credential-provider-process/3.654.0: + resolution: {integrity: sha512-PmQoo8sZ9Q2Ow8OMzK++Z9lI7MsRUG7sNq3E72DVA215dhtTICTDQwGlXH2AAmIp7n+G9LLRds+4wo2ehG4mkg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/credential-provider-sso/3.353.0: resolution: {integrity: sha512-S16tpQ7Zra2O3PNCV4a89wn8wVEgv8oRwjF7p87AM902fXEuag4VHIhaI/TgANQT737JDA/ZCFL2XSilCbHxYQ==} engines: {node: '>=14.0.0'} @@ -1434,6 +1817,22 @@ packages: - aws-crt dev: false + /@aws-sdk/credential-provider-sso/3.658.1_@aws-sdk+client-sso-oidc@3.658.1: + resolution: {integrity: sha512-YOagVEsZEk9DmgJEBg+4MBXrPcw/tYas0VQ5OVBqC5XHNbi2OBGJqgmjVPesuu393E7W0VQxtJFDS00O1ewQgA==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/client-sso': 3.658.1 + '@aws-sdk/token-providers': 3.654.0_@aws-sdk+client-sso-oidc@3.658.1 + '@aws-sdk/types': 3.654.0 + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + dev: false + /@aws-sdk/credential-provider-web-identity/3.353.0: resolution: {integrity: sha512-l3TdZB6tEDhLIl0oLIIy1njlxogpyIXSMW9fpuHBt7LDUwfBdCwVPE6+JpGXra6tJAfRQSv5l0lYx5osSLq98g==} engines: {node: '>=14.0.0'} @@ -1452,6 +1851,19 @@ packages: tslib: 2.5.0 
dev: false + /@aws-sdk/credential-provider-web-identity/3.654.0_@aws-sdk+client-sts@3.658.1: + resolution: {integrity: sha512-6a2g9gMtZToqSu+CusjNK5zvbLJahQ9di7buO3iXgbizXpLXU1rnawCpWxwslMpT5fLgMSKDnKDrr6wdEk7jSw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.654.0 + dependencies: + '@aws-sdk/client-sts': 3.658.1 + '@aws-sdk/types': 3.654.0 + '@smithy/property-provider': 3.1.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/eventstream-codec/3.347.0: resolution: {integrity: sha512-61q+SyspjsaQ4sdgjizMyRgVph2CiW4aAtfpoH69EJFJfTxTR/OqnZ9Jx/3YiYi0ksrvDenJddYodfWWJqD8/w==} dependencies: @@ -1559,6 +1971,16 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/middleware-host-header/3.654.0: + resolution: {integrity: sha512-rxGgVHWKp8U2ubMv+t+vlIk7QYUaRCHaVpmUlJv0Wv6Q0KeO9a42T9FxHphjOTlCGQOLcjCreL9CF8Qhtb4mdQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/middleware-logger/3.347.0: resolution: {integrity: sha512-NYC+Id5UCkVn+3P1t/YtmHt75uED06vwaKyxDy0UmB2K66PZLVtwWbLpVWrhbroaw1bvUHYcRyQ9NIfnVcXQjA==} engines: {node: '>=14.0.0'} @@ -1567,6 +1989,15 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/middleware-logger/3.654.0: + resolution: {integrity: sha512-OQYb+nWlmASyXfRb989pwkJ9EVUMP1CrKn2eyTk3usl20JZmKo2Vjis6I0tLUkMSxMhnBJJlQKyWkRpD/u1FVg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/middleware-recursion-detection/3.347.0: resolution: {integrity: sha512-qfnSvkFKCAMjMHR31NdsT0gv5Sq/ZHTUD4yQsSLpbVQ6iYAS834lrzXt41iyEHt57Y514uG7F/Xfvude3u4icQ==} engines: {node: '>=14.0.0'} @@ -1576,6 +2007,16 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/middleware-recursion-detection/3.654.0: + resolution: {integrity: sha512-gKSomgltKVmsT8sC6W7CrADZ4GHwX9epk3GcH6QhebVO3LA9LRbkL3TwOPUXakxxOLLUTYdOZLIOtFf7iH00lg==} + 
engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/middleware-retry/3.353.0: resolution: {integrity: sha512-v81NEzDGGvnpvFUy388razpicn7STwBA5gItlr3Ukz8ZWWudfQarTBr0nfVyODXb+76du2LwzEQOd6YtfoOZ+w==} engines: {node: '>=14.0.0'} @@ -1602,6 +2043,20 @@ packages: uuid: 8.3.2 dev: false + /@aws-sdk/middleware-sdk-ec2/3.658.1: + resolution: {integrity: sha512-CnkMajiLD8c+PyiqMjdRt3n87oZnd8jw+8mbtB0jX7Q9ED2z+oeG+RTZMXp2QEiZ0Q+7RyKjXf/PLRhARppFog==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-format-url': 3.654.0 + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/protocol-http': 4.1.3 + '@smithy/signature-v4': 4.1.4 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/middleware-sdk-sts/3.353.0: resolution: {integrity: sha512-GDpjznRBjvCvBfyLEhWb/FSmsnFR+nhBQC0N7d8pqWRqI084sy2ZRyQ6hNDWnImi6AvOabTBSfDm6cB5RexDow==} engines: {node: '>=14.0.0'} @@ -1669,6 +2124,17 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/middleware-user-agent/3.654.0: + resolution: {integrity: sha512-liCcqPAyRsr53cy2tYu4qeH4MMN0eh9g6k56XzI5xd4SghXH5YWh4qOYAlQ8T66ZV4nPMtD8GLtLXGzsH8moFg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@aws-sdk/util-endpoints': 3.654.0 + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/node-config-provider/3.353.0: resolution: {integrity: sha512-4j0dFHAIa0NwQOPZ/PgkyfCWRaaLhilGbL/cOHkndtUdV54WtG+9+21pKNtakfxncF0irtZvVOv/CW/5x909ZQ==} engines: {node: '>=14.0.0'} @@ -1733,6 +2199,18 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/region-config-resolver/3.654.0: + resolution: {integrity: sha512-ydGOrXJxj3x0sJhsXyTmvJVLAE0xxuTWFJihTl67RtaO7VRNtd82I3P3bwoMMaDn5WpmV5mPo8fEUDRlBm3fPg==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/node-config-provider': 
3.1.7 + '@smithy/types': 3.4.2 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.6 + tslib: 2.7.0 + dev: false + /@aws-sdk/service-error-classification/3.347.0: resolution: {integrity: sha512-xZ3MqSY81Oy2gh5g0fCtooAbahqh9VhsF8vcKjVX8+XPbGC8y+kej82+MsMg4gYL8gRFB9u4hgYbNgIS6JTAvg==} engines: {node: '>=14.0.0'} @@ -1817,6 +2295,20 @@ packages: - aws-crt dev: false + /@aws-sdk/token-providers/3.654.0_@aws-sdk+client-sso-oidc@3.658.1: + resolution: {integrity: sha512-D8GeJYmvbfWkQDtTB4owmIobSMexZel0fOoetwvgCQ/7L8VPph3Q2bn1TRRIXvH7wdt6DcDxA3tKMHPBkT3GlA==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.654.0 + dependencies: + '@aws-sdk/client-sso-oidc': 3.658.1_@aws-sdk+client-sts@3.658.1 + '@aws-sdk/types': 3.654.0 + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/types/3.347.0: resolution: {integrity: sha512-GkCMy79mdjU9OTIe5KT58fI/6uqdf8UmMdWqVHmFJ+UpEzOci7L/uw4sOXWo7xpPzLs6cJ7s5ouGZW4GRPmHFA==} engines: {node: '>=14.0.0'} @@ -1824,6 +2316,14 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/types/3.654.0: + resolution: {integrity: sha512-VWvbED3SV+10QJIcmU/PKjsKilsTV16d1I7/on4bvD/jo1qGeMXqLDBSen3ks/tuvXZF/mFc7ZW/W2DiLVtO7A==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/url-parser/3.347.0: resolution: {integrity: sha512-lhrnVjxdV7hl+yCnJfDZOaVLSqKjxN20MIOiijRiqaWGLGEAiSqBreMhL89X1WKCifxAs4zZf9YB9SbdziRpAA==} dependencies: @@ -1910,6 +2410,26 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/util-endpoints/3.654.0: + resolution: {integrity: sha512-i902fcBknHs0Irgdpi62+QMvzxE+bczvILXigYrlHL4+PiEnlMVpni5L5W1qCkNZXf8AaMrSBuR1NZAGp6UOUw==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/types': 3.4.2 + '@smithy/util-endpoints': 2.1.2 + tslib: 2.7.0 + dev: false + + /@aws-sdk/util-format-url/3.654.0: + resolution: 
{integrity: sha512-2yAlJ/l1uTJhS52iu4+/EvdIyQhDBL+nATY8rEjFI0H+BHGVrJIH2CL4DByhvi2yvYwsqQX0HYah6pF/yoXukA==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/querystring-builder': 3.0.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/util-hex-encoding/3.310.0: resolution: {integrity: sha512-sVN7mcCCDSJ67pI1ZMtk84SKGqyix6/0A1Ab163YKn+lFBQRMKexleZzpYzNGxYzmQS6VanP/cfU7NiLQOaSfA==} engines: {node: '>=14.0.0'} @@ -1921,7 +2441,7 @@ packages: resolution: {integrity: sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==} engines: {node: '>=14.0.0'} dependencies: - tslib: 2.5.0 + tslib: 2.7.0 dev: false /@aws-sdk/util-middleware/3.347.0: @@ -1954,6 +2474,15 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/util-user-agent-browser/3.654.0: + resolution: {integrity: sha512-ykYAJqvnxLt7wfrqya28wuH3/7NdrwzfiFd7NqEVQf7dXVxL5RPEpD7DxjcyQo3DsHvvdUvGZVaQhozycn1pzA==} + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/types': 3.4.2 + bowser: 2.11.0 + tslib: 2.7.0 + dev: false + /@aws-sdk/util-user-agent-node/3.353.0: resolution: {integrity: sha512-wAviGE0NFqGnaBi6JdjCjp/3DA4AprXQayg9fGphRmP6ncOHNHGonPj/60l+Itu+m78V2CbIS76jqCdUtyAZEQ==} engines: {node: '>=14.0.0'} @@ -1982,6 +2511,21 @@ packages: tslib: 2.5.0 dev: false + /@aws-sdk/util-user-agent-node/3.654.0: + resolution: {integrity: sha512-a0ojjdBN6pqv6gB4H/QPPSfhs7mFtlVwnmKCM/QrTaFzN0U810PJ1BST3lBx5sa23I5jWHGaoFY+5q65C3clLQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + dependencies: + '@aws-sdk/types': 3.654.0 + '@smithy/node-config-provider': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@aws-sdk/util-utf8-browser/3.259.0: resolution: {integrity: sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==} dependencies: @@ -4688,7 +5232,7 @@ packages: /@cosmjs/crypto/0.32.1: resolution: {integrity: 
sha512-AsKucEg5o8evU0wXF/lDwX+ZSwCKF4bbc57nFzraHywlp3sNu4dfPPURoMrT0r7kT7wQZAy4Pdnvmm9nnCCm/Q==} dependencies: - '@cosmjs/encoding': 0.32.1 + '@cosmjs/encoding': 0.32.3 '@cosmjs/math': 0.32.1 '@cosmjs/utils': 0.32.1 '@noble/hashes': 1.3.0 @@ -4705,6 +5249,14 @@ packages: readonly-date: 1.0.0 dev: false + /@cosmjs/encoding/0.32.3: + resolution: {integrity: sha512-p4KF7hhv8jBQX3MkB3Defuhz/W0l3PwWVYU2vkVuBJ13bJcXyhU9nJjiMkaIv+XP+W2QgRceqNNgFUC5chNR7w==} + dependencies: + base64-js: 1.5.1 + bech32: 1.1.4 + readonly-date: 1.0.0 + dev: false + /@cosmjs/json-rpc/0.32.1: resolution: {integrity: sha512-Hsj3Sg+m/JF8qfISp/G4TXQ0FAO01mzDKtNcgKufIHCrvJNDiE69xGyGgSm/qKwsXLBmzRTSxHWK0+yZef3LNQ==} dependencies: @@ -4919,9 +5471,199 @@ packages: - supports-color dev: true - /@exodus/schemasafe/1.0.1: - resolution: {integrity: sha512-PQdbF8dGd4LnbwBlcc4ML8RKYdplm+e9sUeWBTr4zgF13/Shiuov9XznvM4T8cb1CfyKK21yTUkuAIIh/DAH/g==} + /@ethersproject/address/5.7.0: + resolution: {integrity: sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA==} + dependencies: + '@ethersproject/bignumber': 5.7.0 + '@ethersproject/bytes': 5.7.0 + '@ethersproject/keccak256': 5.7.0 + '@ethersproject/logger': 5.7.0 + '@ethersproject/rlp': 5.7.0 + dev: false + + /@ethersproject/bignumber/5.7.0: + resolution: {integrity: sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw==} + dependencies: + '@ethersproject/bytes': 5.7.0 + '@ethersproject/logger': 5.7.0 + bn.js: 5.2.1 + dev: false + + /@ethersproject/bytes/5.7.0: + resolution: {integrity: sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A==} + dependencies: + '@ethersproject/logger': 5.7.0 + dev: false + + /@ethersproject/keccak256/5.7.0: + resolution: {integrity: sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg==} + dependencies: + '@ethersproject/bytes': 5.7.0 + js-sha3: 0.8.0 + dev: false + + 
/@ethersproject/logger/5.7.0: + resolution: {integrity: sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig==} + dev: false + + /@ethersproject/rlp/5.7.0: + resolution: {integrity: sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w==} + dependencies: + '@ethersproject/bytes': 5.7.0 + '@ethersproject/logger': 5.7.0 + dev: false + + /@exodus/schemasafe/1.3.0: + resolution: {integrity: sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==} + dev: false + + /@fastify/busboy/3.0.0: + resolution: {integrity: sha512-83rnH2nCvclWaPQQKvkJ2pdOjG4TZyEVuFDnlOF6KP08lDaaceVyw/W63mDuafQT+MKHCvXIPpE5uYWeM0rT4w==} + dev: false + + /@firebase/app-check-interop-types/0.3.2: + resolution: {integrity: sha512-LMs47Vinv2HBMZi49C09dJxp0QT5LwDzFaVGf/+ITHe3BlIhUiLNttkATSXplc89A2lAaeTqjgqVkiRfUGyQiQ==} + dev: false + + /@firebase/app-types/0.9.2: + resolution: {integrity: sha512-oMEZ1TDlBz479lmABwWsWjzHwheQKiAgnuKxE0pz0IXCVx7/rtlkx1fQ6GfgK24WCrxDKMplZrT50Kh04iMbXQ==} + dev: false + + /@firebase/auth-interop-types/0.2.3: + resolution: {integrity: sha512-Fc9wuJGgxoxQeavybiuwgyi+0rssr76b+nHpj+eGhXFYAdudMWyfBHvFL/I5fEHniUM/UQdFzi9VXJK2iZF7FQ==} + dev: false + + /@firebase/component/0.6.8: + resolution: {integrity: sha512-LcNvxGLLGjBwB0dJUsBGCej2fqAepWyBubs4jt1Tiuns7QLbXHuyObZ4aMeBjZjWx4m8g1LoVI9QFpSaq/k4/g==} + dependencies: + '@firebase/util': 1.9.7 + tslib: 2.5.0 + dev: false + + /@firebase/database-compat/1.0.7: + resolution: {integrity: sha512-R/3B+VVzEFN5YcHmfWns3eitA8fHLTL03io+FIoMcTYkajFnrBdS3A+g/KceN9omP7FYYYGTQWF9lvbEx6eMEg==} + dependencies: + '@firebase/component': 0.6.8 + '@firebase/database': 1.0.7 + '@firebase/database-types': 1.0.4 + '@firebase/logger': 0.4.2 + '@firebase/util': 1.9.7 + tslib: 2.5.0 + dev: false + + /@firebase/database-types/1.0.4: + resolution: {integrity: 
sha512-mz9ZzbH6euFXbcBo+enuJ36I5dR5w+enJHHjy9Y5ThCdKUseqfDjW3vCp1YxE9zygFCSjJJ/z1cQ+zodvUcwPQ==} + dependencies: + '@firebase/app-types': 0.9.2 + '@firebase/util': 1.9.7 + dev: false + + /@firebase/database/1.0.7: + resolution: {integrity: sha512-wjXr5AO8RPxVVg7rRCYffT7FMtBjHRfJ9KMwi19MbOf0vBf0H9YqW3WCgcnLpXI6ehiUcU3z3qgPnnU0nK6SnA==} + dependencies: + '@firebase/app-check-interop-types': 0.3.2 + '@firebase/auth-interop-types': 0.2.3 + '@firebase/component': 0.6.8 + '@firebase/logger': 0.4.2 + '@firebase/util': 1.9.7 + faye-websocket: 0.11.4 + tslib: 2.5.0 + dev: false + + /@firebase/logger/0.4.2: + resolution: {integrity: sha512-Q1VuA5M1Gjqrwom6I6NUU4lQXdo9IAQieXlujeHZWvRt1b7qQ0KwBaNAjgxG27jgF9/mUwsNmO8ptBCGVYhB0A==} + dependencies: + tslib: 2.5.0 + dev: false + + /@firebase/util/1.9.7: + resolution: {integrity: sha512-fBVNH/8bRbYjqlbIhZ+lBtdAAS4WqZumx03K06/u7fJSpz1TGjEMm1ImvKD47w+xaFKIP2ori6z8BrbakRfjJA==} + dependencies: + tslib: 2.5.0 + dev: false + + /@google-cloud/firestore/7.9.0: + resolution: {integrity: sha512-c4ALHT3G08rV7Zwv8Z2KG63gZh66iKdhCBeDfCpIkLrjX6EAjTD/szMdj14M+FnQuClZLFfW5bAgoOjfNmLtJg==} + engines: {node: '>=14.0.0'} + requiresBuild: true + dependencies: + fast-deep-equal: 3.1.3 + functional-red-black-tree: 1.0.1 + google-gax: 4.4.1 + protobufjs: 7.3.2 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + + /@google-cloud/paginator/5.0.2: + resolution: {integrity: sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==} + engines: {node: '>=14.0.0'} + dependencies: + arrify: 2.0.1 + extend: 3.0.2 + dev: false + optional: true + + /@google-cloud/projectify/4.0.0: + resolution: {integrity: sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==} + engines: {node: '>=14.0.0'} + dev: false + optional: true + + /@google-cloud/promisify/4.0.0: + resolution: {integrity: 
sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==} + engines: {node: '>=14'} + dev: false + optional: true + + /@google-cloud/storage/7.12.1: + resolution: {integrity: sha512-Z3ZzOnF3YKLuvpkvF+TjQ6lztxcAyTILp+FjKonmVpEwPa9vFvxpZjubLR4sB6bf19i/8HL2AXRjA0YFgHFRmQ==} + engines: {node: '>=14'} + requiresBuild: true + dependencies: + '@google-cloud/paginator': 5.0.2 + '@google-cloud/projectify': 4.0.0 + '@google-cloud/promisify': 4.0.0 + abort-controller: 3.0.0 + async-retry: 1.3.3 + duplexify: 4.1.3 + fast-xml-parser: 4.5.0 + gaxios: 6.7.1 + google-auth-library: 9.14.1 + html-entities: 2.5.2 + mime: 3.0.0 + p-limit: 3.1.0 + retry-request: 7.0.2 + teeny-request: 9.0.0 + uuid: 8.3.2 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + + /@grpc/grpc-js/1.11.1: + resolution: {integrity: sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==} + engines: {node: '>=12.10.0'} + dependencies: + '@grpc/proto-loader': 0.7.13 + '@js-sdsl/ordered-map': 4.4.2 + dev: false + optional: true + + /@grpc/proto-loader/0.7.13: + resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} + engines: {node: '>=6'} + hasBin: true + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.1 + protobufjs: 7.3.2 + yargs: 17.7.2 dev: false + optional: true /@humanwhocodes/config-array/0.9.5: resolution: {integrity: sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==} @@ -5250,7 +5992,7 @@ packages: dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.14 - '@jridgewell/trace-mapping': 0.3.14 + '@jridgewell/trace-mapping': 0.3.17 /@jridgewell/resolve-uri/3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} @@ -5274,7 +6016,6 @@ packages: dependencies: 
'@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 - dev: true /@jridgewell/trace-mapping/0.3.9: resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} @@ -5282,29 +6023,119 @@ packages: '@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 + /@js-sdsl/ordered-map/4.4.2: + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + dev: false + optional: true + /@jsdevtools/ono/7.1.3: resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} dev: true - /@mapbox/node-pre-gyp/1.0.10: - resolution: {integrity: sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==} - hasBin: true + /@keplr-wallet/common/0.12.122: + resolution: {integrity: sha512-Q+8+wmGYDarEcyXQQSD//ugKmBTl0Gxam0MGydERHSnm5BEl/1bQNAT4ZSo+1zeuncKdwHQ6MSRegX46/XjMTQ==} dependencies: - detect-libc: 2.0.1 - https-proxy-agent: 5.0.1 - make-dir: 3.1.0 - node-fetch: 2.6.11 - nopt: 5.0.0 - npmlog: 5.0.1 - rimraf: 3.0.2 - semver: 7.3.7 - tar: 6.1.15 + '@keplr-wallet/crypto': 0.12.122 + '@keplr-wallet/types': 0.12.122 + buffer: 6.0.3 + delay: 4.4.1 + dev: false + + /@keplr-wallet/cosmos/0.12.122: + resolution: {integrity: sha512-t0p40l7UQ4hK0Sw2fw54qnuNPE+riBlP1ITy/LnU0UzLxZkR7bGnAQF5V32OLnT75QVChwsjDxNA4xSNCsgImQ==} + dependencies: + '@ethersproject/address': 5.7.0 + '@keplr-wallet/common': 0.12.122 + '@keplr-wallet/crypto': 0.12.122 + '@keplr-wallet/proto-types': 0.12.122 + '@keplr-wallet/simple-fetch': 0.12.122 + '@keplr-wallet/types': 0.12.122 + '@keplr-wallet/unit': 0.12.122 + bech32: 1.1.4 + buffer: 6.0.3 + long: 4.0.0 + protobufjs: 6.11.3 + dev: false + + /@keplr-wallet/crypto/0.12.122: + resolution: {integrity: sha512-prD0XdmlbTldysisnMTH3oDXmENtZnhur6YvL/7Q+uorr0HutfwfZYTsMy2RI1DUyzudyQ7kDsyNdCSLWI8nqw==} + dependencies: + 
'@ethersproject/keccak256': 5.7.0 + bip32: 2.0.6 + bip39: 3.1.0 + bs58check: 2.1.2 + buffer: 6.0.3 + crypto-js: 4.2.0 + elliptic: 6.5.4 + sha.js: 2.4.11 + dev: false + + /@keplr-wallet/proto-types/0.12.122: + resolution: {integrity: sha512-JM1Mx1cmikmscUdJ8qVyJ+DEdzOHnG5xT8QyWEf/xWq6kwy4+VWXghdbLk70mWtbMQAc00RpTy5IM4ALsHwAoQ==} + dependencies: + long: 4.0.0 + protobufjs: 6.11.3 + dev: false + + /@keplr-wallet/simple-fetch/0.12.122: + resolution: {integrity: sha512-yxBQDKQwGfD7XH8dui/BL8SdfTyLuDdqCfKianb66K0JxzJoc7qXJm6b71erOHh3ncJXPXHHtsTHDO5u1MO7/w==} + dev: false + + /@keplr-wallet/types/0.12.122: + resolution: {integrity: sha512-VdFecbB3pQyHIiLw0QOSypv69iWRQwRDKscNKQfmdLhuNX12XdZpBYxTi0IBUUSltGvn81NYPZjkw1b/OnSkNw==} + dependencies: + long: 4.0.0 + dev: false + + /@keplr-wallet/unit/0.12.122: + resolution: {integrity: sha512-RWEmYf9TBuArfpCvfhfd3P7w4UqDISTcApPq8QE/r+eyhFx9nSdE6s/4NqAxGBXn3SRbPgylSu0DpP3pmMQH1g==} + dependencies: + '@keplr-wallet/types': 0.12.122 + big-integer: 1.6.52 + utility-types: 3.11.0 + dev: false + + /@mapbox/node-pre-gyp/1.0.11: + resolution: {integrity: sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==} + hasBin: true + dependencies: + detect-libc: 2.0.3 + https-proxy-agent: 5.0.1 + make-dir: 3.1.0 + node-fetch: 2.7.0 + nopt: 5.0.0 + npmlog: 5.0.1 + rimraf: 3.0.2 + semver: 7.5.4 + tar: 6.2.1 transitivePeerDependencies: - encoding - supports-color dev: false optional: true + /@milahu/patch-package/6.4.14: + resolution: {integrity: sha512-sfjl5rZPGu8T7Yl3oSnpwCLp7WGK1rKoSMkC2iZCI7M5y1lijF1GFAuJl2xKCGB4SubgChm+HGJ3YkWYLc3xVg==} + engines: {npm: '>5'} + hasBin: true + dependencies: + '@types/dashdash': 1.14.3 + '@yarnpkg/lockfile': 1.1.0 + chalk: 2.4.2 + cross-spawn: 7.0.3 + dashdash: 2.0.0 + find-yarn-workspace-root: 2.0.0 + fs-extra: 10.1.0 + is-ci: 3.0.1 + klaw-sync: 6.0.0 + open: 8.4.2 + rimraf: 3.0.2 + semver: 7.5.4 + shlex: 2.1.2 + slash: 2.0.0 + tmp: 0.2.3 + dev: false + 
/@noble/hashes/1.3.0: resolution: {integrity: sha512-ilHEACi9DwqJB0pw7kv+Apvh50jiiSyR/cQ3y4W7lOR5mhvn/50FLUfsnfJz0BDZtl/RR16kXvptiv6q1msYZg==} dev: false @@ -5568,6 +6399,175 @@ packages: dependencies: '@sinonjs/commons': 1.8.3 + /@smithy/abort-controller/3.1.4: + resolution: {integrity: sha512-VupaALAQlXViW3/enTf/f5l5JZYSAxoJL7f0nanhNNKnww6DGCg1oYIuNP78KDugnkwthBO6iEcym16HhWV8RQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/config-resolver/3.0.8: + resolution: {integrity: sha512-Tv1obAC18XOd2OnDAjSWmmthzx6Pdeh63FbLin8MlPiuJ2ATpKkq0NcNOJFr0dO+JmZXnwu8FQxKJ3TKJ3Hulw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.7 + '@smithy/types': 3.4.2 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.6 + tslib: 2.7.0 + dev: false + + /@smithy/core/2.4.6: + resolution: {integrity: sha512-6lQQp99hnyuNNIzeTYSzCUXJHwvvFLY7hfdFGSJM95tjRDJGfzWYFRBXPaM9766LiiTsQ561KErtbufzUFSYUg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-retry': 3.0.21 + '@smithy/middleware-serde': 3.0.6 + '@smithy/protocol-http': 4.1.3 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/credential-provider-imds/3.2.3: + resolution: {integrity: sha512-VoxMzSzdvkkjMJNE38yQgx4CfnmT+Z+5EUXkg4x7yag93eQkVQgZvN3XBSHC/ylfBbLbAtdu7flTCChX9I+mVg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.7 + '@smithy/property-provider': 3.1.6 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + tslib: 2.7.0 + dev: false + + /@smithy/fetch-http-handler/3.2.8: + resolution: {integrity: sha512-Lqe0B8F5RM7zkw//6avq1SJ8AfaRd3ubFUS1eVp5WszV7p6Ne5hQ4dSuMHDpNRPhgTvj4va9Kd/pcVigHEHRow==} + dependencies: + '@smithy/protocol-http': 4.1.3 + 
'@smithy/querystring-builder': 3.0.6 + '@smithy/types': 3.4.2 + '@smithy/util-base64': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/hash-node/3.0.6: + resolution: {integrity: sha512-c/FHEdKK/7DU2z6ZE91L36ahyXWayR3B+FzELjnYq7wH5YqIseM24V+pWCS9kFn1Ln8OFGTf+pyYPiHZuX0s/Q==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/invalid-dependency/3.0.6: + resolution: {integrity: sha512-czM7Ioq3s8pIXht7oD+vmgy4Wfb4XavU/k/irO8NdXFFOx7YAlsCCcKOh/lJD1mJSYQqiR7NmpZ9JviryD/7AQ==} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/is-array-buffer/2.2.0: + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/is-array-buffer/3.0.0: + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/middleware-content-length/3.0.8: + resolution: {integrity: sha512-VuyszlSO49WKh3H9/kIO2kf07VUwGV80QRiaDxUfP8P8UKlokz381ETJvwLhwuypBYhLymCYyNhB3fLAGBX2og==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/middleware-endpoint/3.1.3: + resolution: {integrity: sha512-KeM/OrK8MVFUsoJsmCN0MZMVPjKKLudn13xpgwIMpGTYpA8QZB2Xq5tJ+RE6iu3A6NhOI4VajDTwBsm8pwwrhg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/middleware-serde': 3.0.6 + '@smithy/node-config-provider': 3.1.7 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + '@smithy/url-parser': 3.0.6 + '@smithy/util-middleware': 3.0.6 + tslib: 2.7.0 + dev: false + + /@smithy/middleware-retry/3.0.21: + resolution: {integrity: 
sha512-/h0fElV95LekVVEJuSw+aI11S1Y3zIUwBc6h9ZbUv43Gl2weXsbQwjLoet6j/Qtb0phfrSxS6pNg6FqgJOWZkA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.7 + '@smithy/protocol-http': 4.1.3 + '@smithy/service-error-classification': 3.0.6 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-retry': 3.0.6 + tslib: 2.7.0 + uuid: 9.0.1 + dev: false + + /@smithy/middleware-serde/3.0.6: + resolution: {integrity: sha512-KKTUSl1MzOM0MAjGbudeaVNtIDo+PpekTBkCNwvfZlKndodrnvRo+00USatiyLOc0ujjO9UydMRu3O9dYML7ag==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/middleware-stack/3.0.6: + resolution: {integrity: sha512-2c0eSYhTQ8xQqHMcRxLMpadFbTXg6Zla5l0mwNftFCZMQmuhI7EbAJMx6R5eqfuV3YbJ3QGyS3d5uSmrHV8Khg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/node-config-provider/3.1.7: + resolution: {integrity: sha512-g3mfnC3Oo8pOI0dYuPXLtdW1WGVb3bR2tkV21GNkm0ZvQjLTtamXAwCWt/FCb0HGvKt3gHHmF1XerG0ICfalOg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/property-provider': 3.1.6 + '@smithy/shared-ini-file-loader': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/node-http-handler/3.2.3: + resolution: {integrity: sha512-/gcm5DJ3k1b1zEInzBGAZC8ntJ+jwrz1NcSIu+9dSXd1FfG0G6QgkDI40tt8/WYUbHtLyo8fEqtm2v29koWo/w==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/abort-controller': 3.1.4 + '@smithy/protocol-http': 4.1.3 + '@smithy/querystring-builder': 3.0.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/property-provider/3.1.6: + resolution: {integrity: sha512-NK3y/T7Q/Bw+Z8vsVs9MYIQ5v7gOX7clyrXcwhhIBQhbPgRl6JDrZbusO9qWDhcEus75Tg+VCxtIRfo3H76fpw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + /@smithy/protocol-http/1.0.1: resolution: {integrity: 
sha512-9OrEn0WfOVtBNYJUjUAn9AOiJ4lzERCJJ/JeZs8E6yajTGxBaFRxUnNBHiNqoDJVg076hY36UmEnPx7xXrvUSg==} engines: {node: '>=14.0.0'} @@ -5576,6 +6576,72 @@ packages: tslib: 2.5.0 dev: false + /@smithy/protocol-http/4.1.3: + resolution: {integrity: sha512-GcbMmOYpH9iRqtC05RbRnc/0FssxSTHlmaNhYBTgSgNCYpdR3Kt88u5GAZTBmouzv+Zlj/VRv92J9ruuDeJuEw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/querystring-builder/3.0.6: + resolution: {integrity: sha512-sQe08RunoObe+Usujn9+R2zrLuQERi3CWvRO3BvnoWSYUaIrLKuAIeY7cMeDax6xGyfIP3x/yFWbEKSXvOnvVg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/querystring-parser/3.0.6: + resolution: {integrity: sha512-UJKw4LlEkytzz2Wq+uIdHf6qOtFfee/o7ruH0jF5I6UAuU+19r9QV7nU3P/uI0l6+oElRHmG/5cBBcGJrD7Ozg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/service-error-classification/3.0.6: + resolution: {integrity: sha512-53SpchU3+DUZrN7J6sBx9tBiCVGzsib2e4sc512Q7K9fpC5zkJKs6Z9s+qbMxSYrkEkle6hnMtrts7XNkMJJMg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + dev: false + + /@smithy/shared-ini-file-loader/3.1.7: + resolution: {integrity: sha512-IA4K2qTJYXkF5OfVN4vsY1hfnUZjaslEE8Fsr/gGFza4TAC2A9NfnZuSY2srQIbt9bwtjHiAayrRVgKse4Q7fA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/signature-v4/4.1.4: + resolution: {integrity: sha512-72MiK7xYukNsnLJI9NqvUHqTu0ziEsfMsYNlWpiJfuGQnCTFKpckThlEatirvcA/LmT1h7rRO+pJD06PYsPu9Q==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.6 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + dev: false + + 
/@smithy/smithy-client/3.3.5: + resolution: {integrity: sha512-7IZi8J3Dr9n3tX+lcpmJ/5tCYIqoXdblFBaPuv0SEKZFRpCxE+TqIWL6I3t7jLlk9TWu3JSvEZAhtjB9yvB+zA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/middleware-endpoint': 3.1.3 + '@smithy/middleware-stack': 3.0.6 + '@smithy/protocol-http': 4.1.3 + '@smithy/types': 3.4.2 + '@smithy/util-stream': 3.1.8 + tslib: 2.7.0 + dev: false + /@smithy/types/1.0.0: resolution: {integrity: sha512-kc1m5wPBHQCTixwuaOh9vnak/iJm21DrSf9UK6yDE5S3mQQ4u11pqAUiKWnlrZnYkeLfAI9UEHj9OaMT1v5Umg==} engines: {node: '>=14.0.0'} @@ -5583,6 +6649,175 @@ packages: tslib: 2.5.0 dev: false + /@smithy/types/3.4.2: + resolution: {integrity: sha512-tHiFcfcVedVBHpmHUEUHOCCih8iZbIAYn9NvPsNzaPm/237I3imdDdZoOC8c87H5HBAVEa06tTgb+OcSWV9g5w==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/url-parser/3.0.6: + resolution: {integrity: sha512-47Op/NU8Opt49KyGpHtVdnmmJMsp2hEwBdyjuFB9M2V5QVOwA7pBhhxKN5z6ztKGrMw76gd8MlbPuzzvaAncuQ==} + dependencies: + '@smithy/querystring-parser': 3.0.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/util-base64/3.0.0: + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-body-length-browser/3.0.0: + resolution: {integrity: sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/util-body-length-node/3.0.0: + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/util-buffer-from/2.2.0: + resolution: {integrity: 
sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-buffer-from/3.0.0: + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-config-provider/3.0.0: + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/util-defaults-mode-browser/3.0.21: + resolution: {integrity: sha512-M/FhTBk4c/SsB91dD/M4gMGfJO7z/qJaM9+XQQIqBOf4qzZYMExnP7R4VdGwxxH8IKMGW+8F0I4rNtVRrcfPoA==} + engines: {node: '>= 10.0.0'} + dependencies: + '@smithy/property-provider': 3.1.6 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + bowser: 2.11.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-defaults-mode-node/3.0.21: + resolution: {integrity: sha512-NiLinPvF86U3S2Pdx/ycqd4bnY5dmFSPNL5KYRwbNjqQFS09M5Wzqk8BNk61/47xCYz1X/6KeiSk9qgYPTtuDw==} + engines: {node: '>= 10.0.0'} + dependencies: + '@smithy/config-resolver': 3.0.8 + '@smithy/credential-provider-imds': 3.2.3 + '@smithy/node-config-provider': 3.1.7 + '@smithy/property-provider': 3.1.6 + '@smithy/smithy-client': 3.3.5 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/util-endpoints/2.1.2: + resolution: {integrity: sha512-FEISzffb4H8DLzGq1g4MuDpcv6CIG15fXoQzDH9SjpRJv6h7J++1STFWWinilG0tQh9H1v2UKWG19Jjr2B16zQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.7 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/util-hex-encoding/3.0.0: + resolution: {integrity: sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: 
{node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/util-middleware/3.0.6: + resolution: {integrity: sha512-BxbX4aBhI1O9p87/xM+zWy0GzT3CEVcXFPBRDoHAM+pV0eSW156pR+PSYEz0DQHDMYDsYAflC2bQNz2uaDBUZQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/util-retry/3.0.6: + resolution: {integrity: sha512-BRZiuF7IwDntAbevqMco67an0Sr9oLQJqqRCsSPZZHYRnehS0LHDAkJk/pSmI7Z8c/1Vet294H7fY2fWUgB+Rg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/service-error-classification': 3.0.6 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@smithy/util-stream/3.1.8: + resolution: {integrity: sha512-hoKOqSmb8FD3WLObuB5hwbM7bNIWgcnvkThokTvVq7J5PKjlLUK5qQQcB9zWLHIoSaIlf3VIv2OxZY2wtQjcRQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/fetch-http-handler': 3.2.8 + '@smithy/node-http-handler': 3.2.3 + '@smithy/types': 3.4.2 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-uri-escape/3.0.0: + resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.7.0 + dev: false + + /@smithy/util-utf8/2.3.0: + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-utf8/3.0.0: + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.7.0 + dev: false + + /@smithy/util-waiter/3.1.5: + resolution: {integrity: sha512-jYOSvM3H6sZe3CHjzD2VQNCjWBJs+4DbtwBMvUp9y5EnnwNa7NQxTeYeQw0CKCAdGGZ3QvVkyJmvbvs5M/B10A==} + 
engines: {node: '>=16.0.0'} + dependencies: + '@smithy/abort-controller': 3.1.4 + '@smithy/types': 3.4.2 + tslib: 2.7.0 + dev: false + + /@tootallnate/once/2.0.0: + resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} + engines: {node: '>= 10'} + dev: false + optional: true + /@tsconfig/node10/1.0.9: resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} @@ -5610,7 +6845,7 @@ packages: typescript: 4.9.5 validator: 13.7.0 yamljs: 0.3.0 - yargs: 17.5.1 + yargs: 17.7.2 dev: false /@tsoa/runtime/5.0.0: @@ -5666,6 +6901,11 @@ packages: '@types/connect': 3.4.35 '@types/node': 18.0.3 + /@types/caseless/0.12.5: + resolution: {integrity: sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==} + dev: false + optional: true + /@types/connect/3.4.35: resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==} dependencies: @@ -5679,6 +6919,10 @@ packages: resolution: {integrity: sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==} dev: true + /@types/dashdash/1.14.3: + resolution: {integrity: sha512-1BKd5kepSM4R+92c1SV1V0tcCletn2RDHh7QnuI9pTUVpVPwGJPi/3JPdaXR9l7TmwRlV9Zn24hiwxybjWR3Lw==} + dev: false + /@types/eslint-visitor-keys/1.0.0: resolution: {integrity: sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==} dev: true @@ -5696,6 +6940,15 @@ packages: '@types/qs': 6.9.7 '@types/range-parser': 1.2.4 + /@types/express-serve-static-core/4.19.5: + resolution: {integrity: sha512-y6W03tvrACO72aijJ5uF02FRq5cgDR9lUxddQ8vyF+GvmjJQqbzDcJngEjURc+ZsG31VI3hODNZJ2URj86pzmg==} + dependencies: + '@types/node': 22.5.4 + '@types/qs': 6.9.7 + '@types/range-parser': 1.2.4 + '@types/send': 0.17.4 + dev: false + /@types/express/4.17.13: resolution: {integrity: 
sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==} dependencies: @@ -5704,6 +6957,15 @@ packages: '@types/qs': 6.9.7 '@types/serve-static': 1.15.0 + /@types/express/4.17.21: + resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} + dependencies: + '@types/body-parser': 1.19.2 + '@types/express-serve-static-core': 4.19.5 + '@types/qs': 6.9.7 + '@types/serve-static': 1.15.0 + dev: false + /@types/glob/7.2.0: resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==} dependencies: @@ -5743,6 +7005,12 @@ packages: resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} dev: true + /@types/jsonwebtoken/9.0.6: + resolution: {integrity: sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw==} + dependencies: + '@types/node': 22.5.4 + dev: false + /@types/lodash/4.14.182: resolution: {integrity: sha512-/THyiqyQAP9AfARo4pF+aCGcyiQ94tX/Is2I7HofNRqoYLgN1PBoOWu2/zTA5zMxzP5EFutMtWtGAFRKUe961Q==} dev: true @@ -5754,6 +7022,10 @@ packages: resolution: {integrity: sha512-Lx+EZoJxUKw4dp8uei9XiUVNlgkYmax5+ovqt6Xf3LzJOnWhlfJw/jLBmqfGVwOP/pDr4HT8bI1WtxK0IChMLw==} dev: true + /@types/mime/1.3.5: + resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==} + dev: false + /@types/mime/3.0.1: resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} @@ -5767,9 +7039,24 @@ packages: '@types/express': 4.17.13 dev: false + /@types/node/10.12.18: + resolution: {integrity: sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==} + dev: false + /@types/node/18.0.3: resolution: {integrity: 
sha512-HzNRZtp4eepNitP+BD6k2L6DROIDG4Q0fm4x+dwfsr6LGmROENnok75VGw40628xf+iR24WeMFcHuuBDUAzzsQ==} + /@types/node/18.19.31: + resolution: {integrity: sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==} + dependencies: + undici-types: 5.26.5 + + /@types/node/22.5.4: + resolution: {integrity: sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==} + dependencies: + undici-types: 6.19.8 + dev: false + /@types/parse-package-name/0.1.0: resolution: {integrity: sha512-+vF4M3Cd3Ec22Uwb+OKhDrSAcXQ5I6evRx+1letx4KzfzycU+AOEDHnCifus8In11i8iYNFXPfzg9HWTcC1h+Q==} dev: true @@ -5797,6 +7084,16 @@ packages: '@types/node': 18.0.3 dev: true + /@types/request/2.48.12: + resolution: {integrity: sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==} + dependencies: + '@types/caseless': 0.12.5 + '@types/node': 22.5.4 + '@types/tough-cookie': 4.0.5 + form-data: 2.5.1 + dev: false + optional: true + /@types/response-time/2.3.5: resolution: {integrity: sha512-4ANzp+I3K7sztFFAGPALWBvSl4ayaDSKzI2Bok+WNz+en2eB2Pvk6VCjR47PBXBWOkEg2r4uWpZOlXA5DNINOQ==} dependencies: @@ -5804,6 +7101,17 @@ packages: '@types/node': 18.0.3 dev: true + /@types/seedrandom/3.0.8: + resolution: {integrity: sha512-TY1eezMU2zH2ozQoAFAQFOPpvP15g+ZgSfTZt31AUUH/Rxtnz3H+A/Sv1Snw2/amp//omibc+AEkTaA8KUeOLQ==} + dev: true + + /@types/send/0.17.4: + resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==} + dependencies: + '@types/mime': 1.3.5 + '@types/node': 22.5.4 + dev: false + /@types/serve-static/1.15.0: resolution: {integrity: sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==} dependencies: @@ -5833,6 +7141,11 @@ packages: '@types/serve-static': 1.15.0 dev: true + /@types/tough-cookie/4.0.5: + resolution: {integrity: 
sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==} + dev: false + optional: true + /@types/traverse/0.6.32: resolution: {integrity: sha512-RBz2uRZVCXuMg93WD//aTS5B120QlT4lR/gL+935QtGsKHLS6sCtZBaKfWjIfk7ZXv/r8mtGbwjVIee6/3XTow==} dev: true @@ -5841,10 +7154,10 @@ packages: resolution: {integrity: sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==} dev: true - /@types/ws/8.5.3: - resolution: {integrity: sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==} + /@types/ws/8.5.10: + resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} dependencies: - '@types/node': 18.0.3 + '@types/node': 18.19.31 dev: true /@types/yargs-parser/21.0.0: @@ -6059,11 +7372,23 @@ packages: eslint-visitor-keys: 3.3.0 dev: true + /@yarnpkg/lockfile/1.1.0: + resolution: {integrity: sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==} + dev: false + /abbrev/1.1.1: resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} dev: false optional: true + /abort-controller/3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + dependencies: + event-target-shim: 5.0.1 + dev: false + optional: true + /accepts/1.3.8: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} @@ -6112,6 +7437,16 @@ packages: dev: false optional: true + /agent-base/7.1.1: + resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} + engines: {node: '>= 14'} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + 
/ajv/6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} dependencies: @@ -6191,6 +7526,7 @@ packages: /are-we-there-yet/2.0.0: resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} engines: {node: '>=10'} + deprecated: This package is no longer supported. dependencies: delegates: 1.0.0 readable-stream: 3.6.0 @@ -6302,6 +7638,12 @@ packages: is-string: 1.0.7 dev: false + /arrify/2.0.1: + resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} + engines: {node: '>=8'} + dev: false + optional: true + /asap/2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} dev: false @@ -6315,7 +7657,6 @@ packages: /assert-plus/1.0.0: resolution: {integrity: sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==} engines: {node: '>=0.8'} - dev: true /assign-symbols/1.0.0: resolution: {integrity: sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==} @@ -6332,6 +7673,13 @@ packages: resolution: {integrity: sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==} dev: true + /async-retry/1.3.3: + resolution: {integrity: sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==} + dependencies: + retry: 0.13.1 + dev: false + optional: true + /async/3.2.4: resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} dev: false @@ -6553,6 +7901,12 @@ packages: /balanced-match/1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + /base-x/3.0.10: + resolution: {integrity: 
sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==} + dependencies: + safe-buffer: 5.2.1 + dev: false + /base/0.11.2: resolution: {integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==} engines: {node: '>=0.10.0'} @@ -6584,6 +7938,11 @@ packages: resolution: {integrity: sha512-LcknSilhIGatDAsY1ak2I8VtGaHNhgMSYVxFrGLXv+xLHytaKZKcaUJJUE7qmBr7h33o5YQwP55pMI0xmkpJwg==} dev: false + /big-integer/1.6.52: + resolution: {integrity: sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==} + engines: {node: '>=0.6'} + dev: false + /big.js/6.2.1: resolution: {integrity: sha512-bCtHMwL9LeDIozFn+oNhhFoq+yQ3BNdnsLSASUxLciOb1vgvpHsIO1dsENiGMgbb4SkP5TrzWzRiLddn8ahVOQ==} dev: false @@ -6592,12 +7951,34 @@ packages: resolution: {integrity: sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw==} dev: false + /binary-searching/2.0.5: + resolution: {integrity: sha512-v4N2l3RxL+m4zDxyxz3Ne2aTmiPn8ZUpKFpdPtO+ItW1NcTCXA7JeHG5GMBSvoKSkQZ9ycS+EouDVxYB9ufKWA==} + dev: false + /bindings/1.5.0: resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} dependencies: file-uri-to-path: 1.0.0 dev: false - optional: true + + /bip32/2.0.6: + resolution: {integrity: sha512-HpV5OMLLGTjSVblmrtYRfFFKuQB+GArM0+XP8HGWfJ5vxYBqo+DesvJwOdC2WJ3bCkZShGf0QIfoIpeomVzVdA==} + engines: {node: '>=6.0.0'} + dependencies: + '@types/node': 10.12.18 + bs58check: 2.1.2 + create-hash: 1.2.0 + create-hmac: 1.1.7 + tiny-secp256k1: 1.1.6 + typeforce: 1.18.0 + wif: 2.0.6 + dev: false + + /bip39/3.1.0: + resolution: {integrity: sha512-c9kiwdk45Do5GL0vJMe7tS95VjCii65mYAH7DfWl3uW8AVzXKQVUm64i3hzVybBDMp9r7j9iNxR85+ul8MdN/A==} + dependencies: + '@noble/hashes': 1.3.0 + dev: false /bluebird/3.7.2: resolution: {integrity: 
sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==} @@ -6694,11 +8075,29 @@ packages: update-browserslist-db: 1.0.10_browserslist@4.21.5 dev: true + /bs58/4.0.1: + resolution: {integrity: sha512-Ok3Wdf5vOIlBrgCvTq96gBkJw+JUEzdBgyaza5HLtPm7yTHkjRy8+JzNyHF7BHa0bNWOQIp3m5YF0nnFcOIKLw==} + dependencies: + base-x: 3.0.10 + dev: false + + /bs58check/2.1.2: + resolution: {integrity: sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA==} + dependencies: + bs58: 4.0.1 + create-hash: 1.2.0 + safe-buffer: 5.2.1 + dev: false + /bser/2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} dependencies: node-int64: 0.4.0 + /buffer-equal-constant-time/1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + dev: false + /buffer-from/1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} @@ -6715,6 +8114,13 @@ packages: isarray: 1.0.0 dev: false + /buffer/6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: false + /byline/5.0.0: resolution: {integrity: sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==} engines: {node: '>=0.10.0'} @@ -6822,6 +8228,18 @@ packages: /ci-info/3.3.2: resolution: {integrity: sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg==} + /ci-info/3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + dev: false + + /cipher-base/1.0.4: + resolution: {integrity: 
sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==} + dependencies: + inherits: 2.0.4 + safe-buffer: 5.2.1 + dev: false + /cjs-module-lexer/1.2.2: resolution: {integrity: sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} @@ -6880,6 +8298,15 @@ packages: strip-ansi: 6.0.1 wrap-ansi: 7.0.0 + /cliui/8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: false + /co/4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} @@ -7077,6 +8504,27 @@ packages: request: 2.88.2 dev: true + /create-hash/1.2.0: + resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==} + dependencies: + cipher-base: 1.0.4 + inherits: 2.0.4 + md5.js: 1.3.5 + ripemd160: 2.0.2 + sha.js: 2.4.11 + dev: false + + /create-hmac/1.1.7: + resolution: {integrity: sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==} + dependencies: + cipher-base: 1.0.4 + create-hash: 1.2.0 + inherits: 2.0.4 + ripemd160: 2.0.2 + safe-buffer: 5.2.1 + sha.js: 2.4.11 + dev: false + /create-require/1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} @@ -7086,7 +8534,7 @@ packages: dependencies: nice-try: 1.0.5 path-key: 2.0.1 - semver: 5.7.1 + semver: 5.7.2 shebang-command: 1.2.0 which: 1.3.1 dev: false @@ -7099,6 +8547,10 @@ packages: shebang-command: 2.0.0 which: 2.0.2 + /crypto-js/4.2.0: + resolution: {integrity: sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==} + dev: false + /crypto-randomuuid/1.0.0: 
resolution: {integrity: sha512-/RC5F4l1SCqD/jazwUF6+t34Cd8zTSAGZ7rvvZu1whZUhD2a5MOGKjSGowoGcpj/cbVZk1ZODIooJEQQq3nNAA==} dev: false @@ -7126,6 +8578,13 @@ packages: assert-plus: 1.0.0 dev: true + /dashdash/2.0.0: + resolution: {integrity: sha512-ElMoAPlrzmF4l0OscF5pPBZv8LhUJBnwh7rHKllUOrwabAr47R1aQIIwC53rc59ycCb7k5Sj1/es+A3Bep/x5w==} + engines: {node: '>=10.x'} + dependencies: + assert-plus: 1.0.0 + dev: false + /date-fns/2.29.3: resolution: {integrity: sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA==} engines: {node: '>=0.11'} @@ -7238,6 +8697,11 @@ packages: resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} engines: {node: '>=0.10.0'} + /define-lazy-prop/2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + dev: false + /define-properties/1.1.4: resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} engines: {node: '>= 0.4'} @@ -7267,6 +8731,11 @@ packages: isobject: 3.0.1 dev: false + /delay/4.4.1: + resolution: {integrity: sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==} + engines: {node: '>=6'} + dev: false + /delay/5.0.0: resolution: {integrity: sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==} engines: {node: '>=10'} @@ -7299,8 +8768,8 @@ packages: engines: {node: '>=0.10.0'} dev: false - /detect-libc/2.0.1: - resolution: {integrity: sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==} + /detect-libc/2.0.3: + resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} engines: {node: '>=8'} dev: false optional: true @@ -7380,8 +8849,18 @@ packages: resolution: {integrity: 
sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} dev: false - /dydx-widdershins/4.0.1: - resolution: {integrity: sha512-LLk/TfqFk3w9UJqLhDzspPj0YS+uK7t2Ec5GRkjhE95YlEghT0sum95cVFKe3j/2lgVymC9C2FV7GDpUeDi1Ng==} + /duplexify/4.1.3: + resolution: {integrity: sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==} + dependencies: + end-of-stream: 1.4.4 + inherits: 2.0.4 + readable-stream: 3.6.0 + stream-shift: 1.0.3 + dev: false + optional: true + + /dydx-widdershins/4.0.8: + resolution: {integrity: sha512-fisnybZb7TpCoV80YKWi8XLykUzwCGtv9NCIizIRkacLd8KqCJjRdquR+nlaza3bSl+kJic1Suw9yfgBq3P92Q==} hasBin: true dependencies: dot: 1.1.3 @@ -7390,10 +8869,10 @@ packages: jgexml: 0.4.4 markdown-it: 12.3.2 markdown-it-emoji: 2.0.2 - node-fetch: 2.6.11 + node-fetch: 2.7.0 oas-resolver: 2.5.6 oas-schema-walker: 1.1.5 - openapi-sampler: 1.3.1 + openapi-sampler: 1.5.1 pinyin: 2.11.2 reftools: 1.1.9 swagger2openapi: 7.0.8 @@ -7413,6 +8892,12 @@ packages: safer-buffer: 2.1.2 dev: true + /ecdsa-sig-formatter/1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + dependencies: + safe-buffer: 5.2.1 + dev: false + /ee-first/1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -7564,6 +9049,10 @@ packages: resolution: {integrity: sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==} dev: false + /es-errors/1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + /es-get-iterator/1.1.3: resolution: {integrity: sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==} dependencies: @@ -8213,6 +9702,12 @@ packages: through: 2.3.8 dev: false + /event-target-shim/5.0.1: 
+ resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + dev: false + optional: true + /events/1.1.1: resolution: {integrity: sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==} engines: {node: '>=0.4.x'} @@ -8396,6 +9891,11 @@ packages: engines: {'0': node >=0.6.0} dev: true + /farmhash-modern/1.1.0: + resolution: {integrity: sha512-6ypT4XfgqJk/F3Yuv4SX26I3doUjt0GTG4a+JgWxXQpxXzTBq8fPUeGHfcYMMDPHJHm3yPOSjaeBwBGAHWXCdA==} + engines: {node: '>=18.0.0'} + dev: false + /fast-deep-equal/3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -8428,12 +9928,34 @@ packages: strnum: 1.0.5 dev: false + /fast-xml-parser/4.4.1: + resolution: {integrity: sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==} + hasBin: true + dependencies: + strnum: 1.0.5 + dev: false + + /fast-xml-parser/4.5.0: + resolution: {integrity: sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==} + hasBin: true + dependencies: + strnum: 1.0.5 + dev: false + optional: true + /fastq/1.13.0: resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} dependencies: reusify: 1.0.4 dev: true + /faye-websocket/0.11.4: + resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==} + engines: {node: '>=0.8.0'} + dependencies: + websocket-driver: 0.7.4 + dev: false + /fb-watchman/2.0.1: resolution: {integrity: sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==} dependencies: @@ -8460,7 +9982,6 @@ packages: /file-uri-to-path/1.0.0: resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} dev: 
false - optional: true /fill-range/4.0.0: resolution: {integrity: sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==} @@ -8513,6 +10034,12 @@ packages: locate-path: 5.0.0 path-exists: 4.0.0 + /find-yarn-workspace-root/2.0.0: + resolution: {integrity: sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==} + dependencies: + micromatch: 4.0.5 + dev: false + /findup-sync/3.0.0: resolution: {integrity: sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==} engines: {node: '>= 0.10'} @@ -8536,6 +10063,27 @@ packages: parse-filepath: 1.0.2 dev: false + /firebase-admin/12.4.0: + resolution: {integrity: sha512-3HOHqJxNmFv0JgK3voyMQgmcibhJN4LQfZfhnZGb6pcONnZxejki4nQ1twsoJlGaIvgQWBtO7rc5mh/cqlOJNA==} + engines: {node: '>=14'} + dependencies: + '@fastify/busboy': 3.0.0 + '@firebase/database-compat': 1.0.7 + '@firebase/database-types': 1.0.4 + '@types/node': 22.5.4 + farmhash-modern: 1.1.0 + jsonwebtoken: 9.0.2 + jwks-rsa: 3.1.0 + node-forge: 1.3.1 + uuid: 10.0.0 + optionalDependencies: + '@google-cloud/firestore': 7.9.0 + '@google-cloud/storage': 7.12.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /flagged-respawn/1.0.1: resolution: {integrity: sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==} engines: {node: '>= 0.10'} @@ -8601,6 +10149,16 @@ packages: mime-types: 2.1.35 dev: true + /form-data/2.5.1: + resolution: {integrity: sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==} + engines: {node: '>= 0.12'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + optional: true + /form-data/3.0.0: resolution: {integrity: sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==} engines: {node: '>= 6'} @@ -8693,6 +10251,9 @@ packages: 
/function-bind/1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + /function-bind/1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + /function.prototype.name/1.1.5: resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} engines: {node: '>= 0.4'} @@ -8704,7 +10265,6 @@ packages: /functional-red-black-tree/1.0.1: resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==} - dev: true /functions-have-names/1.2.3: resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} @@ -8717,6 +10277,7 @@ packages: /gauge/3.0.2: resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} engines: {node: '>=10'} + deprecated: This package is no longer supported. 
dependencies: aproba: 2.0.0 color-support: 1.1.3 @@ -8730,6 +10291,33 @@ packages: dev: false optional: true + /gaxios/6.7.1: + resolution: {integrity: sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==} + engines: {node: '>=14'} + dependencies: + extend: 3.0.2 + https-proxy-agent: 7.0.5 + is-stream: 2.0.1 + node-fetch: 2.7.0 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + + /gcp-metadata/6.1.0: + resolution: {integrity: sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==} + engines: {node: '>=14'} + dependencies: + gaxios: 6.7.1 + json-bigint: 1.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + /gensync/1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -8756,6 +10344,16 @@ packages: has: 1.0.3 has-symbols: 1.0.3 + /get-intrinsic/1.2.4: + resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + has-proto: 1.0.1 + has-symbols: 1.0.3 + hasown: 2.0.2 + /get-own-enumerable-property-symbols/3.0.2: resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} dev: false @@ -8907,14 +10505,64 @@ packages: slash: 3.0.0 dev: true + /google-auth-library/9.14.1: + resolution: {integrity: sha512-Rj+PMjoNFGFTmtItH7gHfbHpGVSb3vmnGK3nwNBqxQF9NoBpttSZI/rc0WiM63ma2uGDQtYEkMHkK9U6937NiA==} + engines: {node: '>=14'} + dependencies: + base64-js: 1.5.1 + ecdsa-sig-formatter: 1.0.11 + gaxios: 6.7.1 + gcp-metadata: 6.1.0 + gtoken: 7.1.0 + jws: 4.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + + /google-gax/4.4.1: + 
resolution: {integrity: sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==} + engines: {node: '>=14'} + dependencies: + '@grpc/grpc-js': 1.11.1 + '@grpc/proto-loader': 0.7.13 + '@types/long': 4.0.2 + abort-controller: 3.0.0 + duplexify: 4.1.3 + google-auth-library: 9.14.1 + node-fetch: 2.7.0 + object-hash: 3.0.0 + proto3-json-serializer: 2.0.2 + protobufjs: 7.3.2 + retry-request: 7.0.2 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + /gopd/1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} dependencies: - get-intrinsic: 1.2.0 + get-intrinsic: 1.2.4 /graceful-fs/4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + /gtoken/7.1.0: + resolution: {integrity: sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==} + engines: {node: '>=14.0.0'} + dependencies: + gaxios: 6.7.1 + jws: 4.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + /handlebars/4.7.7: resolution: {integrity: sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==} engines: {node: '>=0.4.7'} @@ -9018,6 +10666,15 @@ packages: dependencies: function-bind: 1.1.1 + /hash-base/3.1.0: + resolution: {integrity: sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==} + engines: {node: '>=4'} + dependencies: + inherits: 2.0.4 + readable-stream: 3.6.0 + safe-buffer: 5.2.1 + dev: false + /hash.js/1.1.7: resolution: {integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==} dependencies: @@ -9025,6 +10682,12 @@ packages: minimalistic-assert: 1.0.1 dev: false + /hasown/2.0.2: + resolution: {integrity: 
sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + dependencies: + function-bind: 1.1.2 + /hexoid/1.0.0: resolution: {integrity: sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==} engines: {node: '>=8'} @@ -9057,6 +10720,11 @@ packages: unix-dgram: 2.0.4 dev: false + /html-entities/2.5.2: + resolution: {integrity: sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==} + dev: false + optional: true + /html-escaper/2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} @@ -9070,6 +10738,22 @@ packages: statuses: 2.0.1 toidentifier: 1.0.1 + /http-parser-js/0.5.8: + resolution: {integrity: sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==} + dev: false + + /http-proxy-agent/5.0.0: + resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} + engines: {node: '>= 6'} + dependencies: + '@tootallnate/once': 2.0.0 + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + /http-signature/1.2.0: resolution: {integrity: sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==} engines: {node: '>=0.8', npm: '>=1.3.7'} @@ -9094,6 +10778,17 @@ packages: dev: false optional: true + /https-proxy-agent/7.0.5: + resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==} + engines: {node: '>= 14'} + dependencies: + agent-base: 7.1.1 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + /httpsnippet/1.25.0: resolution: {integrity: sha512-jobE6S923cLuf5BPG6Jf+oLBRkPzv2RPp0dwOHcWwj/t9FwV/t9hyZ46kpT3Q5DHn9iFNmGhrcmmFUBqyjoTQg==} engines: {node: '>=4'} 
@@ -9128,6 +10823,10 @@ packages: resolution: {integrity: sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==} dev: false + /ieee754/1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: false + /ignore/5.2.0: resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==} engines: {node: '>= 4'} @@ -9356,6 +11055,13 @@ packages: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} engines: {node: '>= 0.4'} + /is-ci/3.0.1: + resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} + hasBin: true + dependencies: + ci-info: 3.9.0 + dev: false + /is-core-module/2.9.0: resolution: {integrity: sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==} dependencies: @@ -9399,6 +11105,12 @@ packages: kind-of: 6.0.3 dev: false + /is-docker/2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + dev: false + /is-extendable/0.1.1: resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} engines: {node: '>=0.10.0'} @@ -9568,6 +11280,13 @@ packages: engines: {node: '>=0.10.0'} dev: false + /is-wsl/2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + dependencies: + is-docker: 2.2.1 + dev: false + /isarray/1.0.0: resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} dev: false @@ -9775,6 +11494,62 @@ packages: - ts-node dev: true + /jest-cli/28.1.2_e1489a60da1bfeaddb37cf23d6a3b371: + 
resolution: {integrity: sha512-l6eoi5Do/IJUXAFL9qRmDiFpBeEJAnjJb1dcd9i/VWfVWbp3mJhuH50dNtX67Ali4Ecvt4eBkWb4hXhPHkAZTw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.2_ts-node@10.8.2 + '@jest/test-result': 28.1.1 + '@jest/types': 28.1.1 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + import-local: 3.1.0 + jest-config: 28.1.2_e1489a60da1bfeaddb37cf23d6a3b371 + jest-util: 28.1.1 + jest-validate: 28.1.1 + prompts: 2.4.2 + yargs: 17.5.1 + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + + /jest-cli/28.1.2_ts-node@10.8.2: + resolution: {integrity: sha512-l6eoi5Do/IJUXAFL9qRmDiFpBeEJAnjJb1dcd9i/VWfVWbp3mJhuH50dNtX67Ali4Ecvt4eBkWb4hXhPHkAZTw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.2_ts-node@10.8.2 + '@jest/test-result': 28.1.1 + '@jest/types': 28.1.1 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + import-local: 3.1.0 + jest-config: 28.1.2_ts-node@10.8.2 + jest-util: 28.1.1 + jest-validate: 28.1.1 + prompts: 2.4.2 + yargs: 17.5.1 + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + /jest-config/28.1.2: resolution: {integrity: sha512-g6EfeRqddVbjPVBVY4JWpUY4IvQoFRIZcv4V36QkqzE0IGhEC/VkugFeBMAeUE7PRgC8KJF0yvJNDeQRbamEVA==} engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} @@ -9891,6 +11666,85 @@ packages: - supports-color dev: true + /jest-config/28.1.2_e1489a60da1bfeaddb37cf23d6a3b371: + resolution: {integrity: sha512-g6EfeRqddVbjPVBVY4JWpUY4IvQoFRIZcv4V36QkqzE0IGhEC/VkugFeBMAeUE7PRgC8KJF0yvJNDeQRbamEVA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + 
peerDependencies: + '@types/node': '*' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + ts-node: + optional: true + dependencies: + '@babel/core': 7.18.6 + '@jest/test-sequencer': 28.1.1 + '@jest/types': 28.1.1 + '@types/node': 18.19.31 + babel-jest: 28.1.2_@babel+core@7.18.6 + chalk: 4.1.2 + ci-info: 3.3.2 + deepmerge: 4.2.2 + glob: 7.2.3 + graceful-fs: 4.2.10 + jest-circus: 28.1.2 + jest-environment-node: 28.1.2 + jest-get-type: 28.0.2 + jest-regex-util: 28.0.2 + jest-resolve: 28.1.1 + jest-runner: 28.1.2 + jest-util: 28.1.1 + jest-validate: 28.1.1 + micromatch: 4.0.5 + parse-json: 5.2.0 + pretty-format: 28.1.1 + slash: 3.0.0 + strip-json-comments: 3.1.1 + ts-node: 10.8.2_4ea55324100c26d4019c6e6bcc89fac6 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-config/28.1.2_ts-node@10.8.2: + resolution: {integrity: sha512-g6EfeRqddVbjPVBVY4JWpUY4IvQoFRIZcv4V36QkqzE0IGhEC/VkugFeBMAeUE7PRgC8KJF0yvJNDeQRbamEVA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + '@types/node': '*' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + ts-node: + optional: true + dependencies: + '@babel/core': 7.18.6 + '@jest/test-sequencer': 28.1.1 + '@jest/types': 28.1.1 + babel-jest: 28.1.2_@babel+core@7.18.6 + chalk: 4.1.2 + ci-info: 3.3.2 + deepmerge: 4.2.2 + glob: 7.2.3 + graceful-fs: 4.2.10 + jest-circus: 28.1.2 + jest-environment-node: 28.1.2 + jest-get-type: 28.0.2 + jest-regex-util: 28.0.2 + jest-resolve: 28.1.1 + jest-runner: 28.1.2 + jest-util: 28.1.1 + jest-validate: 28.1.1 + micromatch: 4.0.5 + parse-json: 5.2.0 + pretty-format: 28.1.1 + slash: 3.0.0 + strip-json-comments: 3.1.1 + ts-node: 10.8.2_typescript@4.7.4 + transitivePeerDependencies: + - supports-color + dev: true + /jest-diff/28.1.1: resolution: {integrity: sha512-/MUUxeR2fHbqHoMMiffe/Afm+U8U4olFRJ0hiVG2lZatPJcnGxx292ustVu7bULhjV65IYMxRdploAKLbcrsyg==} engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || 
>=17.0.0} @@ -10264,6 +12118,46 @@ packages: - ts-node dev: true + /jest/28.1.2_e1489a60da1bfeaddb37cf23d6a3b371: + resolution: {integrity: sha512-Tuf05DwLeCh2cfWCQbcz9UxldoDyiR1E9Igaei5khjonKncYdc6LDfynKCEWozK0oLE3GD+xKAo2u8x/0s6GOg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.2_ts-node@10.8.2 + '@jest/types': 28.1.1 + import-local: 3.1.0 + jest-cli: 28.1.2_e1489a60da1bfeaddb37cf23d6a3b371 + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + + /jest/28.1.2_ts-node@10.8.2: + resolution: {integrity: sha512-Tuf05DwLeCh2cfWCQbcz9UxldoDyiR1E9Igaei5khjonKncYdc6LDfynKCEWozK0oLE3GD+xKAo2u8x/0s6GOg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.2_ts-node@10.8.2 + '@jest/types': 28.1.1 + import-local: 3.1.0 + jest-cli: 28.1.2_ts-node@10.8.2 + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + /jgexml/0.4.4: resolution: {integrity: sha512-j0AzSWT7LXy3s3i1cdv5NZxUtscocwiBxgOLiEBfitCehm8STdXVrcOlbAWsJFLCq1elZYpQlGqA9k8Z+n9iJA==} hasBin: true @@ -10274,6 +12168,14 @@ packages: engines: {node: '>= 0.6.0'} dev: false + /jose/4.15.9: + resolution: {integrity: sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==} + dev: false + + /js-sha3/0.8.0: + resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} + dev: false + /js-tokens/4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -10305,6 +12207,13 @@ packages: engines: {node: '>=4'} 
hasBin: true + /json-bigint/1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + dependencies: + bignumber.js: 9.0.2 + dev: false + optional: true + /json-parse-even-better-errors/2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} @@ -10349,6 +12258,22 @@ packages: graceful-fs: 4.2.10 dev: false + /jsonwebtoken/9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.5.4 + dev: false + /jsprim/1.4.2: resolution: {integrity: sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==} engines: {node: '>=0.6.0'} @@ -10367,13 +12292,54 @@ packages: object.assign: 4.1.2 dev: true - /kafkajs/2.1.0: - resolution: {integrity: sha512-6IYiOdGWvFPbSbVB+AV3feT+A7vzw5sXm7Ze4QTfP7FRNdY8pGcpiNPvD2lfgYFD8Dm9KbMgBgTt2mf8KaIkzw==} - engines: {node: '>=14.0.0'} + /jwa/1.4.1: + resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: false + + /jwa/2.0.0: + resolution: {integrity: sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: false + optional: true + + /jwks-rsa/3.1.0: + resolution: {integrity: sha512-v7nqlfezb9YfHHzYII3ef2a2j1XnGeSE/bK3WfumaYCqONAIstJbrEGapz4kadScZzEt7zYCN7bucj8C0Mv/Rg==} + engines: {node: '>=14'} + dependencies: 
+ '@types/express': 4.17.21 + '@types/jsonwebtoken': 9.0.6 + debug: 4.3.4 + jose: 4.15.9 + limiter: 1.1.5 + lru-memoizer: 2.3.0 + transitivePeerDependencies: + - supports-color + dev: false + + /jws/3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + dependencies: + jwa: 1.4.1 + safe-buffer: 5.2.1 dev: false - /kafkajs/2.2.3: - resolution: {integrity: sha512-JmzIiLHE/TdQ7b4I2B/DNMtfhTh66fmEaEg7gGkyQXBC6f1A7I2jSjeUsVIJfC8d9YcEIURyBjtOEKBO5OHVhg==} + /jws/4.0.0: + resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + dependencies: + jwa: 2.0.0 + safe-buffer: 5.2.1 + dev: false + optional: true + + /kafkajs/2.2.4: + resolution: {integrity: sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==} engines: {node: '>=14.0.0'} dev: false @@ -10405,6 +12371,12 @@ packages: engines: {node: '>=0.10.0'} dev: false + /klaw-sync/6.0.0: + resolution: {integrity: sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==} + dependencies: + graceful-fs: 4.2.10 + dev: false + /kleur/3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} @@ -10552,10 +12524,19 @@ packages: dependencies: p-locate: 4.1.0 + /lodash.camelcase/4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + dev: false + optional: true + /lodash.castarray/4.4.0: resolution: {integrity: sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==} dev: false + /lodash.clonedeep/4.5.0: + resolution: {integrity: sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==} + dev: false + /lodash.compact/3.0.1: resolution: {integrity: 
sha512-2ozeiPi+5eBXW1CLtzjk8XQFhQOEMwwfxblqeq6EGyTxZJ1bPATqilY0e6g2SLQpP4KuMeuioBhEnWz5Pr7ICQ==} dev: false @@ -10564,10 +12545,34 @@ packages: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} dev: true + /lodash.includes/4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + dev: false + + /lodash.isboolean/3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + dev: false + /lodash.isempty/4.4.0: resolution: {integrity: sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==} dev: false + /lodash.isinteger/4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + dev: false + + /lodash.isnumber/3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + dev: false + + /lodash.isplainobject/4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + dev: false + + /lodash.isstring/4.0.1: + resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + dev: false + /lodash.kebabcase/4.1.1: resolution: {integrity: sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==} dev: false @@ -10576,6 +12581,10 @@ packages: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} dev: true + /lodash.once/4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + dev: false + /lodash.pick/4.4.0: resolution: {integrity: 
sha512-hXt6Ul/5yWjfklSGvLQl8vM//l3FtyHZeuelpzK6mm99pNvN9yTDruNZPEJZD1oWrqo+izBmB7oUfWgcCX7s4Q==} dev: false @@ -10636,6 +12645,13 @@ packages: engines: {node: '>=12'} dev: false + /lru-memoizer/2.3.0: + resolution: {integrity: sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==} + dependencies: + lodash.clonedeep: 4.5.0 + lru-cache: 6.0.0 + dev: false + /lru-queue/0.1.0: resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} dependencies: @@ -10706,6 +12722,14 @@ packages: uc.micro: 1.0.6 dev: false + /md5.js/1.3.5: + resolution: {integrity: sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==} + dependencies: + hash-base: 3.1.0 + inherits: 2.0.4 + safe-buffer: 5.2.1 + dev: false + /mdurl/1.0.1: resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} dev: false @@ -10804,6 +12828,13 @@ packages: hasBin: true dev: false + /mime/3.0.0: + resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} + engines: {node: '>=10.0.0'} + hasBin: true + dev: false + optional: true + /mimic-fn/1.2.0: resolution: {integrity: sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==} engines: {node: '>=4'} @@ -10908,7 +12939,7 @@ packages: hasBin: true dependencies: event-lite: 0.1.3 - ieee754: 1.1.13 + ieee754: 1.2.1 int64-buffer: 0.1.10 isarray: 1.0.0 dev: false @@ -10928,7 +12959,6 @@ packages: /nan/2.16.0: resolution: {integrity: sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==} dev: false - optional: true /nanomatch/1.2.13: resolution: {integrity: sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==} @@ -10993,8 +13023,8 @@ packages: http2-client: 1.3.5 dev: false - 
/node-fetch/2.6.11: - resolution: {integrity: sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==} + /node-fetch/2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} engines: {node: 4.x || >=6.0.0} peerDependencies: encoding: ^0.1.0 @@ -11005,6 +13035,11 @@ packages: whatwg-url: 5.0.0 dev: false + /node-forge/1.3.1: + resolution: {integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==} + engines: {node: '>= 6.13.0'} + dev: false + /node-gyp-build/3.9.0: resolution: {integrity: sha512-zLcTg6P4AbcHPq465ZMFNXx7XpKKJh+7kkN699NiQWisR2uWYOWNWqRHAmbnmKiL4e9aLSlmy5U7rEMUXV59+A==} hasBin: true @@ -11036,7 +13071,7 @@ packages: engines: {node: '>= 10.20.0'} requiresBuild: true dependencies: - '@mapbox/node-pre-gyp': 1.0.10 + '@mapbox/node-pre-gyp': 1.0.11 node-addon-api: 3.2.1 transitivePeerDependencies: - encoding @@ -11072,6 +13107,7 @@ packages: /npmlog/5.0.1: resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==} + deprecated: This package is no longer supported. 
dependencies: are-we-there-yet: 2.0.0 console-control-strings: 1.1.0 @@ -11094,7 +13130,7 @@ packages: /oas-linter/3.2.2: resolution: {integrity: sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==} dependencies: - '@exodus/schemasafe': 1.0.1 + '@exodus/schemasafe': 1.3.0 should: 13.2.3 yaml: 1.10.2 dev: false @@ -11107,7 +13143,7 @@ packages: oas-kit-common: 1.0.8 reftools: 1.1.9 yaml: 1.10.2 - yargs: 17.5.1 + yargs: 17.7.2 dev: false /oas-schema-walker/1.1.5: @@ -11144,6 +13180,12 @@ packages: kind-of: 3.2.2 dev: false + /object-hash/3.0.0: + resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} + engines: {node: '>= 6'} + dev: false + optional: true + /object-inspect/1.12.2: resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} @@ -11297,8 +13339,17 @@ packages: dependencies: mimic-fn: 2.1.0 - /openapi-sampler/1.3.1: - resolution: {integrity: sha512-Ert9mvc2tLPmmInwSyGZS+v4Ogu9/YoZuq9oP3EdUklg2cad6+IGndP9yqJJwbgdXwZibiq5fpv6vYujchdJFg==} + /open/8.4.2: + resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} + engines: {node: '>=12'} + dependencies: + define-lazy-prop: 2.0.0 + is-docker: 2.2.1 + is-wsl: 2.2.0 + dev: false + + /openapi-sampler/1.5.1: + resolution: {integrity: sha512-tIWIrZUKNAsbqf3bd9U1oH6JEXo8LNYuDlXw26By67EygpjT+ArFnsxxyTMjFWRfbqo5ozkvgSQDK69Gd8CddA==} dependencies: '@types/json-schema': 7.0.11 json-pointer: 0.6.2 @@ -11686,6 +13737,14 @@ packages: react-is: 16.13.1 dev: true + /proto3-json-serializer/2.0.2: + resolution: {integrity: sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==} + engines: {node: '>=14.0.0'} + dependencies: + protobufjs: 7.3.2 + dev: false + optional: true + /protobufjs/6.11.3: resolution: {integrity: 
sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg==} hasBin: true @@ -11702,7 +13761,7 @@ packages: '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 '@types/long': 4.0.2 - '@types/node': 18.0.3 + '@types/node': 18.19.31 long: 4.0.0 dev: false @@ -11725,6 +13784,26 @@ packages: long: 5.2.1 dev: false + /protobufjs/7.3.2: + resolution: {integrity: sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg==} + engines: {node: '>=12.0.0'} + requiresBuild: true + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 22.5.4 + long: 5.2.1 + dev: false + optional: true + /proxy-addr/2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -12033,6 +14112,19 @@ packages: engines: {node: '>=0.12'} dev: false + /retry-request/7.0.2: + resolution: {integrity: sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==} + engines: {node: '>=14'} + dependencies: + '@types/request': 2.48.12 + extend: 3.0.2 + teeny-request: 9.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + /retry/0.13.1: resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==} engines: {node: '>= 4'} @@ -12049,6 +14141,13 @@ packages: dependencies: glob: 7.2.3 + /ripemd160/2.0.2: + resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==} + dependencies: + hash-base: 3.1.0 + inherits: 2.0.4 + dev: false + /run-async/2.4.1: resolution: {integrity: 
sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} engines: {node: '>=0.12.0'} @@ -12114,8 +14213,12 @@ packages: resolution: {integrity: sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==} dev: false - /semver/5.7.1: - resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} + /seedrandom/3.0.5: + resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==} + dev: false + + /semver/5.7.2: + resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} hasBin: true dev: false @@ -12186,6 +14289,14 @@ packages: /setprototypeof/1.2.0: resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + /sha.js/2.4.11: + resolution: {integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==} + hasBin: true + dependencies: + inherits: 2.0.4 + safe-buffer: 5.2.1 + dev: false + /shebang-command/1.2.0: resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} engines: {node: '>=0.10.0'} @@ -12222,6 +14333,10 @@ packages: rechoir: 0.6.2 dev: true + /shlex/2.1.2: + resolution: {integrity: sha512-Nz6gtibMVgYeMEhUjp2KuwAgqaJA1K155dU/HuDaEJUGgnmYfVtVZah+uerVWdH8UGnyahhDCgABbYTbs254+w==} + dev: false + /should-equal/2.0.0: resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} dependencies: @@ -12279,6 +14394,11 @@ packages: /sisteransi/1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + /slash/2.0.0: + resolution: {integrity: 
sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==} + engines: {node: '>=6'} + dev: false + /slash/3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -12437,6 +14557,18 @@ packages: duplexer: 0.1.2 dev: false + /stream-events/1.0.5: + resolution: {integrity: sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==} + dependencies: + stubs: 3.0.0 + dev: false + optional: true + + /stream-shift/1.0.3: + resolution: {integrity: sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==} + dev: false + optional: true + /string-length/4.0.2: resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} engines: {node: '>=10'} @@ -12593,6 +14725,11 @@ packages: resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} dev: false + /stubs/3.0.0: + resolution: {integrity: sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==} + dev: false + optional: true + /superagent/8.0.0: resolution: {integrity: sha512-iudipXEel+SzlP9y29UBWGDjB+Zzag+eeA1iLosaR2YHBRr1Q1kC29iBrF2zIVD9fqVbpZnXkN/VJmwFMVyNWg==} engines: {node: '>=6.4.0 <13 || >=14'} @@ -12607,7 +14744,7 @@ packages: mime: 2.6.0 qs: 6.10.3 readable-stream: 3.6.0 - semver: 7.3.7 + semver: 7.5.4 transitivePeerDependencies: - supports-color dev: false @@ -12674,7 +14811,7 @@ packages: hasBin: true dependencies: call-me-maybe: 1.0.2 - node-fetch: 2.6.11 + node-fetch: 2.7.0 node-fetch-h2: 2.3.0 node-readfiles: 0.2.0 oas-kit-common: 1.0.8 @@ -12683,7 +14820,7 @@ packages: oas-validator: 5.0.8 reftools: 1.1.9 yaml: 1.10.2 - yargs: 17.5.1 + yargs: 17.7.2 transitivePeerDependencies: - encoding dev: false @@ -12693,8 +14830,8 @@ packages: engines: 
{node: '>=0.10'} dev: false - /tar/6.1.15: - resolution: {integrity: sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==} + /tar/6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} dependencies: chownr: 2.0.0 @@ -12711,6 +14848,21 @@ packages: engines: {node: '>=8.0.0'} dev: false + /teeny-request/9.0.0: + resolution: {integrity: sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==} + engines: {node: '>=14'} + dependencies: + http-proxy-agent: 5.0.0 + https-proxy-agent: 5.0.1 + node-fetch: 2.7.0 + stream-events: 1.0.5 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + optional: true + /terminal-link/2.1.1: resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} engines: {node: '>=8'} @@ -12765,6 +14917,18 @@ packages: next-tick: 1.1.0 dev: true + /tiny-secp256k1/1.1.6: + resolution: {integrity: sha512-FmqJZGduTyvsr2cF3375fqGHUovSwDi/QytexX1Se4BPuPZpTE5Ftp5fg+EFSuEf3lhZqgCRjEG3ydUQ/aNiwA==} + engines: {node: '>=6.0.0'} + requiresBuild: true + dependencies: + bindings: 1.5.0 + bn.js: 4.12.0 + create-hmac: 1.1.7 + elliptic: 6.5.4 + nan: 2.16.0 + dev: false + /tmp/0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} @@ -12772,6 +14936,11 @@ packages: os-tmpdir: 1.0.2 dev: true + /tmp/0.2.3: + resolution: {integrity: sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==} + engines: {node: '>=14.14'} + dev: false + /tmpl/1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} @@ -12908,6 +15077,67 @@ packages: yn: 3.1.1 dev: true + 
/ts-node/10.8.2_4ea55324100c26d4019c6e6bcc89fac6: + resolution: {integrity: sha512-LYdGnoGddf1D6v8REPtIH+5iq/gTDuZqv2/UJUU7tKjuEU8xVZorBM+buCGNjj+pGEud+sOoM4CX3/YzINpENA==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.9 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.3 + '@types/node': 18.19.31 + acorn: 8.7.1 + acorn-walk: 8.2.0 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 4.7.4 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + dev: true + + /ts-node/10.8.2_typescript@4.7.4: + resolution: {integrity: sha512-LYdGnoGddf1D6v8REPtIH+5iq/gTDuZqv2/UJUU7tKjuEU8xVZorBM+buCGNjj+pGEud+sOoM4CX3/YzINpENA==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.9 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.3 + acorn: 8.7.1 + acorn-walk: 8.2.0 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 4.7.4 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + dev: true + /tsconfig-paths/3.14.1: resolution: {integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==} dependencies: @@ -12931,6 +15161,10 @@ packages: /tslib/2.5.0: resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} + /tslib/2.7.0: + resolution: {integrity: sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==} + dev: false + /tsoa/5.1.1: 
resolution: {integrity: sha512-U6+5CyD3+u9Dtza0fBnv4+lgmbZEskYljzRpKf3edGCAGtMKD2rfjtDw9jUdTfWb1FEDvsnR3pRvsSGBXaOdsA==} engines: {node: '>=12.0.0', yarn: '>=1.9.4'} @@ -13002,6 +15236,10 @@ packages: for-each: 0.3.3 is-typed-array: 1.1.10 + /typeforce/1.18.0: + resolution: {integrity: sha512-7uc1O8h1M1g0rArakJdf0uLRSSgFcYexrVoKo+bzJd32gd4gDy2L/Z+8/FjPnU9ydY3pEnVPtr9FyscYY60K1g==} + dev: false + /typescript/4.7.4: resolution: {integrity: sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==} engines: {node: '>=4.2.0'} @@ -13037,6 +15275,13 @@ packages: engines: {node: '>=0.10.0'} dev: false + /undici-types/5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + + /undici-types/6.19.8: + resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + dev: false + /unicode-canonical-property-names-ecmascript/2.0.0: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} @@ -13164,10 +15409,20 @@ packages: which-typed-array: 1.1.9 dev: false + /utility-types/3.11.0: + resolution: {integrity: sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==} + engines: {node: '>= 4'} + dev: false + /utils-merge/1.0.1: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} + /uuid/10.0.0: + resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} + hasBin: true + dev: false + /uuid/3.4.0: resolution: {integrity: sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==} deprecated: Please upgrade to version 7 or higher. 
Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details. @@ -13183,6 +15438,11 @@ packages: hasBin: true dev: false + /uuid/9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + dev: false + /v8-compile-cache-lib/3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} @@ -13245,6 +15505,20 @@ packages: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} dev: false + /websocket-driver/0.7.4: + resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==} + engines: {node: '>=0.8.0'} + dependencies: + http-parser-js: 0.5.8 + safe-buffer: 5.2.1 + websocket-extensions: 0.1.4 + dev: false + + /websocket-extensions/0.1.4: + resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==} + engines: {node: '>=0.8.0'} + dev: false + /whatwg-url/5.0.0: resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} dependencies: @@ -13297,6 +15571,12 @@ packages: dev: false optional: true + /wif/2.0.6: + resolution: {integrity: sha512-HIanZn1zmduSF+BQhkE+YXIbEiH0xPr1012QbFEGB0xsKqJii0/SqJjyn8dFv6y36kOznMgMB+LGcbZTJ1xACQ==} + dependencies: + bs58check: 2.1.2 + dev: false + /winston-transport/4.5.0: resolution: {integrity: sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q==} engines: {node: '>= 6.4.0'} @@ -13379,12 +15659,12 @@ packages: optional: true dev: false - /ws/8.8.1: - resolution: {integrity: sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==} + /ws/8.16.0: + resolution: {integrity: 
sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 + utf-8-validate: '>=5.0.2' peerDependenciesMeta: bufferutil: optional: true @@ -13458,8 +15738,8 @@ packages: decamelize: 1.2.0 dev: false - /yargs-parser/21.0.1: - resolution: {integrity: sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==} + /yargs-parser/21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} /yargs/12.0.5: @@ -13504,7 +15784,20 @@ packages: require-directory: 2.1.1 string-width: 4.2.3 y18n: 5.0.8 - yargs-parser: 21.0.1 + yargs-parser: 21.1.1 + + /yargs/17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + dependencies: + cliui: 8.0.1 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + dev: false /yn/3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} diff --git a/indexer/scripts/deploy-commit-to-env.sh b/indexer/scripts/deploy-commit-to-env.sh index 03395768797..ea4a7b1d8aa 100755 --- a/indexer/scripts/deploy-commit-to-env.sh +++ b/indexer/scripts/deploy-commit-to-env.sh @@ -23,7 +23,7 @@ case $env in "dev4") account=525975847385;; "dev5") account=917958511744;; "staging") account=677285201534;; - "public-testnet") account=013339450148;; # public testnet + "testnet") account=013339450148;; # public testnet "mainnet") account=332066407361;; # mainnet *) account=329916310755;; esac diff --git a/indexer/services/auxo/package.json b/indexer/services/auxo/package.json index eb2e4b08924..0974b293a94 100644 --- a/indexer/services/auxo/package.json +++ 
b/indexer/services/auxo/package.json @@ -21,7 +21,7 @@ "@aws-sdk/client-lambda": "^3.353.0", "@dydxprotocol-indexer/base": "workspace:^0.0.1", "dotenv-flow": "^3.2.0", - "kafkajs": "2.1.0", + "kafkajs": "^2.2.4", "lodash": "^4.17.21", "redis": "2.8.0" }, diff --git a/indexer/services/auxo/src/constants.ts b/indexer/services/auxo/src/constants.ts index 39f9efedb16..dc5cc4ce4ed 100644 --- a/indexer/services/auxo/src/constants.ts +++ b/indexer/services/auxo/src/constants.ts @@ -10,6 +10,13 @@ export const BAZOOKA_DB_MIGRATION_PAYLOAD: Uint8Array = new TextEncoder().encode }), ); +export const BAZOOKA_DB_MIGRATION_AND_CREATE_KAFKA_PAYLOAD: Uint8Array = new TextEncoder().encode( + JSON.stringify({ + migrate: true, + create_kafka_topics: true, + }), +); + export const ECS_SERVICE_NAMES: EcsServiceNames[] = [ EcsServiceNames.COMLINK, EcsServiceNames.ENDER, diff --git a/indexer/services/auxo/src/index.ts b/indexer/services/auxo/src/index.ts index 8bd286e778c..f350fa56f36 100644 --- a/indexer/services/auxo/src/index.ts +++ b/indexer/services/auxo/src/index.ts @@ -30,6 +30,7 @@ import _ from 'lodash'; import config from './config'; import { + BAZOOKA_DB_MIGRATION_AND_CREATE_KAFKA_PAYLOAD, BAZOOKA_DB_MIGRATION_PAYLOAD, BAZOOKA_LAMBDA_FUNCTION_NAME, ECS_SERVICE_NAMES, @@ -40,7 +41,7 @@ import { AuxoEventJson, EcsServiceNames, TaskDefinitionArnMap } from './types'; /** * Upgrades all services and run migrations * 1. Upgrade Bazooka - * 2. Run db migration in Bazooka + * 2. Run db migration in Bazooka, and update kafka topics * 3. Create new ECS Task Definition for ECS Services with new image * 4. Upgrade all ECS Services (comlink, ender, roundtable, socks, vulcan) */ @@ -66,8 +67,18 @@ export async function handler( // 1. Upgrade Bazooka await upgradeBazooka(lambda, ecr, event); - // 2. Run db migration in Bazooka - await runDbMigration(lambda); + // 2. 
Run db migration in Bazooka, + // boolean flag used to determine if new kafka topics should be created + await runDbAndKafkaMigration(event.addNewKafkaTopics, lambda); + + if (event.onlyRunDbMigrationAndCreateKafkaTopics) { + return { + statusCode: 200, + body: JSON.stringify({ + message: 'success', + }), + }; + } // 3. Create new ECS Task Definition for ECS Services with new image const taskDefinitionArnMap: TaskDefinitionArnMap = await createNewEcsTaskDefinitions( @@ -192,16 +203,20 @@ async function getImageDetail( } -async function runDbMigration( +async function runDbAndKafkaMigration( + createNewKafkaTopics: boolean, lambda: ECRClient, ): Promise { logger.info({ at: 'index#runDbMigration', message: 'Running db migration', }); + const payload = createNewKafkaTopics + ? BAZOOKA_DB_MIGRATION_AND_CREATE_KAFKA_PAYLOAD + : BAZOOKA_DB_MIGRATION_PAYLOAD; const response: InvokeCommandOutput = await lambda.send(new InvokeCommand({ FunctionName: BAZOOKA_LAMBDA_FUNCTION_NAME, - Payload: BAZOOKA_DB_MIGRATION_PAYLOAD, + Payload: payload, // RequestResponse means that the lambda is synchronously invoked InvocationType: 'RequestResponse', })); diff --git a/indexer/services/auxo/src/types.ts b/indexer/services/auxo/src/types.ts index 315f38ae0b6..73885cb9c4c 100644 --- a/indexer/services/auxo/src/types.ts +++ b/indexer/services/auxo/src/types.ts @@ -8,11 +8,13 @@ } */ export interface AuxoEventJson { - upgrade_tag: string; - prefix: string; - region: string; + upgrade_tag: string, + prefix: string, + region: string, // In our naming we often times use the appreviated region name - regionAbbrev: string; + regionAbbrev: string, + addNewKafkaTopics: boolean, + onlyRunDbMigrationAndCreateKafkaTopics: boolean, } // EcsServiceName to task definition arn mapping diff --git a/indexer/services/bazooka/__tests__/index.test.ts b/indexer/services/bazooka/__tests__/index.test.ts index f31b92a893a..89ad8bd0cb1 100644 --- a/indexer/services/bazooka/__tests__/index.test.ts +++ 
b/indexer/services/bazooka/__tests__/index.test.ts @@ -21,12 +21,23 @@ describe('index', () => { adminDeleteSpy .mockRejectedValueOnce(new Error('test')) .mockResolvedValueOnce(Promise); + fetchTopicMetadataSpy + .mockResolvedValue({ + topics: [ + { + topic: KafkaTopics.TO_ENDER, + partitions: [ + {}, + {}, + ], + }, + ], + }); await clearKafkaTopic(1, 5, 3, [KafkaTopics.TO_ENDER], - 2, KafkaTopics.TO_ENDER); expect(adminDeleteSpy).toHaveBeenCalledTimes(2); }); @@ -37,13 +48,24 @@ describe('index', () => { .mockRejectedValueOnce(new Error('test')) .mockRejectedValueOnce(new Error('test')) .mockRejectedValueOnce(new Error('test')); + fetchTopicMetadataSpy + .mockResolvedValue({ + topics: [ + { + topic: KafkaTopics.TO_ENDER, + partitions: [ + {}, + {}, + ], + }, + ], + }); await expect(async () => { await clearKafkaTopic(1, 5, 3, [KafkaTopics.TO_ENDER], - 2, KafkaTopics.TO_ENDER); }).rejects.toThrowError('test'); expect(adminDeleteSpy).toHaveBeenCalledTimes(3); @@ -86,15 +108,6 @@ describe('index', () => { clear_redis: false, force: false, } as APIGatewayEvent & BazookaEventJson], - [{ - migrate: false, - clear_db: false, - reset_db: false, - create_kafka_topics: true, - clear_kafka_topics: false, - clear_redis: false, - force: false, - } as APIGatewayEvent & BazookaEventJson], [{ migrate: false, clear_db: false, diff --git a/indexer/services/bazooka/package.json b/indexer/services/bazooka/package.json index 6b2e7e4539a..fa2828daede 100644 --- a/indexer/services/bazooka/package.json +++ b/indexer/services/bazooka/package.json @@ -23,7 +23,7 @@ "@dydxprotocol-indexer/v4-proto-parser": "workspace:^0.0.1", "@dydxprotocol-indexer/v4-protos": "workspace:^0.0.1", "dotenv-flow": "^3.2.0", - "kafkajs": "2.1.0", + "kafkajs": "^2.2.4", "lodash": "^4.17.21", "redis": "2.8.0", "long": "^5.2.1", diff --git a/indexer/services/bazooka/src/index.ts b/indexer/services/bazooka/src/index.ts index 5d78cc85f8e..efd30af255f 100644 --- a/indexer/services/bazooka/src/index.ts +++ 
b/indexer/services/bazooka/src/index.ts @@ -18,46 +18,48 @@ const KAFKA_TOPICS: KafkaTopics[] = [ KafkaTopics.TO_WEBSOCKETS_TRADES, KafkaTopics.TO_WEBSOCKETS_MARKETS, KafkaTopics.TO_WEBSOCKETS_CANDLES, + KafkaTopics.TO_WEBSOCKETS_BLOCK_HEIGHT, ]; const DEFAULT_NUM_REPLICAS: number = 3; const KAFKA_TOPICS_TO_PARTITIONS: { [key in KafkaTopics]: number } = { [KafkaTopics.TO_ENDER]: 1, - [KafkaTopics.TO_VULCAN]: 60, + [KafkaTopics.TO_VULCAN]: 210, [KafkaTopics.TO_WEBSOCKETS_ORDERBOOKS]: 1, - [KafkaTopics.TO_WEBSOCKETS_SUBACCOUNTS]: 1, + [KafkaTopics.TO_WEBSOCKETS_SUBACCOUNTS]: 3, [KafkaTopics.TO_WEBSOCKETS_TRADES]: 1, [KafkaTopics.TO_WEBSOCKETS_MARKETS]: 1, [KafkaTopics.TO_WEBSOCKETS_CANDLES]: 1, + [KafkaTopics.TO_WEBSOCKETS_BLOCK_HEIGHT]: 1, }; export interface BazookaEventJson { // Run knex migrations - migrate: boolean; + migrate: boolean, // Clearing data inside the database, but not deleting the tables and schemas - clear_db: boolean; + clear_db: boolean, // Reset the database and all migrations - reset_db: boolean; + reset_db: boolean, // Create all kafka topics with replication and parition counts - create_kafka_topics: boolean; + create_kafka_topics: boolean, // Clearing data inside all topics, not removing the Kafka Topics - clear_kafka_topics: boolean; + clear_kafka_topics: boolean, // Clearing all data in redis - clear_redis: boolean; + clear_redis: boolean, // Force flag that is required to perform any breaking actions in testnet/mainnet // A breaking action is any action in bazooka other that db migration - force: boolean; + force: boolean, // Send stateful orders to Vulcan. This is done during Indexer fast sync to // uncross the orderbook. 
- send_stateful_orders_to_vulcan: boolean; + send_stateful_orders_to_vulcan: boolean, } // eslint-disable-next-line @typescript-eslint/require-await @@ -74,8 +76,7 @@ export async function handler( if (config.PREVENT_BREAKING_CHANGES_WITHOUT_FORCE && event.force !== true) { if (event.clear_db === true || event.reset_db === true || - event.create_kafka_topics === true || event.clear_kafka_topics === true || - event.clear_redis === true) { + event.clear_kafka_topics === true || event.clear_redis === true) { logger.error({ at: 'index#handler', message: 'Cannot run bazooka without force flag set to "true" because' + @@ -181,6 +182,7 @@ async function maybeClearAndCreateKafkaTopics( if (event.create_kafka_topics) { await createKafkaTopics(existingKafkaTopics); + await partitionKafkaTopics(); } if (event.clear_kafka_topics) { @@ -196,7 +198,7 @@ async function createKafkaTopics( _.forEach(KAFKA_TOPICS, (kafkaTopic: KafkaTopics) => { if (_.includes(existingKafkaTopics, kafkaTopic)) { logger.info({ - at: 'index#clearKafkaTopics', + at: 'index#createKafkaTopics', message: `Cannot create kafka topic that does exist: ${kafkaTopic}`, }); return; @@ -232,17 +234,47 @@ async function createKafkaTopics( }); } +async function partitionKafkaTopics(): Promise { + for (const kafkaTopic of KAFKA_TOPICS) { + const topicMetadata: { topics: Array } = await admin.fetchTopicMetadata({ + topics: [kafkaTopic], + }); + if (topicMetadata.topics.length === 1) { + if (topicMetadata.topics[0].partitions.length !== KAFKA_TOPICS_TO_PARTITIONS[kafkaTopic]) { + logger.info({ + at: 'index#partitionKafkaTopics', + message: `Setting topic ${kafkaTopic} to ${KAFKA_TOPICS_TO_PARTITIONS[kafkaTopic]} partitions`, + }); + await admin.createPartitions({ + validateOnly: false, + topicPartitions: [{ + topic: kafkaTopic, + count: KAFKA_TOPICS_TO_PARTITIONS[kafkaTopic], + }], + }); + logger.info({ + at: 'index#partitionKafkaTopics', + message: `Successfully set topic ${kafkaTopic} to 
${KAFKA_TOPICS_TO_PARTITIONS[kafkaTopic]} partitions`, + }); + } + } + } +} + async function clearKafkaTopics( existingKafkaTopics: string[], ): Promise { - await Promise.all( - _.map(KAFKA_TOPICS_TO_PARTITIONS, - clearKafkaTopic.bind(null, - 1, - config.CLEAR_KAFKA_TOPIC_RETRY_MS, - config.CLEAR_KAFKA_TOPIC_MAX_RETRIES, - existingKafkaTopics)), - ); + // Concurrent calls to clear all topics caused the failure: + // TypeError: Cannot destructure property 'partitions' of 'high.pop(...)' as it is undefined. + for (const topic of KAFKA_TOPICS) { + await clearKafkaTopic( + 1, + config.CLEAR_KAFKA_TOPIC_RETRY_MS, + config.CLEAR_KAFKA_TOPIC_MAX_RETRIES, + existingKafkaTopics, + topic, + ); + } } export async function clearKafkaTopic( @@ -250,7 +282,6 @@ export async function clearKafkaTopic( retryMs: number = config.CLEAR_KAFKA_TOPIC_RETRY_MS, maxRetries: number = config.CLEAR_KAFKA_TOPIC_MAX_RETRIES, existingKafkaTopics: string[], - numPartitions: number, kafkaTopic: KafkaTopics, ): Promise { const kafkaTopicExists: boolean = _.includes(existingKafkaTopics, kafkaTopic); @@ -263,6 +294,20 @@ export async function clearKafkaTopic( return; } + const topicMetadata: { topics: Array } = await admin.fetchTopicMetadata({ + topics: [kafkaTopic], + }); + + if (topicMetadata.topics.length !== 1) { + logger.info({ + at: 'index#clearKafkaTopics', + message: `Cannot clear kafka topic that does not exist: ${kafkaTopic}`, + }); + return; + } + + const numPartitions = topicMetadata.topics[0].partitions.length; + logger.info({ at: 'index#clearKafkaTopics', message: `Clearing kafka topic: ${kafkaTopic}`, @@ -280,10 +325,6 @@ export async function clearKafkaTopic( ), }); } catch (error) { - const topicMetadata: { topics: Array } = await admin.fetchTopicMetadata({ - topics: [kafkaTopic], - }); - logger.error({ at: 'index#clearKafkaTopics', message: 'Failed to delete topic records', @@ -321,7 +362,6 @@ export async function clearKafkaTopic( retryMs, maxRetries, existingKafkaTopics, - 
numPartitions, kafkaTopic, ); } diff --git a/indexer/services/bazooka/src/vulcan-helpers.ts b/indexer/services/bazooka/src/vulcan-helpers.ts index eacbaedb37b..e76ed7cbb18 100644 --- a/indexer/services/bazooka/src/vulcan-helpers.ts +++ b/indexer/services/bazooka/src/vulcan-helpers.ts @@ -22,6 +22,7 @@ import { } from '@dydxprotocol-indexer/v4-protos'; import { Long } from '@dydxprotocol-indexer/v4-protos/build/codegen/helpers'; import Big from 'big.js'; +import { IHeaders } from 'kafkajs'; import _ from 'lodash'; import config from './config'; @@ -30,6 +31,7 @@ import { ZERO } from './constants'; interface VulcanMessage { key: Buffer, value: OffChainUpdateV1, + headers?: IHeaders, } type IndexerOrderIdMap = { [orderUuid: string]: IndexerOrderId }; @@ -129,6 +131,7 @@ export async function sendStatefulOrderMessages() { return { key: message.key, value: Buffer.from(Uint8Array.from(OffChainUpdateV1.encode(message.value).finish())), + headers: message.headers, }; }); diff --git a/indexer/services/comlink/.env.test b/indexer/services/comlink/.env.test index 167901ba36d..30ae74517a7 100644 --- a/indexer/services/comlink/.env.test +++ b/indexer/services/comlink/.env.test @@ -7,3 +7,6 @@ DB_PORT=5436 RATE_LIMIT_ENABLED=false INDEXER_LEVEL_GEOBLOCKING_ENABLED=false EXPOSE_SET_COMPLIANCE_ENDPOINT=true +FIREBASE_PROJECT_ID=projectID +FIREBASE_PRIVATE_KEY='-----BEGIN RSA PRIVATE KEY----------END RSA PRIVATE KEY-----' +FIREBASE_CLIENT_EMAIL=clientEmail@test.com diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/addresses-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/addresses-controller.test.ts index cd5b9894f42..11837e63a28 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/addresses-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/addresses-controller.test.ts @@ -10,11 +10,38 @@ import { BlockTable, liquidityTierRefresher, SubaccountTable, + FirebaseNotificationTokenTable, } from 
'@dydxprotocol-indexer/postgres'; import { RequestMethod } from '../../../../src/types'; import request from 'supertest'; import { getFixedRepresentation, sendRequest } from '../../../helpers/helpers'; import { stats } from '@dydxprotocol-indexer/base'; +import config from '../../../../src/config'; +import * as complianceUtils from '../../../../src/helpers/compliance/compliance-utils'; +import { Secp256k1 } from '@cosmjs/crypto'; +import { toBech32 } from '@cosmjs/encoding'; +import { DateTime } from 'luxon'; +import { verifyADR36Amino } from '@keplr-wallet/cosmos'; +import { defaultAddress3 } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; + +jest.mock('@cosmjs/crypto', () => ({ + ...jest.requireActual('@cosmjs/crypto'), + Secp256k1: { + verifySignature: jest.fn(), + }, + ExtendedSecp256k1Signature: { + fromFixedLength: jest.fn(), + }, +})); + +jest.mock('@cosmjs/encoding', () => ({ + toBech32: jest.fn(), +})); + +jest.mock('@keplr-wallet/cosmos', () => ({ + ...jest.requireActual('@keplr-wallet/cosmos'), + verifyADR36Amino: jest.fn(), +})); describe('addresses-controller#V4', () => { const latestHeight: string = '3'; @@ -42,6 +69,7 @@ describe('addresses-controller#V4', () => { afterEach(async () => { await dbHelpers.clearData(); + jest.clearAllMocks(); }); const invalidAddress: string = 'invalidAddress'; @@ -81,6 +109,8 @@ describe('addresses-controller#V4', () => { equity: getFixedRepresentation(159500), freeCollateral: getFixedRepresentation(152000), marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, openPerpetualPositions: { [testConstants.defaultPerpetualMarket.ticker]: { market: testConstants.defaultPerpetualMarket.ticker, @@ -105,6 +135,7 @@ describe('addresses-controller#V4', () => { createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, exitPrice: null, closedAt: null, + subaccountNumber: 
testConstants.defaultSubaccount.subaccountNumber, }, }, assetPositions: { @@ -113,12 +144,14 @@ describe('addresses-controller#V4', () => { size: '9500', side: PositionSide.LONG, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, [testConstants.defaultAsset2.symbol]: { symbol: testConstants.defaultAsset2.symbol, size: testConstants.defaultAssetPosition2.size, side: PositionSide.SHORT, assetId: testConstants.defaultAssetPosition2.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, }, }, @@ -155,6 +188,8 @@ describe('addresses-controller#V4', () => { equity: getFixedRepresentation(10000), freeCollateral: getFixedRepresentation(10000), marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, openPerpetualPositions: {}, assetPositions: { [testConstants.defaultAsset.symbol]: { @@ -162,6 +197,7 @@ describe('addresses-controller#V4', () => { size: testConstants.defaultAssetPosition.size, side: PositionSide.LONG, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, }, }, @@ -176,7 +212,7 @@ describe('addresses-controller#V4', () => { it('Get / with non-existent address and subaccount number returns 404', async () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, - path: `/v4/addresses/${invalidAddress}/subaccountNumber/` + + path: `/v4/addresses/${defaultAddress3}/subaccountNumber/` + `${testConstants.defaultSubaccount.subaccountNumber}`, expectedStatus: 404, }); @@ -184,7 +220,7 @@ describe('addresses-controller#V4', () => { expect(response.body).toEqual({ errors: [ { - msg: `No subaccount found with address ${invalidAddress} and ` + + msg: `No subaccount found with address ${defaultAddress3} and ` + `subaccountNumber ${testConstants.defaultSubaccount.subaccountNumber}`, }, ], @@ -238,6 
+274,8 @@ describe('addresses-controller#V4', () => { equity: getFixedRepresentation(159500), freeCollateral: getFixedRepresentation(152000), marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, openPerpetualPositions: { [testConstants.defaultPerpetualMarket.ticker]: { market: testConstants.defaultPerpetualMarket.ticker, @@ -262,6 +300,7 @@ describe('addresses-controller#V4', () => { createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, exitPrice: null, closedAt: null, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, }, assetPositions: { @@ -270,12 +309,14 @@ describe('addresses-controller#V4', () => { size: '9500', side: PositionSide.LONG, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, [testConstants.defaultAsset2.symbol]: { symbol: testConstants.defaultAsset2.symbol, size: testConstants.defaultAssetPosition2.size, side: PositionSide.SHORT, assetId: testConstants.defaultAssetPosition2.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }, }, }, @@ -285,6 +326,30 @@ describe('addresses-controller#V4', () => { equity: getFixedRepresentation(0), freeCollateral: getFixedRepresentation(0), marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount2.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, + openPerpetualPositions: {}, + assetPositions: {}, + }, + { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + equity: getFixedRepresentation(0), + freeCollateral: getFixedRepresentation(0), + marginEnabled: true, + updatedAtHeight: testConstants.isolatedSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, + openPerpetualPositions: {}, + assetPositions: {}, + }, + { + address: testConstants.defaultAddress, + subaccountNumber: 
testConstants.isolatedSubaccount2.subaccountNumber, + equity: getFixedRepresentation(0), + freeCollateral: getFixedRepresentation(0), + marginEnabled: true, + updatedAtHeight: testConstants.isolatedSubaccount2.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, openPerpetualPositions: {}, assetPositions: {}, }, @@ -322,6 +387,8 @@ describe('addresses-controller#V4', () => { equity: getFixedRepresentation(0), freeCollateral: getFixedRepresentation(0), marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, assetPositions: {}, openPerpetualPositions: {}, }, @@ -345,15 +412,471 @@ describe('addresses-controller#V4', () => { expect(response.body).toEqual({ errors: [ { - msg: `No subaccounts found for address ${invalidAddress}`, + msg: 'No subaccounts found for address invalidAddress', }, ], }); - expect(stats.increment).toHaveBeenCalledWith('comlink.addresses-controller.response_status_code.404', 1, + }); + }); + + describe('/addresses/:address/parentSubaccountNumber/:parentSubaccountNumber', () => { + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('Get /:address/parentSubaccountNumber/ gets all subaccounts for the provided parent', async () => { + await PerpetualPositionTable.create( + testConstants.defaultPerpetualPosition, + ); + + await Promise.all([ + AssetPositionTable.upsert(testConstants.defaultAssetPosition), + AssetPositionTable.upsert({ + ...testConstants.defaultAssetPosition2, + subaccountId: testConstants.defaultSubaccountId, + }), + AssetPositionTable.upsert(testConstants.isolatedSubaccountAssetPosition), + FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + fundingIndex: initialFundingIndex, + effectiveAtHeight: testConstants.createdHeight, + }), + FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + eventId: testConstants.defaultTendermintEventId2, + effectiveAtHeight: latestHeight, + }), + 
]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/addresses/${testConstants.defaultAddress}/parentSubaccountNumber/${parentSubaccountNumber}`, + }); + + expect(response.body).toEqual({ + subaccount: { + address: testConstants.defaultAddress, + parentSubaccountNumber, + equity: getFixedRepresentation(164500), + freeCollateral: getFixedRepresentation(157000), + childSubaccounts: [ + { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + equity: getFixedRepresentation(159500), + freeCollateral: getFixedRepresentation(152000), + marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, + openPerpetualPositions: { + [testConstants.defaultPerpetualMarket.ticker]: { + market: testConstants.defaultPerpetualMarket.ticker, + size: testConstants.defaultPerpetualPosition.size, + side: testConstants.defaultPerpetualPosition.side, + entryPrice: getFixedRepresentation( + testConstants.defaultPerpetualPosition.entryPrice!, + ), + maxSize: testConstants.defaultPerpetualPosition.maxSize, + // 200000 + 10*(10000-10050)=199500 + netFunding: getFixedRepresentation('199500'), + // sumClose=0, so realized Pnl is the same as the net funding of the position. + // Unsettled funding is funding payments that already "happened" but not reflected + // in the subaccount's balance yet, so it's considered a part of realizedPnl. 
+ realizedPnl: getFixedRepresentation('199500'), + // size * (index-entry) = 10*(15000-20000) = -50000 + unrealizedPnl: getFixedRepresentation(-50000), + status: testConstants.defaultPerpetualPosition.status, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen, + sumClose: testConstants.defaultPerpetualPosition.sumClose, + createdAt: testConstants.defaultPerpetualPosition.createdAt, + createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, + exitPrice: null, + closedAt: null, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + }, + assetPositions: { + [testConstants.defaultAsset.symbol]: { + symbol: testConstants.defaultAsset.symbol, + size: '9500', + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + [testConstants.defaultAsset2.symbol]: { + symbol: testConstants.defaultAsset2.symbol, + size: testConstants.defaultAssetPosition2.size, + side: PositionSide.SHORT, + assetId: testConstants.defaultAssetPosition2.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + }, + }, + { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + equity: getFixedRepresentation(5000), + freeCollateral: getFixedRepresentation(5000), + marginEnabled: true, + updatedAtHeight: testConstants.isolatedSubaccount.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, + openPerpetualPositions: {}, + assetPositions: { + [testConstants.defaultAsset.symbol]: { + symbol: testConstants.defaultAsset.symbol, + size: testConstants.isolatedSubaccountAssetPosition.size, + side: PositionSide.LONG, + assetId: testConstants.isolatedSubaccountAssetPosition.assetId, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + }, + }, + }, + { + address: testConstants.defaultAddress, + subaccountNumber: 
testConstants.isolatedSubaccount2.subaccountNumber, + equity: getFixedRepresentation(0), + freeCollateral: getFixedRepresentation(0), + marginEnabled: true, + updatedAtHeight: testConstants.isolatedSubaccount2.updatedAtHeight, + latestProcessedBlockHeight: latestHeight, + openPerpetualPositions: {}, + assetPositions: {}, + }, + ], + }, + }); + expect(stats.increment).toHaveBeenCalledWith('comlink.addresses-controller.response_status_code.200', 1, { - path: '/:address', + path: '/:address/parentSubaccountNumber/:parentSubaccountNumber', method: 'GET', }); }); }); + + it('Get /:address/parentSubaccountNumber/ with non-existent address returns 404', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/addresses/${defaultAddress3}/parentSubaccountNumber/` + + `${testConstants.defaultSubaccount.subaccountNumber}`, + expectedStatus: 404, + }); + + expect(response.body).toEqual({ + errors: [ + { + msg: `No subaccounts found for address ${defaultAddress3} and ` + + `parentSubaccountNumber ${testConstants.defaultSubaccount.subaccountNumber}`, + }, + ], + }); + expect(stats.increment).toHaveBeenCalledWith('comlink.addresses-controller.response_status_code.404', 1, + { + path: '/:address/parentSubaccountNumber/:parentSubaccountNumber', + method: 'GET', + }); + }); + + it('Get /:address/parentSubaccountNumber/ with invalid parentSubaccount number returns 400', async () => { + const parentSubaccountNumber: number = 128; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/addresses/${testConstants.defaultAddress}/parentSubaccountNumber/${parentSubaccountNumber}`, + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + location: 'params', + msg: 'parentSubaccountNumber must be a non-negative integer less than 128', + param: 'parentSubaccountNumber', + value: '128', + }, + ], + }); + }); + + describe('/:address/testNotification', () => { + it('Post 
/:address/testNotification throws error in production', async () => { + // Mock the config to simulate production environment + const originalNodeEnv = config.NODE_ENV; + config.NODE_ENV = 'production'; + + const response: request.Response = await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/testNotification`, + expectedStatus: 404, + }); + + expect(response.statusCode).toEqual(404); + // Restore the original NODE_ENV + config.NODE_ENV = originalNodeEnv; + }); + }); + + describe('/:address/registerToken', () => { + const validToken = 'validToken'; + const validLanguage = 'en'; + const validTimestamp = 1726076825; + const validMessage = 'Valid message'; + const validSignedMessage = 'Valid signed message'; + const validPubKey = 'Valid public key'; + + const verifySignatureMock = Secp256k1.verifySignature as jest.Mock; + const verifyADR36AminoMock = verifyADR36Amino as jest.Mock; + const toBech32Mock = toBech32 as jest.Mock; + let statsSpy = jest.spyOn(stats, 'increment'); + + beforeEach(() => { + verifySignatureMock.mockResolvedValue(true); + toBech32Mock.mockReturnValue(testConstants.defaultAddress); + jest.spyOn(DateTime, 'now').mockReturnValue(DateTime.fromSeconds(validTimestamp)); // Mock current time + statsSpy = jest.spyOn(stats, 'increment'); + }); + + afterEach(() => { + jest.clearAllMocks(); + jest.restoreAllMocks(); + }); + + it('Post /:address/registerToken with valid params returns 200', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 200, + }); + + expect(response.body).toEqual({}); + 
expect(statsSpy).toHaveBeenCalledWith('comlink.addresses-controller.response_status_code.200', 1, { + path: '/:address/registerToken', + method: 'POST', + }); + }); + + it('should register a new token', async () => { + // Register a new token + const newToken = 'newToken'; + await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: newToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 200, + }); + + // Check that old tokens are deleted and new token is registered + const remainingTokens = await FirebaseNotificationTokenTable.findAll({}, []); + expect(remainingTokens.map((t) => t.token)).toContain(newToken); + }); + + it('Post /:address/registerToken with valid params calls TokenTable registerToken', async () => { + const registerTokenSpy = jest.spyOn(FirebaseNotificationTokenTable, 'registerToken'); + const token = 'validToken'; + const language = 'en'; + await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 200, + }); + expect(registerTokenSpy).toHaveBeenCalledWith( + token, testConstants.defaultAddress, language, + ); + expect(statsSpy).toHaveBeenCalledWith('comlink.addresses-controller.response_status_code.200', 1, { + path: '/:address/registerToken', + method: 'POST', + }); + }); + + it('Post /:address/registerToken with invalid address returns 400', async () => { + toBech32Mock.mockReturnValue('InvalidAddress'); + const response: request.Response = await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${invalidAddress}/registerToken`, + body: { 
+ token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + msg: 'Address invalidAddress is not a valid dYdX V4 address', + }, + ], + }); + }); + + it.each([ + ['validToken', '', 'Invalid language code', 'language'], + ['validToken', 'qq', 'Invalid language code', 'language'], + ])('Post /:address/registerToken with bad language params returns 400', async (token, language, errorMsg, errorParam) => { + const response: request.Response = await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token, + language, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + location: 'body', + msg: errorMsg, + param: errorParam, + value: language, + }, + ], + }); + }); + + it.each([ + ['', 'en', 'Token cannot be empty', 'token'], + ])('Post /:address/registerToken with bad token params returns 400', async (token, language, errorMsg, errorParam) => { + const response: request.Response = await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: '', + language, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + location: 'body', + msg: errorMsg, + param: errorParam, + value: token, + }, + ], + }); + }); + + it('Post /:address/registerToken with invalid signature returns 400', async () => { + verifySignatureMock.mockResolvedValue(false); + + const response: request.Response = 
await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: 'Invalid signature', + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [{ msg: 'Signature verification failed' }], + }); + }); + + it('Post /:address/registerToken with Keplr wallet calls validateSignatureKeplr', async () => { + verifyADR36AminoMock.mockReturnValue(true); + const validateSignatureKeplr = jest.spyOn(complianceUtils, 'validateSignatureKeplr'); + await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: true, + }, + expectedStatus: 200, + }); + + expect(validateSignatureKeplr).toHaveBeenCalledWith( + expect.anything(), + testConstants.defaultAddress, + validMessage, + validSignedMessage, + validPubKey, + ); + }); + + it('Post /:address/registerToken with non-Keplr wallet calls validateSignature', async () => { + const validateSignature = jest.spyOn(complianceUtils, 'validateSignature'); + await sendRequest({ + type: RequestMethod.POST, + path: `/v4/addresses/${testConstants.defaultAddress}/registerToken`, + body: { + token: validToken, + language: validLanguage, + timestamp: validTimestamp, + message: validMessage, + signedMessage: validSignedMessage, + pubKey: validPubKey, + walletIsKeplr: false, + }, + expectedStatus: 200, + }); + + expect(validateSignature).toHaveBeenCalledWith( + expect.anything(), + complianceUtils.AccountVerificationRequiredAction.REGISTER_TOKEN, + testConstants.defaultAddress, + validTimestamp, + validMessage, + validSignedMessage, + validPubKey, + '', + ); + }); 
+ }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/affiliates-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/affiliates-controller.test.ts new file mode 100644 index 00000000000..ee77111d30c --- /dev/null +++ b/indexer/services/comlink/__tests__/controllers/api/v4/affiliates-controller.test.ts @@ -0,0 +1,385 @@ +import { + dbHelpers, + testConstants, + testMocks, + SubaccountTable, + SubaccountUsernamesTable, + WalletTable, + AffiliateReferredUsersTable, + AffiliateInfoTable, + AffiliateInfoCreateObject, +} from '@dydxprotocol-indexer/postgres'; +import { + AffiliateSnapshotRequest, + AffiliateSnapshotResponse, + RequestMethod, + AffiliateSnapshotResponseObject, +} from '../../../../src/types'; +import request from 'supertest'; +import { sendRequest } from '../../../helpers/helpers'; + +describe('affiliates-controller#V4', () => { + beforeAll(async () => { + await dbHelpers.migrate(); + }); + + afterAll(async () => { + await dbHelpers.teardown(); + }); + + describe('GET /metadata', () => { + beforeEach(async () => { + await testMocks.seedData(); + await SubaccountUsernamesTable.create(testConstants.defaultSubaccountUsername); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('should return referral code for address with username', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + + expect(response.body).toEqual({ + // username is the referral code + referralCode: testConstants.defaultSubaccountUsername.username, + isVolumeEligible: false, + isAffiliate: false, + }); + }); + + it('should fail if address does not exist', async () => { + const nonExistentAddress = 'adgsakhasgt'; + await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${nonExistentAddress}`, + 
expectedStatus: 404, // helper performs expect on status + }); + }); + + it('should classify not volume eligible', async () => { + await WalletTable.update( + { + address: testConstants.defaultWallet.address, + totalVolume: '0', + totalTradingRewards: '0', + }, + ); + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + expect(response.body).toEqual({ + referralCode: testConstants.defaultSubaccountUsername.username, + isVolumeEligible: false, + isAffiliate: false, + }); + }); + + it('should classify volume eligible', async () => { + await WalletTable.update( + { + address: testConstants.defaultWallet.address, + totalVolume: '100000', + totalTradingRewards: '0', + }, + ); + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + expect(response.body).toEqual({ + referralCode: testConstants.defaultSubaccountUsername.username, + isVolumeEligible: true, + isAffiliate: false, + }); + }); + + it('should classify is not affiliate', async () => { + // AffiliateReferredUsersTable is empty + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + expect(response.body).toEqual({ + referralCode: testConstants.defaultSubaccountUsername.username, + isVolumeEligible: false, + isAffiliate: false, + }); + }); + + it('should classify is affiliate', async () => { + await AffiliateReferredUsersTable.create({ + affiliateAddress: testConstants.defaultWallet.address, + refereeAddress: testConstants.defaultWallet2.address, + referredAtBlock: '1', + }); + const response: 
request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + expect(response.body).toEqual({ + referralCode: testConstants.defaultSubaccountUsername.username, + isVolumeEligible: false, + isAffiliate: true, + }); + }); + + it('should fail if subaccount username not found', async () => { + // create defaultWallet2 without subaccount username + await WalletTable.create(testConstants.defaultWallet2); + await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/metadata?address=${testConstants.defaultWallet2.address}`, + expectedStatus: 500, // helper performs expect on status + }); + }); + }); + + describe('GET /address', () => { + beforeEach(async () => { + await testMocks.seedData(); + await SubaccountUsernamesTable.create(testConstants.defaultSubaccountUsername); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('should return address for a valid referral code', async () => { + const referralCode: string = testConstants.defaultSubaccountUsername.username; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/address?referralCode=${referralCode}`, + expectedStatus: 200, // helper performs expect on status + }); + + expect(response.body).toEqual({ + address: testConstants.defaultWallet.address, + }); + }); + + it('should fail when referral code not found', async () => { + const nonExistentReferralCode = 'BadCode123'; + await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/address?referralCode=${nonExistentReferralCode}`, + expectedStatus: 404, // helper performs expect on status + }); + }); + }); + + describe('GET /snapshot', () => { + const defaultInfo: AffiliateInfoCreateObject = testConstants.defaultAffiliateInfo; + const defaultInfo2: AffiliateInfoCreateObject = testConstants.defaultAffiliateInfo2; + 
const defaultInfo3: AffiliateInfoCreateObject = testConstants.defaultAffiliateInfo3; + + beforeEach(async () => { + await testMocks.seedData(); + // Create username for defaultWallet + await SubaccountUsernamesTable.create(testConstants.defaultSubaccountUsername); + + // Create defaultWallet2, subaccount, and username + await WalletTable.create(testConstants.defaultWallet2); + await SubaccountTable.create(testConstants.defaultSubaccountDefaultWalletAddress); + await SubaccountUsernamesTable.create( + testConstants.subaccountUsernameWithDefaultWalletAddress, + ); + + // Create defaultWallet3, create subaccount, create username + await WalletTable.create(testConstants.defaultWallet3); + await SubaccountTable.create(testConstants.defaultSubaccountWithAlternateAddress); + await SubaccountUsernamesTable.create(testConstants.subaccountUsernameWithAlternativeAddress); + + // Create affiliate infos + await Promise.all([ + AffiliateInfoTable.create(defaultInfo), + AffiliateInfoTable.create(defaultInfo2), + AffiliateInfoTable.create(defaultInfo3), + ]); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('should return snapshots when optional params not specified', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/affiliates/snapshot', + }); + + expect(response.status).toBe(200); + expect(response.body.affiliateList).toHaveLength(3); + expect(response.body.currentOffset).toEqual(0); + expect(response.body.total).toEqual(3); + }); + + it('should filter by address', async () => { + const req: AffiliateSnapshotRequest = { + addressFilter: [testConstants.defaultWallet.address], + }; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/snapshot?addressFilter=${req.addressFilter!.join(',')}`, + expectedStatus: 200, // helper performs expect on status, + }); + + const expectedResponse: AffiliateSnapshotResponse = { + affiliateList: [ + 
affiliateInfoCreateToResponseObject( + defaultInfo, testConstants.defaultSubaccountUsername.username, + ), + ], + total: 1, + currentOffset: 0, + }; + expect(response.body.affiliateList).toHaveLength(1); + expect(response.body.affiliateList[0]).toEqual(expectedResponse.affiliateList[0]); + expect(response.body.currentOffset).toEqual(expectedResponse.currentOffset); + expect(response.body.total).toEqual(expectedResponse.total); + }); + + it('should handle no results', async () => { + const req: AffiliateSnapshotRequest = { + addressFilter: ['nonexistentaddress'], + }; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/snapshot?addressFilter=${req.addressFilter!.join(',')}`, + expectedStatus: 200, // helper performs expect on status, + }); + + const expectedResponse: AffiliateSnapshotResponse = { + affiliateList: [], + total: 0, + currentOffset: 0, + }; + expect(response.body.affiliateList).toHaveLength(0); + expect(response.body.affiliateList[0]).toEqual(expectedResponse.affiliateList[0]); + expect(response.body.currentOffset).toEqual(expectedResponse.currentOffset); + expect(response.body.total).toEqual(expectedResponse.total); + }); + + it('should handle offset out of bounds', async () => { + const offset = 5; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/snapshot?offset=${offset}`, + expectedStatus: 200, // helper performs expect on status, + }); + + const expectedResponse: AffiliateSnapshotResponse = { + affiliateList: [], + total: 0, + currentOffset: offset, + }; + expect(response.body.affiliateList).toHaveLength(0); + expect(response.body.affiliateList[0]).toEqual(expectedResponse.affiliateList[0]); + expect(response.body.currentOffset).toEqual(expectedResponse.currentOffset); + expect(response.body.total).toEqual(expectedResponse.total); + }); + + it('should return snapshots when all params specified', async () => { + const req: 
AffiliateSnapshotRequest = { + addressFilter: [testConstants.defaultWallet.address, testConstants.defaultWallet2.address], + sortByAffiliateEarning: true, + }; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/snapshot?${req.addressFilter!.map((address) => `addressFilter[]=${address}`).join('&')}&offset=1&limit=1&sortByAffiliateEarning=${req.sortByAffiliateEarning}`, + expectedStatus: 200, // helper performs expect on status + }); + + // addressFilter removes defaultInfo3 + // sortorder -> [defaultInfo2, defaultInfo] + // offset=1 -> defaultInfo + const expectedResponse: AffiliateSnapshotResponse = { + affiliateList: [ + affiliateInfoCreateToResponseObject( + defaultInfo, testConstants.defaultSubaccountUsername.username, + ), + ], + total: 1, + currentOffset: 1, + }; + + expect(response.body.affiliateList).toHaveLength(1); + expect(response.body.currentOffset).toEqual(expectedResponse.currentOffset); + expect(response.body.total).toEqual(expectedResponse.total); + expect(response.body.affiliateList[0]).toEqual(expectedResponse.affiliateList[0]); + }); + + }); + + describe('GET /total_volume', () => { + beforeEach(async () => { + await testMocks.seedData(); + await WalletTable.update( + { + address: testConstants.defaultWallet.address, + totalVolume: '100000', + totalTradingRewards: '0', + }, + ); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('should return total volume for a valid address', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/affiliates/total_volume?address=${testConstants.defaultWallet.address}`, + expectedStatus: 200, // helper performs expect on status + }); + + expect(response.body).toEqual({ + totalVolume: 100000, + }); + }); + + it('should fail if address does not exist', async () => { + const nonExistentAddress = 'adgsakhasgt'; + await sendRequest({ + type: RequestMethod.GET, + path: 
`/v4/affiliates/metadata?address=${nonExistentAddress}`, + expectedStatus: 404, // helper performs expect on status + }); + }); + }); +}); + +function affiliateInfoCreateToResponseObject( + info: AffiliateInfoCreateObject, + username: string, +): AffiliateSnapshotResponseObject { + return { + affiliateAddress: info.address, + affiliateReferralCode: username, + affiliateEarnings: Number(info.affiliateEarnings), + affiliateReferredTrades: + Number(info.referredTakerTrades) + Number(info.referredMakerTrades), + affiliateTotalReferredFees: Number(info.totalReferredMakerFees) + + Number(info.totalReferredTakerFees) + + Number(info.totalReferredMakerRebates), + affiliateReferredUsers: Number(info.totalReferredUsers), + affiliateReferredNetProtocolEarnings: Number(info.totalReferredMakerFees) + + Number(info.totalReferredTakerFees) + + Number(info.totalReferredMakerRebates) - + Number(info.affiliateEarnings), + affiliateReferredTotalVolume: Number(info.referredTotalVolume), + affiliateReferredMakerFees: Number(info.totalReferredMakerFees), + affiliateReferredTakerFees: Number(info.totalReferredTakerFees), + affiliateReferredMakerRebates: Number(info.totalReferredMakerRebates), + }; +} diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/asset-positions-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/asset-positions-controller.test.ts index c76c53b7a8e..2385cd52aa1 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/asset-positions-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/asset-positions-controller.test.ts @@ -38,7 +38,7 @@ describe('asset-positions-controller#V4', () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, path: `/v4/assetPositions?address=${testConstants.defaultAddress}` + - `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, }); const 
expectedAssetPosition: AssetPositionResponseObject = { @@ -46,6 +46,7 @@ describe('asset-positions-controller#V4', () => { side: PositionSide.LONG, size, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }; expect(response.body.positions).toEqual( @@ -57,7 +58,7 @@ describe('asset-positions-controller#V4', () => { ); }); - it('Get /assetPositions gets short asset and perpetual positions', async () => { + it('Get /assetPositions gets short asset positions', async () => { await testMocks.seedData(); await AssetPositionTable.upsert({ ...testConstants.defaultAssetPosition, @@ -67,7 +68,7 @@ describe('asset-positions-controller#V4', () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, path: `/v4/assetPositions?address=${testConstants.defaultAddress}` + - `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, }); const expectedAssetPosition: AssetPositionResponseObject = { @@ -75,6 +76,7 @@ describe('asset-positions-controller#V4', () => { side: PositionSide.SHORT, size: testConstants.defaultAssetPosition.size, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }; expect(response.body.positions).toEqual( @@ -101,7 +103,7 @@ describe('asset-positions-controller#V4', () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, path: `/v4/assetPositions?address=${testConstants.defaultAddress}` + - `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, }); expect(response.body.positions).toEqual( @@ -110,6 +112,7 @@ describe('asset-positions-controller#V4', () => { size: testConstants.defaultAssetPosition.size, side: PositionSide.LONG, assetId: testConstants.defaultAssetPosition.assetId, + 
subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }], ); }); @@ -130,11 +133,13 @@ describe('asset-positions-controller#V4', () => { subaccountId: testConstants.defaultSubaccountId, size: '0', }), + // Funding index at height 0 is 10000 FundingIndexUpdatesTable.create({ ...testConstants.defaultFundingIndexUpdate, fundingIndex: '10000', effectiveAtHeight: testConstants.createdHeight, }), + // Funding index at height 3 is 10050 FundingIndexUpdatesTable.create({ ...testConstants.defaultFundingIndexUpdate, eventId: testConstants.defaultTendermintEventId2, @@ -145,17 +150,164 @@ describe('asset-positions-controller#V4', () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, path: `/v4/assetPositions?address=${testConstants.defaultAddress}` + - `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, }); expect(response.body.positions).toEqual( [{ symbol: testConstants.defaultAsset.symbol, + // funding index difference = 10050 (height 3) - 10000 (height 0) = 50 + // size = 10000 (initial size) - 50 (funding index diff) * 10(position size) size: '9500', side: PositionSide.LONG, assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }], ); }); + + it('Get /assetPositions/parentSubaccountNumber gets long and short asset positions across subaccounts', async () => { + await testMocks.seedData(); + await Promise.all([ + AssetPositionTable.upsert(testConstants.defaultAssetPosition), + AssetPositionTable.upsert({ + ...testConstants.isolatedSubaccountAssetPosition, + isLong: false, + }), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/assetPositions/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); 
+ + const expectedAssetPosition: AssetPositionResponseObject = { + symbol: testConstants.defaultAsset.symbol, + side: PositionSide.LONG, + size: testConstants.defaultAssetPosition.size, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }; + const expectedIsolatedAssetPosition: AssetPositionResponseObject = { + symbol: testConstants.defaultAsset.symbol, + side: PositionSide.SHORT, + size: testConstants.isolatedSubaccountAssetPosition.size, + assetId: testConstants.isolatedSubaccountAssetPosition.assetId, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + }; + + expect(response.body.positions).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedAssetPosition, + }), + expect.objectContaining({ + ...expectedIsolatedAssetPosition, + }), + ]), + ); + }); + + it('Get /assetPositions/parentSubaccountNumber does not get asset positions with 0 size', async () => { + await testMocks.seedData(); + + await Promise.all([ + await AssetPositionTable.upsert(testConstants.defaultAssetPosition), + await AssetPositionTable.upsert({ + ...testConstants.isolatedSubaccountAssetPosition, + size: '0', + }), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/assetPositions/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + expect(response.body.positions).toEqual( + [{ + symbol: testConstants.defaultAsset.symbol, + size: testConstants.defaultAssetPosition.size, + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }], + ); + }); + + it('Get /assetPositions/parentSubaccountNumber gets USDC asset positions adjusted by unsettled funding', async () => { + await testMocks.seedData(); + await 
BlockTable.create({ + ...testConstants.defaultBlock, + blockHeight: '3', + }); + await Promise.all([ + PerpetualPositionTable.create(testConstants.defaultPerpetualPosition), + PerpetualPositionTable.create(testConstants.isolatedPerpetualPosition), + AssetPositionTable.upsert(testConstants.defaultAssetPosition), + AssetPositionTable.upsert(testConstants.isolatedSubaccountAssetPosition), + FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + fundingIndex: '10000', + effectiveAtHeight: testConstants.createdHeight, + }), + FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + eventId: testConstants.defaultTendermintEventId2, + effectiveAtHeight: '3', + }), + FundingIndexUpdatesTable.create({ + ...testConstants.isolatedMarketFundingIndexUpdate, + fundingIndex: '10000', + effectiveAtHeight: testConstants.createdHeight, + }), + FundingIndexUpdatesTable.create({ + ...testConstants.isolatedMarketFundingIndexUpdate, + eventId: testConstants.defaultTendermintEventId2, + effectiveAtHeight: '3', + }), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/assetPositions/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + const expectedAssetPosition: AssetPositionResponseObject = { + symbol: testConstants.defaultAsset.symbol, + side: PositionSide.LONG, + // funding index difference = 10050 (height 3) - 10000 (height 0) = 50 + // size = 10000 (initial size) - 50 (funding index diff) * 10(position size) + size: '9500', + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }; + const expectedIsolatedAssetPosition: AssetPositionResponseObject = { + symbol: testConstants.defaultAsset.symbol, + side: PositionSide.LONG, + // funding index difference = 10200 (height 3) - 10000 (height 0) = 
200 + // size = 5000 (initial size) - 200 (funding index diff) * 10(position size) + size: '3000', + assetId: testConstants.isolatedSubaccountAssetPosition.assetId, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + }; + + expect(response.body.positions).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedAssetPosition, + }), + expect.objectContaining({ + ...expectedIsolatedAssetPosition, + }), + ]), + ); + }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/compliance-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/compliance-controller.test.ts index 34e12bddaae..571abd389f6 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/compliance-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/compliance-controller.test.ts @@ -9,7 +9,11 @@ import { } from '@dydxprotocol-indexer/postgres'; import { stats } from '@dydxprotocol-indexer/base'; import { complianceProvider } from '../../../../src/helpers/compliance/compliance-clients'; -import { ComplianceClientResponse, INDEXER_COMPLIANCE_BLOCKED_PAYLOAD } from '@dydxprotocol-indexer/compliance'; +import { + ComplianceClientResponse, + INDEXER_COMPLIANCE_BLOCKED_PAYLOAD, + NOT_IN_BLOCKCHAIN_RISK_SCORE, +} from '@dydxprotocol-indexer/compliance'; import { ratelimitRedis } from '../../../../src/caches/rate-limiters'; import { redis } from '@dydxprotocol-indexer/redis'; import { DateTime } from 'luxon'; @@ -257,5 +261,33 @@ describe('compliance-controller#V4', () => { { provider: complianceProvider.provider }, ); }); + + it('GET /screen for invalid address does not upsert compliance data', async () => { + const invalidAddress: string = 'invalidAddress'; + const notInBlockchainRiskScore: string = NOT_IN_BLOCKCHAIN_RISK_SCORE.toString(); + + jest.spyOn(complianceProvider.client, 'getComplianceResponse').mockImplementation( + (address: string): Promise => { + return Promise.resolve({ + 
address, + blocked, + riskScore: notInBlockchainRiskScore, + }); + }, + ); + + const response: any = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/screen?address=${invalidAddress}`, + }); + + expect(response.body).toEqual({ + restricted: false, + reason: undefined, + }); + + const data = await ComplianceTable.findAll({}, [], {}); + expect(data).toHaveLength(0); + }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/compliance-v2-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/compliance-v2-controller.test.ts index 03d7e4ab551..f2ae431a4a7 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/compliance-v2-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/compliance-v2-controller.test.ts @@ -10,19 +10,26 @@ import { import { getIpAddr } from '../../../../src/lib/utils'; import { sendRequest } from '../../../helpers/helpers'; import { RequestMethod } from '../../../../src/types'; -import { logger, stats } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { redis } from '@dydxprotocol-indexer/redis'; import { ratelimitRedis } from '../../../../src/caches/rate-limiters'; import { ComplianceControllerHelper } from '../../../../src/controllers/api/v4/compliance-controller'; import config from '../../../../src/config'; import { DateTime } from 'luxon'; -import { ComplianceAction } from '../../../../src/controllers/api/v4/compliance-v2-controller'; -import { ExtendedSecp256k1Signature, Secp256k1, sha256 } from '@cosmjs/crypto'; -import { getGeoComplianceReason } from '../../../../src/helpers/compliance/compliance-utils'; -import { isRestrictedCountryHeaders } from '@dydxprotocol-indexer/compliance'; +import { ExtendedSecp256k1Signature, Secp256k1 } from '@cosmjs/crypto'; +import { verifyADR36Amino } from '@keplr-wallet/cosmos'; +import { getGeoComplianceReason, ComplianceAction } from 
'../../../../src/helpers/compliance/compliance-utils'; +import { isRestrictedCountryHeaders, isWhitelistedAddress } from '@dydxprotocol-indexer/compliance'; +import { toBech32 } from '@cosmjs/encoding'; jest.mock('@dydxprotocol-indexer/compliance'); -jest.mock('../../../../src/helpers/compliance/compliance-utils'); +jest.mock('../../../../src/helpers/compliance/compliance-utils', () => { + const actualModule = jest.requireActual('../../../../src/helpers/compliance/compliance-utils'); + return { + ...actualModule, + getGeoComplianceReason: jest.fn(), + }; +}); jest.mock('../../../../src/lib/utils', () => ({ ...jest.requireActual('../../../../src/lib/utils'), @@ -38,10 +45,23 @@ jest.mock('@cosmjs/crypto', () => ({ }, })); +jest.mock('@keplr-wallet/cosmos', () => ({ + ...jest.requireActual('@keplr-wallet/cosmos'), + verifyADR36Amino: jest.fn(), +})); + +jest.mock('@cosmjs/encoding', () => ({ + toBech32: jest.fn(), +})); + describe('ComplianceV2Controller', () => { const ipAddr: string = '192.168.1.1'; - const ipAddrMock: jest.Mock = (getIpAddr as unknown as jest.Mock); + const verifySignatureMock = Secp256k1.verifySignature as jest.Mock; + const fromFixedLengthMock = ExtendedSecp256k1Signature.fromFixedLength as jest.Mock; + const verifyADR36AminoMock = verifyADR36Amino as jest.Mock; + const ipAddrMock = getIpAddr as jest.Mock; + const toBech32Mock = toBech32 as jest.Mock; beforeAll(async () => { await dbHelpers.migrate(); @@ -54,8 +74,11 @@ describe('ComplianceV2Controller', () => { }); describe('GET', () => { + let isWhitelistedAddressSpy: jest.SpyInstance; + beforeEach(async () => { ipAddrMock.mockReturnValue(ipAddr); + isWhitelistedAddressSpy = isWhitelistedAddress as unknown as jest.Mock; await testMocks.seedData(); }); @@ -111,6 +134,7 @@ describe('ComplianceV2Controller', () => { }); expect(response.body.status).toEqual(ComplianceStatus.BLOCKED); expect(response.body.reason).toEqual(ComplianceReason.COMPLIANCE_PROVIDER); + 
expect(response.body.updatedAt).toBeDefined(); data = await ComplianceStatusTable.findAll({}, [], {}); expect(data).toHaveLength(1); expect(data[0]).toEqual(expect.objectContaining({ @@ -141,6 +165,7 @@ describe('ComplianceV2Controller', () => { }); expect(response.body.status).toEqual(ComplianceStatus.CLOSE_ONLY); expect(response.body.reason).toEqual(ComplianceReason.COMPLIANCE_PROVIDER); + expect(response.body.updatedAt).toBeDefined(); data = await ComplianceStatusTable.findAll({}, [], {}); expect(data).toHaveLength(1); expect(data[0]).toEqual(expect.objectContaining({ @@ -150,6 +175,66 @@ describe('ComplianceV2Controller', () => { })); }); + it('should return COMPLIANT for a restricted, dydx address with existing CLOSE_ONLY compliance status', async () => { + jest.spyOn(ComplianceControllerHelper.prototype, 'screen').mockImplementation(() => { + return Promise.resolve({ + restricted: true, + }); + }); + + const createdAt: string = DateTime.utc().minus({ days: 1 }).toISO(); + await ComplianceStatusTable.create({ + address: testConstants.defaultAddress, + status: ComplianceStatus.CLOSE_ONLY, + createdAt, + updatedAt: createdAt, + }); + const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); + expect(data).toHaveLength(1); + + isWhitelistedAddressSpy.mockReturnValue(true); + const response: any = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/compliance/screen/${testConstants.defaultAddress}`, + }); + expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); + }); + + it('should return CLOSE_ONLY & not update for a restricted, dydx address with existing CLOSE_ONLY compliance status', + async () => { + jest.spyOn(ComplianceControllerHelper.prototype, 'screen').mockImplementation(() => { + return Promise.resolve({ + restricted: true, + }); + }); + + const createdAt: string = DateTime.utc().minus({ days: 1 }).toISO(); + await ComplianceStatusTable.create({ + address: testConstants.defaultAddress, + status: 
ComplianceStatus.CLOSE_ONLY, + createdAt, + updatedAt: createdAt, + }); + let data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); + expect(data).toHaveLength(1); + + const response: any = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/compliance/screen/${testConstants.defaultAddress}`, + }); + expect(response.body.status).toEqual(ComplianceStatus.CLOSE_ONLY); + expect(response.body.updatedAt).toEqual(createdAt); + data = await ComplianceStatusTable.findAll({}, [], {}); + expect(data).toHaveLength(1); + expect(data[0]).toEqual(expect.objectContaining({ + address: testConstants.defaultAddress, + status: ComplianceStatus.CLOSE_ONLY, + createdAt, + updatedAt: createdAt, + })); + }, + ); + it('should return COMPLIANT for a non-restricted, dydx address', async () => { jest.spyOn(ComplianceControllerHelper.prototype, 'screen').mockImplementation(() => { return Promise.resolve({ @@ -163,6 +248,24 @@ describe('ComplianceV2Controller', () => { }); expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); }); + + it('should return existing compliance data for a non-restricted, dydx address', async () => { + await ComplianceStatusTable.create({ + address: testConstants.defaultAddress, + status: ComplianceStatus.FIRST_STRIKE, + }); + jest.spyOn(ComplianceControllerHelper.prototype, 'screen').mockImplementation(() => { + return Promise.resolve({ + restricted: false, + }); + }); + + const response: any = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/compliance/screen/${testConstants.defaultAddress}`, + }); + expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE); + }); }); describe('POST /setStatus', () => { @@ -210,7 +313,7 @@ describe('ComplianceV2Controller', () => { })); }); - it('should update exisitng db row for dydx address', async () => { + it('should update existing db row for dydx address', async () => { await ComplianceStatusTable.create({ address: testConstants.defaultAddress, status: 
ComplianceStatus.FIRST_STRIKE, @@ -235,33 +338,47 @@ describe('ComplianceV2Controller', () => { }); }); - describe('POST /geoblock', () => { + const geoblockEndpoint = '/v4/compliance/geoblock'; + const geoblockKeplerEndpoint = '/v4/compliance/geoblock-keplr'; + const geoblockBody = { + address: testConstants.defaultAddress, + message: 'Test message', + action: ComplianceAction.CONNECT, + signedMessage: 'signedmessage123', + pubkey: 'asdfasdf', + timestamp: 1620000000, + }; + const geoblockKeplrBody = { + address: testConstants.defaultAddress, + message: 'Test message', + action: ComplianceAction.CONNECT, + signedMessage: 'signedmessage123', + pubkey: 'asdfasdf', + }; + const endpoints = [ + { endpoint: geoblockEndpoint, description: 'POST /geoblock', body: geoblockBody }, + { endpoint: geoblockKeplerEndpoint, description: 'POST /geoblock-keplr', body: geoblockKeplrBody }, + ]; + + describe.each(endpoints)('$description endpoint', ({ endpoint, body }) => { let getGeoComplianceReasonSpy: jest.SpyInstance; let isRestrictedCountryHeadersSpy: jest.SpyInstance; - - const body: any = { - address: testConstants.defaultAddress, - message: 'Test message', - action: ComplianceAction.ONBOARD, - signedMessage: sha256(Buffer.from('msg')), - pubkey: new Uint8Array([/* public key bytes */]), - timestamp: 1620000000, - }; + let isWhitelistedAddressSpy: jest.SpyInstance; beforeEach(async () => { getGeoComplianceReasonSpy = getGeoComplianceReason as unknown as jest.Mock; isRestrictedCountryHeadersSpy = isRestrictedCountryHeaders as unknown as jest.Mock; + isWhitelistedAddressSpy = isWhitelistedAddress as unknown as jest.Mock; ipAddrMock.mockReturnValue(ipAddr); await testMocks.seedData(); - jest.mock('@cosmjs/crypto', () => ({ - Secp256k1: { - verifySignature: jest.fn().mockResolvedValue(true), - }, - ExtendedSecp256k1Signature: { - fromFixedLength: jest.fn().mockResolvedValue({} as ExtendedSecp256k1Signature), - }, - })); + // Mock verification to true to reduce mocking within 
individual tests + verifySignatureMock.mockResolvedValue(true); + fromFixedLengthMock.mockResolvedValue({} as ExtendedSecp256k1Signature); + verifyADR36AminoMock.mockReturnValue(true); + + toBech32Mock.mockReturnValue(testConstants.defaultAddress); jest.spyOn(DateTime, 'now').mockReturnValue(DateTime.fromSeconds(1620000000)); // Mock current time + jest.spyOn(stats, 'increment'); }); afterEach(async () => { @@ -272,62 +389,112 @@ describe('ComplianceV2Controller', () => { }); it('should return 400 for non-dYdX address', async () => { - await sendRequest({ - type: RequestMethod.POST, - path: '/v4/compliance/geoblock', - body: { - ...body, - address: '0x123', // Non-dYdX address - }, - expectedStatus: 400, - }); + if (endpoint === geoblockEndpoint) { + await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body: { + ...body, + address: '0x123', // Non-dYdX address + }, + expectedStatus: 400, + }); + } }); it('should return 400 for invalid timestamp', async () => { - await sendRequest({ - type: RequestMethod.POST, - path: '/v4/compliance/geoblock', - body: { - ...body, - timestamp: 1619996600, // More than 30 seconds difference - }, - expectedStatus: 400, - }); + if (endpoint === geoblockEndpoint) { + await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body: { + ...body, + timestamp: 1619996600, // More than 30 seconds difference + }, + expectedStatus: 400, + }); + } }); it('should return 400 for invalid signature', async () => { - // Mock verifySignature to return false for this test - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(false); + if (endpoint === geoblockEndpoint) { + // Mock verifySignature to return false for this test + (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(false); + await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body, + expectedStatus: 400, + }); + } + }); - await sendRequest({ + it('should return 400 for incorrect address', async () => { + if (endpoint === 
geoblockEndpoint) { + toBech32Mock.mockResolvedValueOnce('invalid_address'); + await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body, + expectedStatus: 400, + }); + } + }); + + it('should return 400 for failed keplr validation', async () => { + if (endpoint === geoblockKeplerEndpoint) { + (verifyADR36Amino as jest.Mock).mockReturnValueOnce(false); + await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body, + expectedStatus: 400, + }); + } + }); + + it('should process valid request', async () => { + const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body, - expectedStatus: 400, }); + + expect(response.status).toEqual(200); + expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); }); - it('should process valid request', async () => { - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); + it('should return COMPLIANT from a restricted country when whitelisted', async () => { + getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); + isRestrictedCountryHeadersSpy.mockReturnValue(true); + await dbHelpers.clearData(); + const data2: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); + expect(data2).toHaveLength(0); + + isWhitelistedAddressSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body, + expectedStatus: 200, }); - expect(response.status).toEqual(200); + const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); + expect(data).toHaveLength(0); + expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); + expect(response.body.updatedAt).toBeDefined(); }); - it('should set status to BLOCKED for ONBOARD action from a restricted country with no existing compliance status', async () => { - (Secp256k1.verifySignature as 
jest.Mock).mockResolvedValueOnce(true); + it('should set status to BLOCKED for CONNECT action from a restricted country with no existing compliance status and no wallet', async () => { getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); + await dbHelpers.clearData(); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body, expectedStatus: 200, }); @@ -340,18 +507,25 @@ describe('ComplianceV2Controller', () => { reason: ComplianceReason.US_GEO, })); + expect(stats.increment).toHaveBeenCalledWith( + `${config.SERVICE_NAME}.compliance-v2-controller.${endpoint === geoblockEndpoint ? 'geo_block' : 'geo_block_keplr'}.compliance_status_changed.count`, + { + newStatus: ComplianceStatus.BLOCKED, + }, + ); + expect(response.body.status).toEqual(ComplianceStatus.BLOCKED); expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); }); - it('should set status to FIRST_STRIKE for CONNECT action from a restricted country with no existing compliance status', async () => { - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); + it('should set status to FIRST_STRIKE_CLOSE_ONLY for CONNECT action from a restricted country with no existing compliance status and a wallet', async () => { getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body: { ...body, action: ComplianceAction.CONNECT, @@ -363,21 +537,26 @@ describe('ComplianceV2Controller', () => { expect(data).toHaveLength(1); expect(data[0]).toEqual(expect.objectContaining({ address: testConstants.defaultAddress, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, reason: 
ComplianceReason.US_GEO, })); + expect(stats.increment).toHaveBeenCalledWith( + `${config.SERVICE_NAME}.compliance-v2-controller.${endpoint === geoblockEndpoint ? 'geo_block' : 'geo_block_keplr'}.compliance_status_changed.count`, + { + newStatus: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, + }); - expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE); + expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY); expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); }); it('should set status to COMPLIANT for any action from a non-restricted country with no existing compliance status', async () => { - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); isRestrictedCountryHeadersSpy.mockReturnValue(false); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body, expectedStatus: 200, }); @@ -392,18 +571,17 @@ describe('ComplianceV2Controller', () => { expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); }); - it('should update status to FIRST_STRIKE for CONNECT action from a restricted country with existing COMPLIANT status', async () => { + it('should update status to FIRST_STRIKE_CLOSE_ONLY for CONNECT action from a restricted country with existing COMPLIANT status', async () => { await ComplianceStatusTable.create({ address: testConstants.defaultAddress, status: ComplianceStatus.COMPLIANT, }); - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body: { ...body, action: ComplianceAction.CONNECT, @@ -415,96 +593,110 @@ describe('ComplianceV2Controller', () => { expect(data).toHaveLength(1); 
expect(data[0]).toEqual(expect.objectContaining({ address: testConstants.defaultAddress, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, reason: ComplianceReason.US_GEO, })); - expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE); + expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY); expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); }); - it('should be a no-op for ONBOARD action with existing COMPLIANT status', async () => { - const loggerError = jest.spyOn(logger, 'error'); + it('should update status to CLOSE_ONLY for CONNECT action from a restricted country with existing FIRST_STRIKE status', async () => { await ComplianceStatusTable.create({ address: testConstants.defaultAddress, - status: ComplianceStatus.COMPLIANT, + status: ComplianceStatus.FIRST_STRIKE, + reason: ComplianceReason.US_GEO, }); - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); + getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', - body, + path: endpoint, + body: { + ...body, + action: ComplianceAction.CONNECT, + }, expectedStatus: 200, }); + expect(stats.increment).toHaveBeenCalledWith( + `${config.SERVICE_NAME}.compliance-v2-controller.${endpoint === geoblockEndpoint ? 
'geo_block' : 'geo_block_keplr'}.compliance_status_changed.count`, + { + newStatus: ComplianceStatus.CLOSE_ONLY, + }); + const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); expect(data).toHaveLength(1); expect(data[0]).toEqual(expect.objectContaining({ address: testConstants.defaultAddress, - status: ComplianceStatus.COMPLIANT, + status: ComplianceStatus.CLOSE_ONLY, + reason: ComplianceReason.US_GEO, })); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'ComplianceV2Controller POST /geoblock', - message: 'Invalid action for current compliance status', - })); - expect(response.body.status).toEqual(ComplianceStatus.COMPLIANT); + expect(response.body.status).toEqual(ComplianceStatus.CLOSE_ONLY); + expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); }); - it('should be a no-op for ONBOARD action with existing FIRST_STRIKE status', async () => { - const loggerError = jest.spyOn(logger, 'error'); + it('should return CLOSE_ONLY for CONNECT action from a restricted country with existing CLOSE_ONLY status', async () => { + const createdAt: string = DateTime.utc().minus({ days: 1 }).toISO(); await ComplianceStatusTable.create({ address: testConstants.defaultAddress, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.CLOSE_ONLY, reason: ComplianceReason.US_GEO, + updatedAt: createdAt, }); - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); + getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', - body, + path: endpoint, + body: { + ...body, + action: ComplianceAction.CONNECT, + }, expectedStatus: 200, }); - const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); expect(data).toHaveLength(1); 
expect(data[0]).toEqual(expect.objectContaining({ address: testConstants.defaultAddress, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.CLOSE_ONLY, reason: ComplianceReason.US_GEO, + updatedAt: createdAt, })); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'ComplianceV2Controller POST /geoblock', - message: 'Invalid action for current compliance status', - })); - expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE); + expect(response.body.status).toEqual(ComplianceStatus.CLOSE_ONLY); expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toEqual(createdAt); }); - it('should update status to CLOSE_ONLY for CONNECT action from a restricted country with existing FIRST_STRIKE status', async () => { + it('should update status to CLOSE_ONLY for INVALID_SURVEY action with existing FIRST_STRIKE_CLOSE_ONLY status', async () => { await ComplianceStatusTable.create({ address: testConstants.defaultAddress, - status: ComplianceStatus.FIRST_STRIKE, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, reason: ComplianceReason.US_GEO, }); - (Secp256k1.verifySignature as jest.Mock).mockResolvedValueOnce(true); getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); isRestrictedCountryHeadersSpy.mockReturnValue(true); const response: any = await sendRequest({ type: RequestMethod.POST, - path: '/v4/compliance/geoblock', + path: endpoint, body: { ...body, - action: ComplianceAction.CONNECT, + action: ComplianceAction.INVALID_SURVEY, }, expectedStatus: 200, }); + expect(stats.increment).toHaveBeenCalledWith( + `${config.SERVICE_NAME}.compliance-v2-controller.${endpoint === geoblockEndpoint ? 
'geo_block' : 'geo_block_keplr'}.compliance_status_changed.count`, + { + newStatus: ComplianceStatus.CLOSE_ONLY, + }); const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); expect(data).toHaveLength(1); @@ -516,6 +708,44 @@ describe('ComplianceV2Controller', () => { expect(response.body.status).toEqual(ComplianceStatus.CLOSE_ONLY); expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); + }); + + it('should update status to FIRST_STRIKE for VALID_SURVEY action with existing FIRST_STRIKE_CLOSE_ONLY status', async () => { + await ComplianceStatusTable.create({ + address: testConstants.defaultAddress, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, + reason: ComplianceReason.US_GEO, + }); + getGeoComplianceReasonSpy.mockReturnValueOnce(ComplianceReason.US_GEO); + isRestrictedCountryHeadersSpy.mockReturnValue(true); + + const response: any = await sendRequest({ + type: RequestMethod.POST, + path: endpoint, + body: { + ...body, + action: ComplianceAction.VALID_SURVEY, + }, + expectedStatus: 200, + }); + expect(stats.increment).toHaveBeenCalledWith( + `${config.SERVICE_NAME}.compliance-v2-controller.${endpoint === geoblockEndpoint ? 
'geo_block' : 'geo_block_keplr'}.compliance_status_changed.count`, + { + newStatus: ComplianceStatus.FIRST_STRIKE, + }); + + const data: ComplianceStatusFromDatabase[] = await ComplianceStatusTable.findAll({}, [], {}); + expect(data).toHaveLength(1); + expect(data[0]).toEqual(expect.objectContaining({ + address: testConstants.defaultAddress, + status: ComplianceStatus.FIRST_STRIKE, + reason: ComplianceReason.US_GEO, + })); + + expect(response.body.status).toEqual(ComplianceStatus.FIRST_STRIKE); + expect(response.body.reason).toEqual(ComplianceReason.US_GEO); + expect(response.body.updatedAt).toBeDefined(); }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/fills-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/fills-controller.test.ts index 00eb2283afc..0e340a601c2 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/fills-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/fills-controller.test.ts @@ -10,7 +10,11 @@ import { } from '@dydxprotocol-indexer/postgres'; import { FillResponseObject, MarketType, RequestMethod } from '../../../../src/types'; import request from 'supertest'; -import { getQueryString, sendRequest } from '../../../helpers/helpers'; +import { + getQueryString, + sendRequest, + fillResponseObjectFromFillCreateObject, +} from '../../../helpers/helpers'; describe('fills-controller#V4', () => { beforeAll(async () => { @@ -54,6 +58,7 @@ describe('fills-controller#V4', () => { price: testConstants.defaultFill.price, size: testConstants.defaultFill.size, fee: testConstants.defaultFill.fee, + affiliateRevShare: testConstants.defaultFill.affiliateRevShare, type: testConstants.defaultFill.type, orderId: testConstants.defaultFill.orderId, createdAt: testConstants.defaultFill.createdAt, @@ -103,6 +108,7 @@ describe('fills-controller#V4', () => { price: ethFill.price, size: ethFill.size, fee: ethFill.fee, + affiliateRevShare: ethFill.affiliateRevShare, type: 
ethFill.type, orderId: ethOrder.id, createdAt: ethFill.createdAt, @@ -121,10 +127,13 @@ describe('fills-controller#V4', () => { ); }); - it('Get /fills with market gets fills ordered by createdAtHeight descending', async () => { + it('Get /fills with market gets correctly ordered fills', async () => { // Order and fill for BTC-USD await OrderTable.create(testConstants.defaultOrder); - await FillTable.create(testConstants.defaultFill); + await FillTable.create({ + ...testConstants.defaultFill, + eventId: testConstants.defaultTendermintEventId2, + }); // Order and fill for ETH-USD const ethOrder: OrderFromDatabase = await OrderTable.create({ @@ -136,11 +145,10 @@ describe('fills-controller#V4', () => { ...testConstants.defaultFill, orderId: ethOrder.id, clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, - eventId: testConstants.defaultTendermintEventId2, createdAtHeight: '1', }); - const response: request.Response = await sendRequest({ + let response: request.Response = await sendRequest({ type: RequestMethod.GET, path: `/v4/fills?address=${testConstants.defaultAddress}` + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, @@ -155,6 +163,7 @@ describe('fills-controller#V4', () => { price: testConstants.defaultFill.price, size: testConstants.defaultFill.size, fee: testConstants.defaultFill.fee, + affiliateRevShare: testConstants.defaultFill.affiliateRevShare, type: testConstants.defaultFill.type, orderId: testConstants.defaultFill.orderId, createdAt: testConstants.defaultFill.createdAt, @@ -175,13 +184,119 @@ describe('fills-controller#V4', () => { }, ]; - // Fills should be returned sorted by createdAtHeight in descending order. + // Page is not specified, so fills should be returned sorted by createdAtHeight + // in descending order. 
expect(response.body.fills).toHaveLength(2); expect(response.body.fills).toEqual( + [ + expect.objectContaining({ + ...expected[0], + }), + expect.objectContaining({ + ...expected[1], + }), + ], + ); + + response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=1&limit=2`, + }); + // Page is specified, so fills should be sorted by eventId in ascending order. + expect(response.body.fills).toHaveLength(2); + expect(response.body.fills).toEqual( + [ + expect.objectContaining({ + ...expected[1], + }), + expect.objectContaining({ + ...expected[0], + }), + ], + ); + }); + + it('Get /fills with market gets fills ordered by createdAtHeight descending and paginated', async () => { + // Order and fill for BTC-USD + await OrderTable.create(testConstants.defaultOrder); + await FillTable.create(testConstants.defaultFill); + + // Order and fill for ETH-USD + const ethOrder: OrderFromDatabase = await OrderTable.create({ + ...testConstants.defaultOrder, + clientId: '3', + clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, + }); + const ethFill: FillFromDatabase = await FillTable.create({ + ...testConstants.defaultFill, + orderId: ethOrder.id, + clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, + eventId: testConstants.defaultTendermintEventId2, + createdAtHeight: '1', + }); + + const responsePage1: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=1&limit=1`, + }); + + const responsePage2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=2&limit=1`, + }); + + const expected: Partial[] = [ + { + side: 
testConstants.defaultFill.side, + liquidity: testConstants.defaultFill.liquidity, + market: testConstants.defaultPerpetualMarket.ticker, + marketType: MarketType.PERPETUAL, + price: testConstants.defaultFill.price, + size: testConstants.defaultFill.size, + fee: testConstants.defaultFill.fee, + affiliateRevShare: testConstants.defaultFill.affiliateRevShare, + type: testConstants.defaultFill.type, + orderId: testConstants.defaultFill.orderId, + createdAt: testConstants.defaultFill.createdAt, + createdAtHeight: testConstants.defaultFill.createdAtHeight, + }, + { + side: ethFill.side, + liquidity: ethFill.liquidity, + market: testConstants.defaultPerpetualMarket2.ticker, + marketType: MarketType.PERPETUAL, + price: ethFill.price, + size: ethFill.size, + fee: ethFill.fee, + affiliateRevShare: ethFill.affiliateRevShare, + type: ethFill.type, + orderId: ethOrder.id, + createdAt: ethFill.createdAt, + createdAtHeight: ethFill.createdAtHeight, + }, + ]; + + expect(responsePage1.body.pageSize).toStrictEqual(1); + expect(responsePage1.body.offset).toStrictEqual(0); + expect(responsePage1.body.totalResults).toStrictEqual(2); + expect(responsePage1.body.fills).toHaveLength(1); + expect(responsePage1.body.fills).toEqual( expect.arrayContaining([ expect.objectContaining({ ...expected[0], }), + ]), + ); + + expect(responsePage2.body.pageSize).toStrictEqual(1); + expect(responsePage2.body.offset).toStrictEqual(1); + expect(responsePage2.body.totalResults).toStrictEqual(2); + expect(responsePage2.body.fills).toHaveLength(1); + expect(responsePage2.body.fills).toEqual( + expect.arrayContaining([ expect.objectContaining({ ...expected[1], }), @@ -282,5 +397,178 @@ describe('fills-controller#V4', () => { ], }); }); + + it('Get /fills/parentSubaccountNumber gets fills', async () => { + await OrderTable.create(testConstants.defaultOrder); + await FillTable.create(testConstants.defaultFill); + await OrderTable.create(testConstants.isolatedMarketOrder); + await 
FillTable.create(testConstants.isolatedMarketFill); + await FillTable.create(testConstants.isolatedMarketFill2); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + // Use fillResponseObjectFromFillCreateObject to create expectedFills + const expectedFills: Partial[] = [ + fillResponseObjectFromFillCreateObject(testConstants.defaultFill, defaultSubaccountNumber), + fillResponseObjectFromFillCreateObject(testConstants.isolatedMarketFill, + testConstants.isolatedSubaccount.subaccountNumber), + fillResponseObjectFromFillCreateObject(testConstants.isolatedMarketFill2, + testConstants.isolatedSubaccount2.subaccountNumber), + ]; + + expect(response.body.fills).toHaveLength(3); + expect(response.body.fills).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedFills[0], + }), + expect.objectContaining({ + ...expectedFills[1], + }), + expect.objectContaining({ + ...expectedFills[2], + }), + ]), + ); + }); + + it('Get /fills/parentSubaccountNumber gets fills for isolated market', async () => { + await OrderTable.create(testConstants.defaultOrder); + await FillTable.create(testConstants.defaultFill); + await OrderTable.create(testConstants.isolatedMarketOrder); + await FillTable.create(testConstants.isolatedMarketFill); + await FillTable.create(testConstants.isolatedMarketFill2); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}` + + `&market=${testConstants.isolatedPerpetualMarket.ticker}&marketType=${MarketType.PERPETUAL}`, + }); + + // Use fillResponseObjectFromFillCreateObject to create expectedFills + const 
expectedFills: Partial[] = [ + fillResponseObjectFromFillCreateObject(testConstants.isolatedMarketFill, + testConstants.isolatedSubaccount.subaccountNumber), + fillResponseObjectFromFillCreateObject(testConstants.isolatedMarketFill2, + testConstants.isolatedSubaccount2.subaccountNumber), + ]; + + expect(response.body.fills).toHaveLength(2); + expect(response.body.fills).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedFills[0], + }), + expect.objectContaining({ + ...expectedFills[1], + }), + ]), + ); + }); + + it('Get /fills/parentSubaccountNumber with market with no fills', async () => { + await OrderTable.create(testConstants.defaultOrder); + await FillTable.create(testConstants.defaultFill); + await OrderTable.create(testConstants.isolatedMarketOrder); + await FillTable.create(testConstants.isolatedMarketFill); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}` + + `&market=${testConstants.isolatedPerpetualMarket2.ticker}&marketType=${MarketType.PERPETUAL}`, + }); + + expect(response.body.fills).toEqual([]); + }); + + it.each([ + [ + 'market passed in without marketType', + { + address: defaultAddress, + subaccountNumber: defaultSubaccountNumber, + market: defaultMarket, + }, + 'marketType', + 'marketType must be provided if market is provided', + ], + [ + 'marketType passed in without market', + { + address: defaultAddress, + subaccountNumber: defaultSubaccountNumber, + marketType: MarketType.PERPETUAL, + }, + 'market', + 'market must be provided if marketType is provided', + ], + [ + 'invalid marketType', + { + address: defaultAddress, + subaccountNumber: defaultSubaccountNumber, + marketType: 'INVALID', + market: defaultMarket, + }, + 'marketType', + 'marketType must be a valid market type (PERPETUAL/SPOT)', + ], + 
])('Returns 400 when validation fails for parentSubaccount endpoint: %s', async ( + _reason: string, + queryParams: { + address?: string, + parentSubaccountNumber?: number, + market?: string, + marketType?: string, + createdBeforeOrAt?: string, + createdBeforeOrAtHeight?: number, + }, + fieldWithError: string, + expectedErrorMsg: string, + ) => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills/parentSubaccountNumber?${getQueryString(queryParams)}`, + expectedStatus: 400, + }); + + expect(response.body).toEqual(expect.objectContaining({ + errors: expect.arrayContaining([ + expect.objectContaining({ + param: fieldWithError, + msg: expectedErrorMsg, + }), + ]), + })); + }); + + it('Returns 404 with unknown market and type on parentSubaccount endpt', async () => { + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/fills/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}` + + `&market=${invalidMarket}&marketType=${MarketType.PERPETUAL}`, + expectedStatus: 404, + }); + + expect(response.body).toEqual({ + errors: [ + { + msg: `${invalidMarket} not found in markets of type ${MarketType.PERPETUAL}`, + }, + ], + }); + }); + }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/historical-pnl-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/historical-pnl-controller.test.ts index 6371f6c2b14..bca33ac4cd5 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/historical-pnl-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/historical-pnl-controller.test.ts @@ -73,6 +73,75 @@ describe('pnlTicks-controller#V4', () => { ); }); + it('Get /historical-pnl respects pagination', async () => { + await testMocks.seedData(); + const createdAt: string = '2000-05-25T00:00:00.000Z'; + const 
blockHeight: string = '1'; + const pnlTick2: PnlTicksCreateObject = { + ...testConstants.defaultPnlTick, + createdAt, + blockHeight, + }; + await Promise.all([ + PnlTicksTable.create(testConstants.defaultPnlTick), + PnlTicksTable.create(pnlTick2), + ]); + + const responsePage1: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/historical-pnl?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=1&limit=1`, + }); + + const responsePage2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/historical-pnl?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=2&limit=1`, + }); + + const expectedPnlTickResponse: PnlTicksResponseObject = { + ...testConstants.defaultPnlTick, + id: PnlTicksTable.uuid( + testConstants.defaultPnlTick.subaccountId, + testConstants.defaultPnlTick.createdAt, + ), + }; + + const expectedPnlTick2Response: PnlTicksResponseObject = { + ...testConstants.defaultPnlTick, + createdAt, + blockHeight, + id: PnlTicksTable.uuid( + testConstants.defaultPnlTick.subaccountId, + createdAt, + ), + }; + + expect(responsePage1.body.pageSize).toStrictEqual(1); + expect(responsePage1.body.offset).toStrictEqual(0); + expect(responsePage1.body.totalResults).toStrictEqual(2); + expect(responsePage1.body.historicalPnl).toHaveLength(1); + expect(responsePage1.body.historicalPnl).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedPnlTickResponse, + }), + ]), + ); + + expect(responsePage2.body.pageSize).toStrictEqual(1); + expect(responsePage2.body.offset).toStrictEqual(1); + expect(responsePage2.body.totalResults).toStrictEqual(2); + expect(responsePage2.body.historicalPnl).toHaveLength(1); + expect(responsePage2.body.historicalPnl).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedPnlTick2Response, + }), + ]), + ); + }); + it('Get 
/historical-pnl respects createdBeforeOrAt and createdBeforeOrAtHeight field', async () => { await testMocks.seedData(); const createdAt: string = '2000-05-25T00:00:00.000Z'; @@ -195,17 +264,77 @@ describe('pnlTicks-controller#V4', () => { it('Get /historical-pnl with non-existent address and subaccount number returns 404', async () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, - path: '/v4/historical-pnl?address=invalid_address&subaccountNumber=100', + path: '/v4/historical-pnl?address=invalidaddress&subaccountNumber=100', expectedStatus: 404, }); expect(response.body).toEqual({ errors: [ { - msg: 'No subaccount found with address invalid_address and subaccountNumber 100', + msg: 'No subaccount found with address invalidaddress and subaccountNumber 100', }, ], }); }); + + it('Get /historical-pnl/parentSubaccountNumber', async () => { + await testMocks.seedData(); + const pnlTick2: PnlTicksCreateObject = { + ...testConstants.defaultPnlTick, + subaccountId: testConstants.isolatedSubaccountId, + }; + await Promise.all([ + PnlTicksTable.create(testConstants.defaultPnlTick), + PnlTicksTable.create(pnlTick2), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/historical-pnl/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + const expectedPnlTickResponse: any = { + // id and subaccountId don't matter + equity: (parseFloat(testConstants.defaultPnlTick.equity) + + parseFloat(pnlTick2.equity)).toString(), + totalPnl: (parseFloat(testConstants.defaultPnlTick.totalPnl) + + parseFloat(pnlTick2.totalPnl)).toString(), + netTransfers: (parseFloat(testConstants.defaultPnlTick.netTransfers) + + parseFloat(pnlTick2.netTransfers)).toString(), + createdAt: testConstants.defaultPnlTick.createdAt, + blockHeight: testConstants.defaultPnlTick.blockHeight, + blockTime: 
testConstants.defaultPnlTick.blockTime, + }; + + expect(response.body.historicalPnl).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedPnlTickResponse, + }), + ]), + ); + }); + }); + + it('Get /historical-pnl/parentSubaccountNumber with invalid subaccount number returns error', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/historical-pnl/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + '&parentSubaccountNumber=128', + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + location: 'query', + msg: 'parentSubaccountNumber must be a non-negative integer less than 128', + param: 'parentSubaccountNumber', + value: '128', + }, + ], + }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/orders-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/orders-controller.test.ts index ce47c5a975d..a43a419af91 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/orders-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/orders-controller.test.ts @@ -76,6 +76,24 @@ describe('orders-controller#V4', () => { ...testConstants.defaultOrder, id: testConstants.defaultOrderId, }, + testConstants.defaultSubaccount.subaccountNumber, + )); + }); + + it('Get /:orderId gets isolated position order in postgres', async () => { + await OrderTable.create(testConstants.isolatedMarketOrder); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/${testConstants.isolatedMarketOrderId}`, + }); + + expect(response.body).toEqual(postgresOrderToResponseObject( + { + ...testConstants.isolatedMarketOrder, + id: testConstants.isolatedMarketOrderId, + }, + testConstants.isolatedSubaccount.subaccountNumber, )); }); @@ -97,6 +115,24 @@ describe('orders-controller#V4', () => { ); }); + it('Get /:orderId gets isolated position order in 
redis', async () => { + await placeOrder({ + redisOrder: redisTestConstants.isolatedMarketRedisOrder, + client: redisClient, + }); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/${testConstants.isolatedMarketOrderId}`, + }); + + expect(response.body).toEqual( + redisOrderToResponseObject( + redisTestConstants.isolatedMarketRedisOrder, + ), + ); + }); + it('Get /:orderId gets order in postgres and redis', async () => { await Promise.all([ OrderTable.create(testConstants.defaultOrder), @@ -117,11 +153,44 @@ describe('orders-controller#V4', () => { ...testConstants.defaultOrder, id: testConstants.defaultOrderId, }, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ), ); }); + it('Get /:orderId gets isolated market order in postgres and redis', async () => { + await Promise.all([ + OrderTable.create(testConstants.isolatedMarketOrder), + placeOrder({ + redisOrder: redisTestConstants.isolatedMarketRedisOrder, + client: redisClient, + }), + ]); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/${testConstants.isolatedMarketOrderId}`, + }); + + expect(response.body).toEqual( + postgresAndRedisOrderToResponseObject( + { + ...testConstants.isolatedMarketOrder, + id: testConstants.isolatedMarketOrderId, + }, + { + [testConstants.isolatedSubaccountId]: + testConstants.isolatedSubaccount.subaccountNumber, + }, + redisTestConstants.isolatedMarketRedisOrder, + ), + ); + }); + it('Get /:orderId errors when parameter is not a uuid', async () => { await sendRequest({ type: RequestMethod.GET, @@ -149,6 +218,15 @@ describe('orders-controller#V4', () => { orderId: orderIdWithDifferentClobPairId, goodTilBlock: 1200, }; + const isolatedOrderIdWithDiffClientId: IndexerOrderId = { + ...redisTestConstants.isolatedMarketOrderId, + clientId: 2, + }; + const isolatedOrderWithDiffClientId: 
IndexerOrder = { + ...redisTestConstants.isolatedMarketOrder, + orderId: isolatedOrderIdWithDiffClientId, + goodTilBlock: 1200, + }; const newerOrderGoodTilBlockTime: IndexerOrder = { ...redisTestConstants.defaultOrderGoodTilBlockTime, orderId: newerOrderGoodTilBlockTimeId, @@ -195,6 +273,19 @@ describe('orders-controller#V4', () => { untriggeredOrder.clobPairId, untriggeredOrder.orderFlags, ); + const isolatedRedisOrder: RedisOrder = { + ...redisTestConstants.isolatedMarketRedisOrder, + order: { + ...redisTestConstants.isolatedMarketOrder, + goodTilBlock: 1200, + }, + }; + const isolatedRedisOrderWithDiffClientId: RedisOrder = { + ...redisTestConstants.isolatedMarketRedisOrder, + order: isolatedOrderWithDiffClientId, + id: OrderTable.orderIdToUuid(isolatedOrderIdWithDiffClientId), + ticker: testConstants.isolatedPerpetualMarket.ticker, + }; it('Successfully gets multiple redis orders', async () => { await Promise.all([ @@ -252,6 +343,52 @@ describe('orders-controller#V4', () => { ]); }); + it('Successfully gets multiple redis orders for parent subaccount', async () => { + await Promise.all([ + placeOrder({ + redisOrder: redisTestConstants.defaultRedisOrder, + client: redisClient, + }), + placeOrder({ + redisOrder: isolatedRedisOrder, + client: redisClient, + }), + ]); + + const parentSubaccountNumber: number = 0; + const queryParams = { + address: testConstants.defaultSubaccount.address, + parentSubaccountNumber, + }; + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/parentSubaccountNumber?${getQueryString(queryParams)}`, + }); + + expect(response.body).toEqual([ // by default sort by desc goodTilBlock + redisOrderToResponseObject(isolatedRedisOrder), + redisOrderToResponseObject( + redisTestConstants.defaultRedisOrder, + ), + ]); + + const response2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/parentSubaccountNumber?${getQueryString({ + ...queryParams, + 
returnLatestOrders: 'false', + })}`, + }); + + expect(response2.body).toEqual([ // by default sort by desc goodTilBlock + redisOrderToResponseObject( + redisTestConstants.defaultRedisOrder, + ), + redisOrderToResponseObject(isolatedRedisOrder), + ]); + }); + it.each([ [ 'ticker', @@ -262,6 +399,15 @@ describe('orders-controller#V4', () => { }, redisTestConstants.defaultRedisOrder, ], + [ + 'tickers across parent subaccount', + [redisTestConstants.defaultRedisOrder, redisTestConstants.isolatedMarketRedisOrder], + { + ...defaultQueryParams, + ticker: testConstants.defaultPerpetualMarket.ticker, + }, + redisTestConstants.defaultRedisOrder, + ], [ 'goodTilBlock', [ @@ -277,6 +423,26 @@ describe('orders-controller#V4', () => { }, redisTestConstants.defaultRedisOrder, ], + [ + 'goodTilBlock', + [ + redisTestConstants.defaultRedisOrder, + { + ...redisTestConstants.isolatedMarketRedisOrder, + order: { + ...redisTestConstants.isolatedMarketOrder, + goodTilBlock: 1200, + }, + }, + ], + { + ...defaultQueryParams, + goodTilBlockBeforeOrAt: protocolTranslations.getGoodTilBlock( + redisTestConstants.defaultRedisOrder.order!, + ), + }, + redisTestConstants.defaultRedisOrder, + ], [ 'goodTilBlockTime', [ @@ -391,19 +557,20 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...testConstants.defaultOrderGoodTilBlockTime, id: getUuidForTest(testConstants.defaultOrderGoodTilBlockTime), - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...secondOrderGoodTilBlockTime, id: getUuidForTest(secondOrderGoodTilBlockTime), - }), + }, + testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...secondOrder, id: getUuidForTest(secondOrder), - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...testConstants.defaultOrder, id: testConstants.defaultOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); const response2: request.Response = 
await sendRequest({ @@ -418,20 +585,72 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...testConstants.defaultOrder, id: testConstants.defaultOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...secondOrder, id: getUuidForTest(secondOrder), - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...secondOrderGoodTilBlockTime, id: getUuidForTest(secondOrderGoodTilBlockTime), - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...testConstants.defaultOrderGoodTilBlockTime, id: getUuidForTest(testConstants.defaultOrderGoodTilBlockTime), + }, testConstants.defaultSubaccount.subaccountNumber), + ]); + }); + + it('Successfully gets multiple postgres orders for parent subaccount', async () => { + await Promise.all([ + OrderTable.create(testConstants.defaultOrder), + OrderTable.create({ + ...testConstants.isolatedMarketOrder, + goodTilBlock: '1000', }), ]); + const parentSubaccountNumber: number = 0; + const queryParams = { + address: testConstants.defaultSubaccount.address, + parentSubaccountNumber, + }; + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/parentSubaccountNumber?${getQueryString(queryParams)}`, + }); + + expect(response.body).toEqual([ + postgresOrderToResponseObject({ + ...testConstants.isolatedMarketOrder, + id: testConstants.isolatedMarketOrderId, + goodTilBlock: '1000', + }, testConstants.isolatedSubaccount.subaccountNumber), + postgresOrderToResponseObject({ + ...testConstants.defaultOrder, + id: testConstants.defaultOrderId, + }, testConstants.defaultSubaccount.subaccountNumber), + ]); + + const response2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/parentSubaccountNumber?${getQueryString({ + ...queryParams, + returnLatestOrders: 'false', + })}`, + }); + + expect(response2.body).toEqual([ + 
postgresOrderToResponseObject({ + ...testConstants.defaultOrder, + id: testConstants.defaultOrderId, + }, testConstants.defaultSubaccount.subaccountNumber), + postgresOrderToResponseObject({ + ...testConstants.isolatedMarketOrder, + id: testConstants.isolatedMarketOrderId, + goodTilBlock: '1000', + }, testConstants.isolatedSubaccount.subaccountNumber), + ]); }); it('Successfully returns filtered order when > limit orders exist', async () => { @@ -449,11 +668,11 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...testConstants.defaultOrder, id: testConstants.defaultOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), postgresOrderToResponseObject({ ...untriggeredOrder, id: untriggeredOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); const response2 = await sendRequest({ @@ -469,7 +688,7 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...untriggeredOrder, id: untriggeredOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); }); @@ -503,6 +722,10 @@ describe('orders-controller#V4', () => { ...testConstants.defaultOrder, id: testConstants.defaultOrderId, }, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ), ]); @@ -525,7 +748,7 @@ describe('orders-controller#V4', () => { filledOrder.clobPairId, filledOrder.orderFlags, ), - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); response = await sendRequest({ @@ -554,7 +777,7 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...untriggeredOrder, id: untriggeredOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); response = await sendRequest({ @@ -572,12 +795,16 @@ describe('orders-controller#V4', () => { ...testConstants.defaultOrder, id: testConstants.defaultOrderId, }, + { + [testConstants.defaultSubaccountId]: + 
testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ), postgresOrderToResponseObject({ ...untriggeredOrder, id: untriggeredOrderId, - }), + }, testConstants.defaultSubaccount.subaccountNumber), ]); }); @@ -614,30 +841,101 @@ describe('orders-controller#V4', () => { postgresOrderToResponseObject({ ...secondOrderGoodTilBlockTime, id: getUuidForTest(secondOrderGoodTilBlockTime), - }), + }, testConstants.defaultSubaccount.subaccountNumber), redisOrderToResponseObject(newerRedisOrderGoodTilBlockTime), postgresAndRedisOrderToResponseObject( { ...testConstants.defaultOrderGoodTilBlockTime, id: getUuidForTest(testConstants.defaultOrderGoodTilBlockTime), }, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrderGoodTilBlockTime, ), postgresOrderToResponseObject({ ...secondOrder, id: getUuidForTest(secondOrder), - }), + }, testConstants.defaultSubaccount.subaccountNumber), redisOrderToResponseObject(redisOrderWithDifferentMarket), postgresAndRedisOrderToResponseObject( { ...testConstants.defaultOrder, id: testConstants.defaultOrderId, }, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ), ]); }); + it('Successfully pulls both redis and postgres orders for parent subaccount', async () => { + await Promise.all([ + OrderTable.create(testConstants.defaultOrder), + OrderTable.create(secondOrder), + OrderTable.create(testConstants.isolatedMarketOrder), + placeOrder({ + redisOrder: redisTestConstants.defaultRedisOrder, + client: redisClient, + }), + placeOrder({ + redisOrder: redisTestConstants.isolatedMarketRedisOrder, + client: redisClient, + }), + placeOrder({ + redisOrder: isolatedRedisOrderWithDiffClientId, + client: redisClient, + }), + ]); + + const parentSubaccountNumber: number = 0; + const queryParams = { + address: testConstants.defaultSubaccount.address, + 
parentSubaccountNumber, + }; + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/orders/parentSubaccountNumber?${getQueryString(queryParams)}`, + }); + + expect(response.body).toEqual( + expect.arrayContaining([ + postgresOrderToResponseObject({ + ...secondOrder, + id: getUuidForTest(secondOrder), + }, testConstants.defaultSubaccount.subaccountNumber), + redisOrderToResponseObject(isolatedRedisOrderWithDiffClientId), + postgresAndRedisOrderToResponseObject( + { + ...testConstants.isolatedMarketOrder, + id: testConstants.isolatedMarketOrderId, + }, + { + [testConstants.isolatedSubaccountId]: + testConstants.isolatedSubaccount.subaccountNumber, + }, + redisTestConstants.isolatedMarketRedisOrder, + ), + postgresAndRedisOrderToResponseObject( + { + ...testConstants.defaultOrder, + id: testConstants.defaultOrderId, + }, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, + redisTestConstants.defaultRedisOrder, + ), + ]), + ); + }); + it.each([ [ 'goodTilBlock passed in with goodTilBlockTime', diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/perpetual-positions-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/perpetual-positions-controller.test.ts index 22dc791af3e..112ade7bb75 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/perpetual-positions-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/perpetual-positions-controller.test.ts @@ -1,13 +1,13 @@ import { + BlockTable, dbHelpers, - testMocks, - testConstants, + FundingIndexUpdatesTable, perpetualMarketRefresher, + PerpetualPositionStatus, PerpetualPositionTable, PositionSide, - BlockTable, - FundingIndexUpdatesTable, - PerpetualPositionStatus, + testConstants, + testMocks, } from '@dydxprotocol-indexer/postgres'; import { PerpetualPositionResponseObject, RequestMethod } from '../../../../src/types'; import request from 'supertest'; @@ 
-89,6 +89,7 @@ describe('perpetual-positions-controller#V4', () => { createdAt: testConstants.createdDateTime.toISO(), closedAt: null, createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }; expect(response.body.positions).toEqual( @@ -140,6 +141,7 @@ describe('perpetual-positions-controller#V4', () => { createdAt: testConstants.createdDateTime.toISO(), closedAt: null, createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }; expect(response.body.positions).toEqual( @@ -189,6 +191,7 @@ describe('perpetual-positions-controller#V4', () => { createdAt: testConstants.createdDateTime.toISO(), closedAt: null, createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }; expect(response.body.positions).toEqual( @@ -242,5 +245,241 @@ describe('perpetual-positions-controller#V4', () => { ]), })); }); + + it('Get /perpetualPositions/parentSubaccountNumber gets long/short positions across subaccounts', async () => { + await Promise.all([ + PerpetualPositionTable.create(testConstants.defaultPerpetualPosition), + PerpetualPositionTable.create({ + ...testConstants.isolatedPerpetualPosition, + side: PositionSide.SHORT, + size: '-10', + }), + ]); + await Promise.all([ + FundingIndexUpdatesTable.create({ + ...testConstants.isolatedMarketFundingIndexUpdate, + fundingIndex: '10000', + effectiveAtHeight: testConstants.createdHeight, + }), + FundingIndexUpdatesTable.create({ + ...testConstants.isolatedMarketFundingIndexUpdate, + eventId: testConstants.defaultTendermintEventId2, + effectiveAtHeight: latestHeight, + }), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/perpetualPositions/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); 
+ + const expected: PerpetualPositionResponseObject = { + market: testConstants.defaultPerpetualMarket.ticker, + side: testConstants.defaultPerpetualPosition.side, + status: testConstants.defaultPerpetualPosition.status, + size: testConstants.defaultPerpetualPosition.size, + maxSize: testConstants.defaultPerpetualPosition.maxSize, + entryPrice: getFixedRepresentation(testConstants.defaultPerpetualPosition.entryPrice!), + exitPrice: null, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen!, + sumClose: testConstants.defaultPerpetualPosition.sumClose!, + // For the calculation of the net funding (long position): + // settled funding on position = 200_000, size = 10, latest funding index = 10050 + // last updated funding index = 10000 + // total funding = 200_000 + (10 * (10000 - 10050)) = 199_500 + netFunding: getFixedRepresentation('199500'), + // sumClose=0, so realized Pnl is the same as the net funding of the position. + // Unsettled funding is funding payments that already "happened" but not reflected + // in the subaccount's balance yet, so it's considered a part of realizedPnl. 
+ realizedPnl: getFixedRepresentation('199500'), + // For the calculation of the unrealized pnl (long position): + // index price = 15_000, entry price = 20_000, size = 10 + // unrealizedPnl = size * (index price - entry price) + // unrealizedPnl = 10 * (15_000 - 20_000) + unrealizedPnl: getFixedRepresentation('-50000'), + createdAt: testConstants.createdDateTime.toISO(), + closedAt: null, + createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }; + // object for expected 2 which holds an isolated position in an isolated perpetual + // in the isolated subaccount + const expected2: PerpetualPositionResponseObject = { + market: testConstants.isolatedPerpetualMarket.ticker, + side: PositionSide.SHORT, + status: testConstants.isolatedPerpetualPosition.status, + size: '-10', + maxSize: testConstants.isolatedPerpetualPosition.maxSize, + entryPrice: getFixedRepresentation(testConstants.isolatedPerpetualPosition.entryPrice!), + exitPrice: null, + sumOpen: testConstants.isolatedPerpetualPosition.sumOpen!, + sumClose: testConstants.isolatedPerpetualPosition.sumClose!, + // For the calculation of the net funding (short position): + // settled funding on position = 200_000, size = -10, latest funding index = 10200 + // last updated funding index = 10000 + // total funding = 200_000 + (-10 * (10000 - 10200)) = 202_000 + netFunding: getFixedRepresentation('202000'), + // sumClose=0, so realized Pnl is the same as the net funding of the position. + // Unsettled funding is funding payments that already "happened" but not reflected + // in the subaccount's balance yet, so it's considered a part of realizedPnl. 
+ realizedPnl: getFixedRepresentation('202000'), + // For the calculation of the unrealized pnl (short position): + // index price = 1, entry price = 1.5, size = -10 + // unrealizedPnl = size * (index price - entry price) + // unrealizedPnl = -10 * (1-1.5) + unrealizedPnl: getFixedRepresentation('5'), + createdAt: testConstants.createdDateTime.toISO(), + closedAt: null, + createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + }; + + expect(response.body.positions).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expected, + }), + expect.objectContaining({ + ...expected2, + }), + ]), + ); + }); + + it('Get /perpetualPositions/parentSubaccountNumber gets CLOSED position without adjusting funding', async () => { + await Promise.all([ + PerpetualPositionTable.create({ + ...testConstants.defaultPerpetualPosition, + status: PerpetualPositionStatus.CLOSED, + }), + PerpetualPositionTable.create({ + ...testConstants.isolatedPerpetualPosition, + side: PositionSide.SHORT, + size: '-10', + }), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/perpetualPositions/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + const expected: PerpetualPositionResponseObject = { + market: testConstants.defaultPerpetualMarket.ticker, + side: testConstants.defaultPerpetualPosition.side, + status: PerpetualPositionStatus.CLOSED, + size: testConstants.defaultPerpetualPosition.size, + maxSize: testConstants.defaultPerpetualPosition.maxSize, + entryPrice: getFixedRepresentation(testConstants.defaultPerpetualPosition.entryPrice!), + exitPrice: null, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen!, + sumClose: testConstants.defaultPerpetualPosition.sumClose!, + // CLOSED position should not have funding adjusted + netFunding: 
getFixedRepresentation( + testConstants.defaultPerpetualPosition.settledFunding, + ), + realizedPnl: getFixedRepresentation( + testConstants.defaultPerpetualPosition.settledFunding, + ), + // For the calculation of the unrealized pnl (short position): + // index price = 15_000, entry price = 20_000, size = 10 + // unrealizedPnl = size * (index price - entry price) + // unrealizedPnl = 10 * (15_000 - 20_000) + unrealizedPnl: getFixedRepresentation('-50000'), + createdAt: testConstants.createdDateTime.toISO(), + closedAt: null, + createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }; + const expected2: PerpetualPositionResponseObject = { + market: testConstants.isolatedPerpetualMarket.ticker, + side: PositionSide.SHORT, + status: testConstants.isolatedPerpetualPosition.status, + size: '-10', + maxSize: testConstants.isolatedPerpetualPosition.maxSize, + entryPrice: getFixedRepresentation(testConstants.isolatedPerpetualPosition.entryPrice!), + exitPrice: null, + sumOpen: testConstants.isolatedPerpetualPosition.sumOpen!, + sumClose: testConstants.isolatedPerpetualPosition.sumClose!, + // CLOSED position should not have funding adjusted + netFunding: getFixedRepresentation( + testConstants.isolatedPerpetualPosition.settledFunding, + ), + realizedPnl: getFixedRepresentation( + testConstants.isolatedPerpetualPosition.settledFunding, + ), + // For the calculation of the unrealized pnl (short position): + // index price = 1, entry price = 1.5, size = -10 + // unrealizedPnl = size * (index price - entry price) + // unrealizedPnl = -10 * (1-1.5) + unrealizedPnl: getFixedRepresentation('5'), + createdAt: testConstants.createdDateTime.toISO(), + closedAt: null, + createdAtHeight: testConstants.createdHeight, + subaccountNumber: testConstants.isolatedSubaccount.subaccountNumber, + }; + + expect(response.body.positions).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expected, + }), + 
expect.objectContaining({ + ...expected2, + }), + ]), + ); + }); + + it.each([ + [ + 'invalid status', + { + address: defaultAddress, + parentSubaccountNumber: defaultSubaccountNumber, + status: 'INVALID', + }, + 'status', + 'status must be a valid Position Status (OPEN, etc)', + ], + [ + 'multiple invalid status', + { + address: defaultAddress, + parentSubaccountNumber: defaultSubaccountNumber, + status: 'INVALID,INVALID', + }, + 'status', + 'status must be a valid Position Status (OPEN, etc)', + ], + ])('Returns 400 when validation fails: %s', async ( + _reason: string, + queryParams: { + address?: string, + subaccountNumber?: number, + status?: string, + }, + fieldWithError: string, + expectedErrorMsg: string, + ) => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/perpetualPositions/parentSubaccountNumber?${getQueryString(queryParams)}`, + expectedStatus: 400, + }); + + expect(response.body).toEqual(expect.objectContaining({ + errors: expect.arrayContaining([ + expect.objectContaining({ + param: fieldWithError, + msg: expectedErrorMsg, + }), + ]), + })); + }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/social-trading-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/social-trading-controller.test.ts new file mode 100644 index 00000000000..67f6e8c3c85 --- /dev/null +++ b/indexer/services/comlink/__tests__/controllers/api/v4/social-trading-controller.test.ts @@ -0,0 +1,82 @@ +import { + dbHelpers, + SubaccountTable, + testMocks, + SubaccountUsernamesTable, + SubaccountFromDatabase, + SubaccountUsernamesFromDatabase, +} from '@dydxprotocol-indexer/postgres'; +import request from 'supertest'; +import { RequestMethod } from '../../../../src/types'; +import { sendRequest } from '../../../helpers/helpers'; + +describe('social-trading-controller', () => { + beforeAll(async () => { + await dbHelpers.migrate(); + }); + + beforeEach(async () => { + await 
testMocks.seedData(); + }); + + afterAll(async () => { + await dbHelpers.teardown(); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + it('successfully fetches subaccount info by address', async () => { + const subaccounts: SubaccountFromDatabase[] = await SubaccountTable.findAll({}, []); + const subaccount: SubaccountFromDatabase = subaccounts[0]; + + const subaccountUsernames: SubaccountUsernamesFromDatabase = await + SubaccountUsernamesTable.create({ + subaccountId: subaccount.id, + username: 'test_username', + }); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/trader/search?searchParam=${subaccount.address}`, + }); + + expect(response.status).toEqual(200); + expect(response.body).toEqual({ + result: { + address: subaccount.address, + subaccountNumber: subaccount.subaccountNumber, + username: subaccountUsernames.username, + subaccountId: subaccount.id, + }, + }); + }); + + it('successfully fetches subaccount info by username', async () => { + + const subaccounts: SubaccountFromDatabase[] = await SubaccountTable.findAll({}, []); + const subaccount: SubaccountFromDatabase = subaccounts[0]; + const subaccountUsernames: SubaccountUsernamesFromDatabase = await + SubaccountUsernamesTable.create({ + subaccountId: subaccount.id, + username: 'test_username', + }); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/trader/search?searchParam=${subaccountUsernames.username}`, + }); + + expect(response.status).toEqual(200); + expect(response.body).toEqual({ + result: { + address: subaccount.address, + subaccountNumber: subaccount.subaccountNumber, + username: subaccountUsernames.username, + subaccountId: subaccount.id, + }, + }); + }); + +}); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/sparklines-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/sparklines-controller.test.ts index f06741a8acd..07863f32fc2 
100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/sparklines-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/sparklines-controller.test.ts @@ -6,7 +6,6 @@ import { PerpetualMarketFromDatabase, perpetualMarketRefresher, testConstants, - testMocks, } from '@dydxprotocol-indexer/postgres'; import _ from 'lodash'; import { DateTime } from 'luxon'; @@ -16,6 +15,44 @@ import { SPARKLINE_TIME_PERIOD_TO_LIMIT_MAP, SPARKLINE_TIME_PERIOD_TO_RESOLUTION import { RequestMethod, SparklineTimePeriod } from '../../../../src/types'; import { sendRequest } from '../../../helpers/helpers'; import Big from 'big.js'; +import * as SubaccountTable from '@dydxprotocol-indexer/postgres/build/src/stores/subaccount-table'; +import { + defaultLiquidityTier, + defaultLiquidityTier2, + defaultMarket, + defaultMarket2, + defaultMarket3, + defaultPerpetualMarket, + defaultPerpetualMarket2, + defaultPerpetualMarket3, + defaultSubaccount, + defaultSubaccount2, +} from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; +import * as MarketTable from '@dydxprotocol-indexer/postgres/build/src/stores/market-table'; +import * as LiquidityTiersTable from '@dydxprotocol-indexer/postgres/build/src/stores/liquidity-tiers-table'; +import * as PerpetualMarketTable from '@dydxprotocol-indexer/postgres/build/src/stores/perpetual-market-table'; + +// helper function to seed data +async function seedData() { + await Promise.all([ + SubaccountTable.create(defaultSubaccount), + SubaccountTable.create(defaultSubaccount2), + ]); + await Promise.all([ + MarketTable.create(defaultMarket), + MarketTable.create(defaultMarket2), + MarketTable.create(defaultMarket3), + ]); + await Promise.all([ + LiquidityTiersTable.create(defaultLiquidityTier), + LiquidityTiersTable.create(defaultLiquidityTier2), + ]); + await Promise.all([ + PerpetualMarketTable.create(defaultPerpetualMarket), + PerpetualMarketTable.create(defaultPerpetualMarket2), + 
PerpetualMarketTable.create(defaultPerpetualMarket3), + ]); +} describe('sparklines-controller#V4', () => { beforeAll(async () => { @@ -23,7 +60,7 @@ describe('sparklines-controller#V4', () => { }); beforeEach(async () => { - await testMocks.seedData(); + await seedData(); await perpetualMarketRefresher.updatePerpetualMarkets(); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/trades-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/trades-controller.test.ts index 1c8230133c2..e85cd95daa6 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/trades-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/trades-controller.test.ts @@ -123,6 +123,73 @@ describe('trades-controller#V4', () => { ); }); + it('Get /:ticker gets trades for a ticker in descending order by createdAtHeight and paginated', async () => { + await testMocks.seedData(); + await perpetualMarketRefresher.updatePerpetualMarkets(); + // Order and fill for BTC-USD (maker and taker) + const fills1: { + makerFill: FillFromDatabase, + takerFill: FillFromDatabase, + } = await createMakerTakerOrderAndFill( + testConstants.defaultOrder, + testConstants.defaultFill, + ); + + const btcSize2: string = '600'; + const fills2: { + makerFill: FillFromDatabase, + takerFill: FillFromDatabase, + } = await createMakerTakerOrderAndFill( + testConstants.defaultOrder, + { + ...testConstants.defaultFill, + size: btcSize2, + eventId: testConstants.defaultTendermintEventId2, + createdAtHeight: '1', + }, + ); + + const responsePage1: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/trades/perpetualMarket/${testConstants.defaultPerpetualMarket.ticker}?page=1&limit=1`, + }); + + const responsePage2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/trades/perpetualMarket/${testConstants.defaultPerpetualMarket.ticker}?page=2&limit=1`, + }); + + const expected: TradeResponseObject[] = [ + 
fillToTradeResponseObject(fills1.takerFill), + fillToTradeResponseObject(fills2.takerFill), + ]; + + // Expect both trades, ordered by createdAtHeight in descending order + expect(responsePage1.body.pageSize).toStrictEqual(1); + expect(responsePage1.body.offset).toStrictEqual(0); + expect(responsePage1.body.totalResults).toStrictEqual(2); + expect(responsePage1.body.trades).toHaveLength(1); + expect(responsePage1.body.trades).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expected[0], + }), + ]), + ); + + expect(responsePage2.body.pageSize).toStrictEqual(1); + expect(responsePage2.body.offset).toStrictEqual(1); + expect(responsePage2.body.totalResults).toStrictEqual(2); + expect(responsePage2.body.trades).toHaveLength(1); + expect(responsePage2.body.trades).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expected[1], + }), + ]), + ); + }); + it('Get /:ticker for ticker with no fills', async () => { await testMocks.seedData(); await perpetualMarketRefresher.updatePerpetualMarkets(); @@ -138,6 +205,21 @@ describe('trades-controller#V4', () => { expect(response.body.trades).toEqual([]); }); + it('Get /:ticker for ticker with no fills and paginated', async () => { + await testMocks.seedData(); + await perpetualMarketRefresher.updatePerpetualMarkets(); + // Order and fill for BTC-USD + await OrderTable.create(testConstants.defaultOrder); + await FillTable.create(testConstants.defaultFill); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/trades/perpetualMarket/${testConstants.defaultPerpetualMarket2.ticker}?page=1&limit=1`, + }); + + expect(response.body.trades).toEqual([]); + }); + it('Get /:ticker for ticker with price < 1e-6', async () => { await testMocks.seedData(); await perpetualMarketRefresher.updatePerpetualMarkets(); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/transfers-controller.test.ts 
b/indexer/services/comlink/__tests__/controllers/api/v4/transfers-controller.test.ts index a89c3c027ea..79ddfd4d04c 100644 --- a/indexer/services/comlink/__tests__/controllers/api/v4/transfers-controller.test.ts +++ b/indexer/services/comlink/__tests__/controllers/api/v4/transfers-controller.test.ts @@ -1,5 +1,7 @@ import { dbHelpers, + IsoString, + SubaccountTable, testConstants, testMocks, TransferCreateObject, @@ -7,9 +9,28 @@ import { TransferType, WalletTable, } from '@dydxprotocol-indexer/postgres'; -import { RequestMethod, TransferResponseObject } from '../../../../src/types'; +import { + ParentSubaccountTransferResponseObject, + RequestMethod, + TransferBetweenRequest, + TransferBetweenResponse, + TransferResponseObject, +} from '../../../../src/types'; import request from 'supertest'; -import { sendRequest } from '../../../helpers/helpers'; +import { getQueryString, sendRequest } from '../../../helpers/helpers'; +import { + createdDateTime, createdHeight, + defaultAsset, defaultSubaccount2Num0, + defaultTendermintEventId4, + defaultWalletAddress, isolatedSubaccountId, +} from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; +import Big from 'big.js'; + +const defaultWallet = { + ...testConstants.defaultWallet, + address: defaultWalletAddress, // defaultWalletAddress != testConstants.defaultWallet.address + +}; describe('transfers-controller#V4', () => { beforeAll(async () => { @@ -37,10 +58,8 @@ describe('transfers-controller#V4', () => { createdAt: testConstants.createdDateTime.toISO(), createdAtHeight: testConstants.createdHeight, }; - await WalletTable.create({ - address: testConstants.defaultWalletAddress, - totalTradingRewards: '0', - }); + // use wallet2 to not create duplicate + await WalletTable.create(testConstants.defaultWallet2); await Promise.all([ TransferTable.create(testConstants.defaultTransfer), TransferTable.create(transfer2), @@ -149,6 +168,145 @@ describe('transfers-controller#V4', () => { ); }); + it('Get /transfers 
returns transfers/deposits/withdrawals with pagination', async () => { + await testMocks.seedData(); + const transfer2: TransferCreateObject = { + senderSubaccountId: testConstants.defaultSubaccountId2, + recipientSubaccountId: testConstants.defaultSubaccountId, + assetId: testConstants.defaultAsset2.id, + size: '5', + eventId: testConstants.defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + await WalletTable.create(defaultWallet); + await Promise.all([ + TransferTable.create(testConstants.defaultTransfer), + TransferTable.create(transfer2), + TransferTable.create(testConstants.defaultWithdrawal), + TransferTable.create(testConstants.defaultDeposit), + ]); + + const responsePage1: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=1&limit=2`, + }); + const responsePage2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers?address=${testConstants.defaultAddress}` + + `&subaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=2&limit=2`, + }); + + const expectedTransferResponse: TransferResponseObject = { + id: testConstants.defaultTransferId, + sender: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + size: testConstants.defaultTransfer.size, + createdAt: testConstants.defaultTransfer.createdAt, + createdAtHeight: testConstants.defaultTransfer.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_OUT, + transactionHash: testConstants.defaultTransfer.transactionHash, + }; + + const 
expectedTransfer2Response: TransferResponseObject = { + id: TransferTable.uuid( + transfer2.eventId, + transfer2.assetId, + transfer2.senderSubaccountId, + transfer2.recipientSubaccountId, + transfer2.senderWalletAddress, + transfer2.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: transfer2.size, + createdAt: transfer2.createdAt, + createdAtHeight: transfer2.createdAtHeight, + symbol: testConstants.defaultAsset2.symbol, + type: TransferType.TRANSFER_IN, + transactionHash: transfer2.transactionHash, + }; + + const expectedDepositResponse: TransferResponseObject = { + id: testConstants.defaultDepositId, + sender: { + address: testConstants.defaultWalletAddress, + }, + recipient: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: testConstants.defaultDeposit.size, + createdAt: testConstants.defaultDeposit.createdAt, + createdAtHeight: testConstants.defaultDeposit.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.DEPOSIT, + transactionHash: testConstants.defaultDeposit.transactionHash, + }; + + const expectedWithdrawalResponse: TransferResponseObject = { + id: testConstants.defaultWithdrawalId, + sender: { + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultWalletAddress, + }, + size: testConstants.defaultWithdrawal.size, + createdAt: testConstants.defaultWithdrawal.createdAt, + createdAtHeight: testConstants.defaultWithdrawal.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.WITHDRAWAL, + transactionHash: testConstants.defaultWithdrawal.transactionHash, + }; + + 
expect(responsePage1.body.pageSize).toStrictEqual(2); + expect(responsePage1.body.offset).toStrictEqual(0); + expect(responsePage1.body.totalResults).toStrictEqual(4); + expect(responsePage1.body.transfers).toHaveLength(2); + expect(responsePage1.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedTransferResponse, + }), + expect.objectContaining({ + ...expectedTransfer2Response, + }), + ]), + ); + + expect(responsePage2.body.pageSize).toStrictEqual(2); + expect(responsePage2.body.offset).toStrictEqual(2); + expect(responsePage2.body.totalResults).toStrictEqual(4); + expect(responsePage2.body.transfers).toHaveLength(2); + expect(responsePage2.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedWithdrawalResponse, + }), + expect.objectContaining({ + ...expectedDepositResponse, + }), + ]), + ); + }); + it('Get /transfers respects createdBeforeOrAt field', async () => { await testMocks.seedData(); const createdAt: string = '2000-05-25T00:00:00.000Z'; @@ -275,17 +433,745 @@ describe('transfers-controller#V4', () => { it('Get /transfers with non-existent address and subaccount number returns 404', async () => { const response: request.Response = await sendRequest({ type: RequestMethod.GET, - path: '/v4/transfers?address=invalid_address&subaccountNumber=100', + path: '/v4/transfers?address=invalidaddress&subaccountNumber=100', expectedStatus: 404, }); expect(response.body).toEqual({ errors: [ { - msg: 'No subaccount found with address invalid_address and subaccountNumber 100', + msg: 'No subaccount found with address invalidaddress and subaccountNumber 100', }, ], }); }); + + it('Get /transfers/parentSubaccountNumber returns transfers/deposits/withdrawals', async () => { + await testMocks.seedData(); + const transfer2: TransferCreateObject = { + senderSubaccountId: testConstants.defaultSubaccountId2, + recipientSubaccountId: testConstants.defaultSubaccountId, + assetId: 
testConstants.defaultAsset2.id, + size: '5', + eventId: testConstants.defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + await WalletTable.create(defaultWallet); + await Promise.all([ + SubaccountTable.create(defaultSubaccount2Num0), + ]); + await Promise.all([ + TransferTable.create(testConstants.defaultTransfer), + TransferTable.create(transfer2), + TransferTable.create(testConstants.defaultWithdrawal), + TransferTable.create(testConstants.defaultDeposit), + TransferTable.create(testConstants.defaultTransferWithAlternateAddress), + ]); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + }); + + const expectedTransferResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultTransferId, + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + size: testConstants.defaultTransfer.size, + createdAt: testConstants.defaultTransfer.createdAt, + createdAtHeight: testConstants.defaultTransfer.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_OUT, + transactionHash: testConstants.defaultTransfer.transactionHash, + }; + + const expectedTransfer2Response: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + transfer2.eventId, + transfer2.assetId, + transfer2.senderSubaccountId, + transfer2.recipientSubaccountId, + transfer2.senderWalletAddress, + transfer2.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + 
parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: transfer2.size, + createdAt: transfer2.createdAt, + createdAtHeight: transfer2.createdAtHeight, + symbol: testConstants.defaultAsset2.symbol, + type: TransferType.TRANSFER_IN, + transactionHash: transfer2.transactionHash, + }; + + const expectedDepositResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultDepositId, + sender: { + address: testConstants.defaultWalletAddress, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: testConstants.defaultDeposit.size, + createdAt: testConstants.defaultDeposit.createdAt, + createdAtHeight: testConstants.defaultDeposit.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.DEPOSIT, + transactionHash: testConstants.defaultDeposit.transactionHash, + }; + + const expectedWithdrawalResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultWithdrawalId, + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultWalletAddress, + }, + size: testConstants.defaultWithdrawal.size, + createdAt: testConstants.defaultWithdrawal.createdAt, + createdAtHeight: testConstants.defaultWithdrawal.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.WITHDRAWAL, + transactionHash: testConstants.defaultWithdrawal.transactionHash, + }; + + const expectedTransferWithAlternateAddressResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultTransferWithAlternateAddressId, + sender: { + address: testConstants.defaultAddress2, + parentSubaccountNumber: 
testConstants.defaultSubaccount2Num0.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: testConstants.defaultTransferWithAlternateAddress.size, + createdAt: testConstants.defaultTransferWithAlternateAddress.createdAt, + createdAtHeight: testConstants.defaultTransferWithAlternateAddress.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_IN, + transactionHash: testConstants.defaultTransferWithAlternateAddress.transactionHash, + }; + + expect(response.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedTransferResponse, + }), + expect.objectContaining({ + ...expectedTransfer2Response, + }), + expect.objectContaining({ + ...expectedWithdrawalResponse, + }), + expect.objectContaining({ + ...expectedDepositResponse, + }), + expect.objectContaining({ + ...expectedTransferWithAlternateAddressResponse, + }), + ]), + ); + }); + + it('Get /transfers/parentSubaccountNumber returns transfers/deposits/withdrawals and paginated', async () => { + await testMocks.seedData(); + const transfer2: TransferCreateObject = { + senderSubaccountId: testConstants.defaultSubaccountId2, + recipientSubaccountId: testConstants.defaultSubaccountId, + assetId: testConstants.defaultAsset2.id, + size: '5', + eventId: testConstants.defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + await WalletTable.create(defaultWallet); + await Promise.all([ + TransferTable.create(testConstants.defaultTransfer), + TransferTable.create(transfer2), + TransferTable.create(testConstants.defaultWithdrawal), + TransferTable.create(testConstants.defaultDeposit), + ]); + + const responsePage1: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: 
`/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=1&limit=2`, + }); + + const responsePage2: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}&page=2&limit=2`, + }); + + const expectedTransferResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultTransferId, + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + size: testConstants.defaultTransfer.size, + createdAt: testConstants.defaultTransfer.createdAt, + createdAtHeight: testConstants.defaultTransfer.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_OUT, + transactionHash: testConstants.defaultTransfer.transactionHash, + }; + + const expectedTransfer2Response: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + transfer2.eventId, + transfer2.assetId, + transfer2.senderSubaccountId, + transfer2.recipientSubaccountId, + transfer2.senderWalletAddress, + transfer2.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: transfer2.size, + createdAt: transfer2.createdAt, + createdAtHeight: transfer2.createdAtHeight, + symbol: testConstants.defaultAsset2.symbol, + type: TransferType.TRANSFER_IN, + transactionHash: transfer2.transactionHash, + }; + + const 
expectedDepositResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultDepositId, + sender: { + address: testConstants.defaultWalletAddress, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + size: testConstants.defaultDeposit.size, + createdAt: testConstants.defaultDeposit.createdAt, + createdAtHeight: testConstants.defaultDeposit.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.DEPOSIT, + transactionHash: testConstants.defaultDeposit.transactionHash, + }; + + const expectedWithdrawalResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultWithdrawalId, + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultWalletAddress, + }, + size: testConstants.defaultWithdrawal.size, + createdAt: testConstants.defaultWithdrawal.createdAt, + createdAtHeight: testConstants.defaultWithdrawal.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.WITHDRAWAL, + transactionHash: testConstants.defaultWithdrawal.transactionHash, + }; + + expect(responsePage1.body.pageSize).toStrictEqual(2); + expect(responsePage1.body.offset).toStrictEqual(0); + expect(responsePage1.body.totalResults).toStrictEqual(4); + expect(responsePage1.body.transfers).toHaveLength(2); + expect(responsePage1.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedTransferResponse, + }), + expect.objectContaining({ + ...expectedTransfer2Response, + }), + ]), + ); + + expect(responsePage2.body.pageSize).toStrictEqual(2); + expect(responsePage2.body.offset).toStrictEqual(2); + expect(responsePage2.body.totalResults).toStrictEqual(4); + expect(responsePage2.body.transfers).toHaveLength(2); + expect(responsePage2.body.transfers).toEqual( + 
expect.arrayContaining([ + expect.objectContaining({ + ...expectedWithdrawalResponse, + }), + expect.objectContaining({ + ...expectedDepositResponse, + }), + ]), + ); + }); + + it('Get /transfers/parentSubaccountNumber excludes transfers for parent <> child subaccounts', async () => { + await testMocks.seedData(); + const transfer2: TransferCreateObject = { + senderSubaccountId: testConstants.defaultSubaccountId, + recipientSubaccountId: testConstants.isolatedSubaccountId, + assetId: testConstants.defaultAsset.id, + size: '5', + eventId: testConstants.defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + const transfer3: TransferCreateObject = { + senderSubaccountId: testConstants.isolatedSubaccountId2, + recipientSubaccountId: testConstants.defaultSubaccountId, + assetId: testConstants.defaultAsset.id, + size: '5', + eventId: testConstants.defaultTendermintEventId3, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + await WalletTable.create(defaultWallet); + await Promise.all([ + TransferTable.create(testConstants.defaultTransfer), + TransferTable.create(transfer2), + TransferTable.create(transfer3), + ]); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + }); + + const expectedTransferResponse: ParentSubaccountTransferResponseObject = { + id: testConstants.defaultTransferId, + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: 
testConstants.defaultSubaccount2.subaccountNumber, + }, + size: testConstants.defaultTransfer.size, + createdAt: testConstants.defaultTransfer.createdAt, + createdAtHeight: testConstants.defaultTransfer.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_OUT, + transactionHash: testConstants.defaultTransfer.transactionHash, + }; + + expect(response.body.transfers.length).toEqual(1); + expect(response.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedTransferResponse, + }), + ]), + ); + }); + + it('Get /transfers/parentSubaccountNumber includes transfers for wallets/subaccounts(non parent) <> child subaccounts', async () => { + await testMocks.seedData(); + const transferFromNonParent: TransferCreateObject = { + senderSubaccountId: testConstants.defaultSubaccountId2, + recipientSubaccountId: testConstants.isolatedSubaccountId, + assetId: testConstants.defaultAsset.id, + size: '5', + eventId: testConstants.defaultTendermintEventId2, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + const transferToNonParent: TransferCreateObject = { + senderSubaccountId: testConstants.isolatedSubaccountId2, + recipientSubaccountId: testConstants.defaultSubaccountId2, + assetId: testConstants.defaultAsset.id, + size: '5', + eventId: testConstants.defaultTendermintEventId3, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: testConstants.createdDateTime.toISO(), + createdAtHeight: testConstants.createdHeight, + }; + const depositToChildSA: TransferCreateObject = { + senderWalletAddress: defaultWalletAddress, + recipientSubaccountId: isolatedSubaccountId, + assetId: defaultAsset.id, + size: '10', + eventId: defaultTendermintEventId4, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + }; 
+ const withdrawFromChildSA: TransferCreateObject = { + senderSubaccountId: isolatedSubaccountId, + recipientWalletAddress: defaultWalletAddress, + assetId: defaultAsset.id, + size: '10', + eventId: defaultTendermintEventId4, + transactionHash: '', // TODO: Add a real transaction Hash + createdAt: createdDateTime.toISO(), + createdAtHeight: createdHeight, + }; + await WalletTable.create(defaultWallet); + await Promise.all([ + TransferTable.create(transferFromNonParent), + TransferTable.create(transferToNonParent), + TransferTable.create(depositToChildSA), + TransferTable.create(withdrawFromChildSA), + ]); + + const parentSubaccountNumber: number = 0; + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${parentSubaccountNumber}`, + }); + + const expectedTransferResponse1: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + transferFromNonParent.eventId, + transferFromNonParent.assetId, + transferFromNonParent.senderSubaccountId, + transferFromNonParent.recipientSubaccountId, + transferFromNonParent.senderWalletAddress, + transferFromNonParent.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: 0, + }, + size: transferFromNonParent.size, + createdAt: transferFromNonParent.createdAt, + createdAtHeight: transferFromNonParent.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_IN, + transactionHash: transferFromNonParent.transactionHash, + }; + const expectedTransferResponse2: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + transferToNonParent.eventId, + transferToNonParent.assetId, + transferToNonParent.senderSubaccountId, + 
transferToNonParent.recipientSubaccountId, + transferToNonParent.senderWalletAddress, + transferToNonParent.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: 0, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + size: transferToNonParent.size, + createdAt: transferToNonParent.createdAt, + createdAtHeight: transferToNonParent.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.TRANSFER_OUT, + transactionHash: transferToNonParent.transactionHash, + }; + const expectedDepositResponse: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + depositToChildSA.eventId, + depositToChildSA.assetId, + depositToChildSA.senderSubaccountId, + depositToChildSA.recipientSubaccountId, + depositToChildSA.senderWalletAddress, + depositToChildSA.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultWalletAddress, + }, + recipient: { + address: testConstants.defaultAddress, + parentSubaccountNumber: 0, + }, + size: depositToChildSA.size, + createdAt: depositToChildSA.createdAt, + createdAtHeight: depositToChildSA.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.DEPOSIT, + transactionHash: depositToChildSA.transactionHash, + }; + const expectedWithdrawalResponse: ParentSubaccountTransferResponseObject = { + id: TransferTable.uuid( + withdrawFromChildSA.eventId, + withdrawFromChildSA.assetId, + withdrawFromChildSA.senderSubaccountId, + withdrawFromChildSA.recipientSubaccountId, + withdrawFromChildSA.senderWalletAddress, + withdrawFromChildSA.recipientWalletAddress, + ), + sender: { + address: testConstants.defaultAddress, + parentSubaccountNumber: 0, + }, + recipient: { + address: testConstants.defaultWalletAddress, + }, + size: withdrawFromChildSA.size, + createdAt: withdrawFromChildSA.createdAt, + createdAtHeight: 
withdrawFromChildSA.createdAtHeight, + symbol: testConstants.defaultAsset.symbol, + type: TransferType.WITHDRAWAL, + transactionHash: withdrawFromChildSA.transactionHash, + }; + + expect(response.body.transfers.length).toEqual(4); + expect(response.body.transfers).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedTransferResponse1, + }), + expect.objectContaining({ + ...expectedTransferResponse2, + }), + expect.objectContaining({ + ...expectedDepositResponse, + }), + expect.objectContaining({ + ...expectedWithdrawalResponse, + }), + ]), + ); + }); + + it('Get /transfers/parentSubaccountNumber returns empty when there are no transfers', async () => { + await testMocks.seedData(); + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + `&parentSubaccountNumber=${testConstants.defaultSubaccount.subaccountNumber}`, + }); + + expect(response.body.transfers).toHaveLength(0); + }); + + it('Get /transfers/parentSubaccountNumber with non-existent address and subaccount number returns 404', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/transfers/parentSubaccountNumber?address=invalidaddress&parentSubaccountNumber=100', + expectedStatus: 404, + }); + + expect(response.body).toEqual({ + errors: [ + { + msg: 'No subaccount found with address invalidaddress and parentSubaccountNumber 100', + }, + ], + }); + }); + + it('Get /transfers/parentSubaccountNumber with invalid parentSubaccountNumber', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/parentSubaccountNumber?address=${testConstants.defaultAddress}` + + '&parentSubaccountNumber=128', + expectedStatus: 400, + }); + + expect(response.body).toEqual({ + errors: [ + { + location: 'query', + msg: 'parentSubaccountNumber must be a non-negative integer 
less than 128', + param: 'parentSubaccountNumber', + value: '128', + }, + ], + }); + }); + }); + + describe('GET /transfers/between', () => { + beforeEach(async () => { + await testMocks.seedData(); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + }); + + const firstTransfer: TransferCreateObject = testConstants.defaultTransfer; + const secondTransfer: TransferCreateObject = { + ...testConstants.defaultTransfer, + size: '5', + createdAt: testConstants.createdDateTime.plus({ minutes: 1 }).toISO(), + createdAtHeight: testConstants.createdHeight + 1, + eventId: testConstants.defaultTendermintEventId2, + }; + + const firstTransferResponse: TransferResponseObject = { + id: TransferTable.uuid( + firstTransfer.eventId, + firstTransfer.assetId, + firstTransfer.senderSubaccountId, + firstTransfer.recipientSubaccountId, + ), + sender: { + address: testConstants.defaultSubaccount.address, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + recipient: { + address: testConstants.defaultSubaccount2.address, + subaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }, + size: firstTransfer.size, + createdAt: firstTransfer.createdAt, + createdAtHeight: firstTransfer.createdAtHeight, + symbol: 'USDC', + type: TransferType.TRANSFER_OUT, + transactionHash: firstTransfer.transactionHash, + }; + const secondTransferResponse: TransferResponseObject = { + ...firstTransferResponse, + id: TransferTable.uuid( + secondTransfer.eventId, + secondTransfer.assetId, + secondTransfer.senderSubaccountId, + secondTransfer.recipientSubaccountId, + ), + size: secondTransfer.size, + createdAt: secondTransfer.createdAt, + createdAtHeight: secondTransfer.createdAtHeight, + }; + + async function getTransferBetweenResponse( + createdBeforeOrAtHeight?: number, + createdBeforeOrAt?: IsoString, + ): Promise { + const queryParams: TransferBetweenRequest = { + sourceAddress: testConstants.defaultSubaccount.address, + sourceSubaccountNumber: 
testConstants.defaultSubaccount.subaccountNumber, + recipientAddress: testConstants.defaultSubaccount2.address, + recipientSubaccountNumber: testConstants.defaultSubaccount2.subaccountNumber, + }; + + if (createdBeforeOrAtHeight) { + queryParams.createdBeforeOrAtHeight = createdBeforeOrAtHeight; + } + + if (createdBeforeOrAt) { + queryParams.createdBeforeOrAt = createdBeforeOrAt; + } + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/transfers/between?${getQueryString(queryParams as any)}`, + }); + + return response.body; + } + + it('Returns successfully when there are no transfers between wallets', async () => { + await dbHelpers.clearData(); + + const transferBetweenResponse: TransferBetweenResponse = await getTransferBetweenResponse(); + expect(transferBetweenResponse.transfersSubset).toHaveLength(0); + expect(transferBetweenResponse.totalNetTransfers).toEqual('0'); + }); + + it('Returns successfully when source subaccount does not exist', async () => { + await SubaccountTable.deleteById(testConstants.defaultSubaccountId); + + const transferBetweenResponse: TransferBetweenResponse = await getTransferBetweenResponse(); + expect(transferBetweenResponse.transfersSubset).toHaveLength(0); + expect(transferBetweenResponse.totalNetTransfers).toEqual('0'); + }); + + it('Returns successfully when recipient subaccount does not exist', async () => { + await SubaccountTable.deleteById(testConstants.defaultSubaccountId2); + + const transferBetweenResponse: TransferBetweenResponse = await getTransferBetweenResponse(); + expect(transferBetweenResponse.transfersSubset).toHaveLength(0); + expect(transferBetweenResponse.totalNetTransfers).toEqual('0'); + + }); + + it('Returns successfully with transfers and net transfers', async () => { + await Promise.all([ + TransferTable.create(firstTransfer), + TransferTable.create(secondTransfer), + ]); + + const transferBetweenResponse: TransferBetweenResponse = await 
getTransferBetweenResponse(); + expect(transferBetweenResponse.transfersSubset).toHaveLength(2); + expect(transferBetweenResponse.transfersSubset).toEqual([ + secondTransferResponse, + firstTransferResponse, + ]); + expect(transferBetweenResponse.totalNetTransfers).toEqual( + Big(firstTransfer.size).plus(secondTransfer.size).toFixed(), + ); + }); + + it('Successfully filters by createdBeforeOrAtHeight', async () => { + await Promise.all([ + TransferTable.create(firstTransfer), + TransferTable.create(secondTransfer), + ]); + + const transferBetweenResponse: TransferBetweenResponse = await getTransferBetweenResponse( + +firstTransfer.createdAtHeight, + ); + expect(transferBetweenResponse.transfersSubset).toHaveLength(1); + expect(transferBetweenResponse.transfersSubset).toEqual([ + firstTransferResponse, + ]); + expect(transferBetweenResponse.totalNetTransfers).toEqual( + Big(firstTransfer.size).plus(secondTransfer.size).toFixed(), + ); + }); + + it('Successfully filters by createdBeforeOrAt', async () => { + await Promise.all([ + TransferTable.create(firstTransfer), + TransferTable.create(secondTransfer), + ]); + + const transferBetweenResponse: TransferBetweenResponse = await getTransferBetweenResponse( + undefined, + firstTransfer.createdAt, + ); + expect(transferBetweenResponse.transfersSubset).toHaveLength(1); + expect(transferBetweenResponse.transfersSubset).toEqual([ + firstTransferResponse, + ]); + expect(transferBetweenResponse.totalNetTransfers).toEqual( + Big(firstTransfer.size).plus(secondTransfer.size).toFixed(), + ); + }); }); }); diff --git a/indexer/services/comlink/__tests__/controllers/api/v4/vault-controller.test.ts b/indexer/services/comlink/__tests__/controllers/api/v4/vault-controller.test.ts new file mode 100644 index 00000000000..03a453daff3 --- /dev/null +++ b/indexer/services/comlink/__tests__/controllers/api/v4/vault-controller.test.ts @@ -0,0 +1,662 @@ +import { + dbHelpers, + testConstants, + testMocks, + PnlTicksTable, + 
perpetualMarketRefresher, + BlockTable, + liquidityTierRefresher, + SubaccountTable, + PositionSide, + PerpetualPositionTable, + AssetPositionTable, + FundingIndexUpdatesTable, + PnlTicksFromDatabase, + VaultTable, + MEGAVAULT_MODULE_ADDRESS, + MEGAVAULT_SUBACCOUNT_ID, + TransferTable, + VaultPnlTicksView, +} from '@dydxprotocol-indexer/postgres'; +import { RequestMethod, VaultHistoricalPnl } from '../../../../src/types'; +import request from 'supertest'; +import { getFixedRepresentation, sendRequest } from '../../../helpers/helpers'; +import { DateTime, Settings } from 'luxon'; +import Big from 'big.js'; +import config from '../../../../src/config'; +import { clearVaultStartPnl, startVaultStartPnlCache } from '../../../../src/caches/vault-start-pnl'; + +describe('vault-controller#V4', () => { + const latestBlockHeight: string = '25'; + const currentHourBlockHeight: string = '10'; + const currentDayBlockHeight: string = '9'; + const twoHourBlockHeight: string = '7'; + const almostTwoDayBlockHeight: string = '5'; + const twoDayBlockHeight: string = '3'; + const currentDay: DateTime = DateTime.utc().startOf('day').minus({ hour: 5 }); + const currentHour: DateTime = currentDay.plus({ hour: 1 }); + const latestTime: DateTime = currentDay.plus({ minute: 90 }); + const twoHoursAgo: DateTime = currentDay.minus({ hour: 2 }); + const twoDaysAgo: DateTime = currentDay.minus({ day: 2 }); + const almostTwoDaysAgo: DateTime = currentDay.minus({ hour: 47 }); + const initialFundingIndex: string = '10000'; + const vault1Equity: number = 159500; + const vault2Equity: number = 10000; + const mainVaultEquity: number = 10000; + const vaultPnlHistoryHoursPrev: number = config.VAULT_PNL_HISTORY_HOURS; + const vaultPnlLastPnlWindowPrev: number = config.VAULT_LATEST_PNL_TICK_WINDOW_HOURS; + const vaultPnlStartDatePrev: string = config.VAULT_PNL_START_DATE; + + beforeAll(async () => { + await dbHelpers.migrate(); + }); + + afterAll(async () => { + await dbHelpers.teardown(); + }); + + 
describe('GET /v1', () => { + beforeEach(async () => { + // Get a week of data for hourly pnl ticks. + config.VAULT_PNL_HISTORY_HOURS = 168; + // Use last 48 hours to get latest pnl tick for tests. + config.VAULT_LATEST_PNL_TICK_WINDOW_HOURS = 48; + // Use a time before all pnl ticks as the pnl start date. + config.VAULT_PNL_START_DATE = '2020-01-01T00:00:00Z'; + await testMocks.seedData(); + await perpetualMarketRefresher.updatePerpetualMarkets(); + await liquidityTierRefresher.updateLiquidityTiers(); + await Promise.all([ + BlockTable.create({ + ...testConstants.defaultBlock, + time: twoDaysAgo.toISO(), + blockHeight: twoDayBlockHeight, + }), + BlockTable.create({ + ...testConstants.defaultBlock, + time: twoHoursAgo.toISO(), + blockHeight: twoHourBlockHeight, + }), + BlockTable.create({ + ...testConstants.defaultBlock, + time: currentDay.toISO(), + blockHeight: currentDayBlockHeight, + }), + BlockTable.create({ + ...testConstants.defaultBlock, + time: latestTime.toISO(), + blockHeight: latestBlockHeight, + }), + BlockTable.create({ + ...testConstants.defaultBlock, + time: almostTwoDaysAgo.toISO(), + blockHeight: almostTwoDayBlockHeight, + }), + BlockTable.create({ + ...testConstants.defaultBlock, + time: currentHour.toISO(), + blockHeight: currentHourBlockHeight, + }), + ]); + await SubaccountTable.create(testConstants.vaultSubaccount); + await SubaccountTable.create({ + address: MEGAVAULT_MODULE_ADDRESS, + subaccountNumber: 0, + updatedAt: latestTime.toISO(), + updatedAtHeight: latestBlockHeight, + }); + await Promise.all([ + PerpetualPositionTable.create( + testConstants.defaultPerpetualPosition, + ), + AssetPositionTable.upsert(testConstants.defaultAssetPosition), + AssetPositionTable.upsert({ + ...testConstants.defaultAssetPosition, + subaccountId: testConstants.vaultSubaccountId, + }), + FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + fundingIndex: initialFundingIndex, + effectiveAtHeight: testConstants.createdHeight, + }), 
+ FundingIndexUpdatesTable.create({ + ...testConstants.defaultFundingIndexUpdate, + eventId: testConstants.defaultTendermintEventId2, + effectiveAtHeight: twoDayBlockHeight, + }), + ]); + Settings.now = () => latestTime.valueOf(); + }); + + afterEach(async () => { + await dbHelpers.clearData(); + await VaultPnlTicksView.refreshDailyView(); + await VaultPnlTicksView.refreshHourlyView(); + clearVaultStartPnl(); + config.VAULT_PNL_HISTORY_HOURS = vaultPnlHistoryHoursPrev; + config.VAULT_LATEST_PNL_TICK_WINDOW_HOURS = vaultPnlLastPnlWindowPrev; + config.VAULT_PNL_START_DATE = vaultPnlStartDatePrev; + Settings.now = () => new Date().valueOf(); + }); + + it.each([ + ['no resolution', '', [1, 2], 4, undefined], + ['daily resolution', '?resolution=day', [1, 2], 4, undefined], + ['hourly resolution', '?resolution=hour', [1, 2, 3, 4], 4, undefined], + ['start date adjust PnL', '?resolution=hour', [1, 2, 3, 4], 4, twoDaysAgo.toISO()], + ])('Get /megavault/historicalPnl with single vault subaccount (%s)', async ( + _name: string, + queryParam: string, + expectedTicksIndex: number[], + finalTickIndex: number, + startDate: string | undefined, + ) => { + if (startDate !== undefined) { + config.VAULT_PNL_START_DATE = startDate; + } + await VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultSubaccount.address, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + }); + const createdPnlTicks: PnlTicksFromDatabase[] = await createPnlTicks(); + // Adjust PnL by total pnl of start date + if (startDate !== undefined) { + for (const createdPnlTick of createdPnlTicks) { + createdPnlTick.totalPnl = Big(createdPnlTick.totalPnl).sub('10000').toFixed(); + } + } + const finalTick: PnlTicksFromDatabase = { + ...createdPnlTicks[finalTickIndex], + equity: Big(vault1Equity).toFixed(), + blockHeight: latestBlockHeight, + blockTime: latestTime.toISO(), + createdAt: latestTime.toISO(), + }; + + const response: request.Response = await sendRequest({ + type: 
RequestMethod.GET, + path: `/v4/vault/v1/megavault/historicalPnl${queryParam}`, + }); + + expect(response.body.megavaultPnl).toHaveLength(expectedTicksIndex.length + 1); + expect(response.body.megavaultPnl).toEqual( + expect.arrayContaining( + expectedTicksIndex.map((index: number) => { + return expect.objectContaining(createdPnlTicks[index]); + }).concat([finalTick]), + ), + ); + }); + + it.each([ + ['no resolution', '', [1, 2], [undefined, 7], [11, 12]], + ['daily resolution', '?resolution=day', [1, 2], [undefined, 7], [11, 12]], + [ + 'hourly resolution', + '?resolution=hour', + [1, 2, 3, 4], + [undefined, 7, 8, 9], + [11, 12, 13, 14], + ], + ])('Get /megavault/historicalPnl with 2 vault subaccounts and main subaccount (%s), ' + + 'excludes tick with missing vault ticks', async ( + _name: string, + queryParam: string, + expectedTicksIndex1: (number | undefined)[], + expectedTicksIndex2: (number | undefined)[], + expectedTicksIndexMain: (number | undefined)[], + ) => { + const expectedTicksArray: (number | undefined)[][] = [ + expectedTicksIndex1, + expectedTicksIndex2, + expectedTicksIndexMain, + ]; + await Promise.all([ + VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultAddress, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + createdAt: twoDaysAgo.toISO(), + }), + // Single tick for this vault will be excluded from result. 
+ VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.vaultAddress, + clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, + createdAt: almostTwoDaysAgo.toISO(), + }), + AssetPositionTable.upsert({ + ...testConstants.defaultAssetPosition, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + }), + TransferTable.create({ + ...testConstants.defaultTransfer, + recipientSubaccountId: MEGAVAULT_SUBACCOUNT_ID, + createdAt: twoDaysAgo.toISO(), + }), + ]); + + const createdPnlTicks: PnlTicksFromDatabase[] = await createPnlTicks( + true, // createMainSubaccountPnlTicks + ); + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/vault/v1/megavault/historicalPnl${queryParam}`, + }); + + const expectedPnlTickBase: any = { + equity: (parseFloat(testConstants.defaultPnlTick.equity) * 3).toString(), + // total pnl should be fetched from latest hourly pnl tick. + totalPnl: (parseFloat(testConstants.defaultPnlTick.totalPnl) * 4).toString(), + netTransfers: (parseFloat(testConstants.defaultPnlTick.netTransfers) * 3).toString(), + }; + const finalTick: PnlTicksFromDatabase = { + ...expectedPnlTickBase, + equity: Big(vault1Equity).add(vault2Equity).add(mainVaultEquity).toFixed(), + blockHeight: latestBlockHeight, + blockTime: latestTime.toISO(), + createdAt: latestTime.toISO(), + }; + + expect(response.body.megavaultPnl).toHaveLength(expectedTicksIndex1.length + 1); + expect(response.body.megavaultPnl).toEqual( + expect.arrayContaining( + expectedTicksIndex1.map((_: number | undefined, pos: number) => { + const pnlTickBase: any = { + equity: '0', + totalPnl: '0', + netTransfers: '0', + }; + let expectedTick: PnlTicksFromDatabase; + for (const expectedTicks of expectedTicksArray) { + if (expectedTicks[pos] !== undefined) { + expectedTick = createdPnlTicks[expectedTicks[pos]!]; + pnlTickBase.equity = Big(pnlTickBase.equity).add(expectedTick.equity).toFixed(); + pnlTickBase.totalPnl = Big(pnlTickBase.totalPnl) +
.add(expectedTick.totalPnl) + .toFixed(); + pnlTickBase.netTransfers = Big(pnlTickBase.netTransfers) + .add(expectedTick.netTransfers) + .toFixed(); + } + } + return expect.objectContaining({ + ...pnlTickBase, + createdAt: expectedTick!.createdAt, + blockHeight: expectedTick!.blockHeight, + blockTime: expectedTick!.blockTime, + }); + }).concat([expect.objectContaining(finalTick)]), + ), + ); + }); + + it('Get /vaults/historicalPnl with no vault subaccounts', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/vault/v1/vaults/historicalPnl', + }); + + expect(response.body.vaultsPnl).toEqual([]); + }); + + it.each([ + ['no resolution', '', [1, 2], 4], + ['daily resolution', '?resolution=day', [1, 2], 4], + ['hourly resolution', '?resolution=hour', [1, 2, 3, 4], 4], + ])('Get /vaults/historicalPnl with single vault subaccount (%s)', async ( + _name: string, + queryParam: string, + expectedTicksIndex: number[], + currentTickIndex: number, + ) => { + await VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultAddress, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + }); + const createdPnlTicks: PnlTicksFromDatabase[] = await createPnlTicks(); + const finalTick: PnlTicksFromDatabase = { + ...createdPnlTicks[currentTickIndex], + equity: Big(vault1Equity).toFixed(), + blockHeight: latestBlockHeight, + blockTime: latestTime.toISO(), + createdAt: latestTime.toISO(), + }; + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/vault/v1/vaults/historicalPnl${queryParam}`, + }); + + expect(response.body.vaultsPnl).toHaveLength(1); + expect(response.body.vaultsPnl[0].historicalPnl).toHaveLength(expectedTicksIndex.length + 1); + expect(response.body.vaultsPnl[0]).toEqual({ + ticker: testConstants.defaultPerpetualMarket.ticker, + historicalPnl: expect.arrayContaining( + expectedTicksIndex.map((index: number) => { + return 
expect.objectContaining(createdPnlTicks[index]); + }).concat(finalTick), + ), + }); + }); + + it.each([ + ['no resolution', '', [1, 2], [6, 7], 4, 9], + ['daily resolution', '?resolution=day', [1, 2], [6, 7], 4, 9], + ['hourly resolution', '?resolution=hour', [1, 2, 3, 4], [6, 7, 8, 9], 4, 9], + ])('Get /vaults/historicalPnl with 2 vault subaccounts (%s)', async ( + _name: string, + queryParam: string, + expectedTicksIndex1: number[], + expectedTicksIndex2: number[], + currentTickIndex1: number, + currentTickIndex2: number, + ) => { + await Promise.all([ + VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultAddress, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + }), + VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.vaultAddress, + clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, + }), + ]); + const createdPnlTicks: PnlTicksFromDatabase[] = await createPnlTicks(); + const finalTick1: PnlTicksFromDatabase = { + ...createdPnlTicks[currentTickIndex1], + equity: Big(vault1Equity).toFixed(), + blockHeight: latestBlockHeight, + blockTime: latestTime.toISO(), + createdAt: latestTime.toISO(), + }; + const finalTick2: PnlTicksFromDatabase = { + ...createdPnlTicks[currentTickIndex2], + equity: Big(vault2Equity).toFixed(), + blockHeight: latestBlockHeight, + blockTime: latestTime.toISO(), + createdAt: latestTime.toISO(), + }; + + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: `/v4/vault/v1/vaults/historicalPnl${queryParam}`, + }); + + const expectedVaultPnl: VaultHistoricalPnl = { + ticker: testConstants.defaultPerpetualMarket.ticker, + historicalPnl: expectedTicksIndex1.map((index: number) => { + return createdPnlTicks[index]; + }).concat(finalTick1), + }; + + const expectedVaultPnl2: VaultHistoricalPnl = { + ticker: testConstants.defaultPerpetualMarket2.ticker, + historicalPnl: expectedTicksIndex2.map((index: number) => { + return 
createdPnlTicks[index]; + }).concat(finalTick2), + }; + + expect(response.body.vaultsPnl).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ...expectedVaultPnl, + }), + expect.objectContaining({ + ...expectedVaultPnl2, + }), + ]), + ); + }); + + it('Get /megavault/positions with no vault subaccount', async () => { + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/vault/v1/megavault/positions', + }); + + expect(response.body).toEqual({ + positions: [], + }); + }); + + it('Get /megavault/positions with 1 vault subaccount', async () => { + await VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultAddress, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + }); + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/vault/v1/megavault/positions', + }); + + expect(response.body).toEqual({ + positions: [ + { + equity: getFixedRepresentation(159500), + perpetualPosition: { + market: testConstants.defaultPerpetualMarket.ticker, + size: testConstants.defaultPerpetualPosition.size, + side: testConstants.defaultPerpetualPosition.side, + entryPrice: getFixedRepresentation( + testConstants.defaultPerpetualPosition.entryPrice!, + ), + maxSize: testConstants.defaultPerpetualPosition.maxSize, + // 200000 + 10*(10000-10050)=199500 + netFunding: getFixedRepresentation('199500'), + // sumClose=0, so realized Pnl is the same as the net funding of the position. + // Unsettled funding is funding payments that already "happened" but not reflected + // in the subaccount's balance yet, so it's considered a part of realizedPnl. 
+ realizedPnl: getFixedRepresentation('199500'), + // size * (index-entry) = 10*(15000-20000) = -50000 + unrealizedPnl: getFixedRepresentation(-50000), + status: testConstants.defaultPerpetualPosition.status, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen, + sumClose: testConstants.defaultPerpetualPosition.sumClose, + createdAt: testConstants.defaultPerpetualPosition.createdAt, + createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, + exitPrice: null, + closedAt: null, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + assetPosition: { + symbol: testConstants.defaultAsset.symbol, + size: '9500', + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + ticker: testConstants.defaultPerpetualMarket.ticker, + }, + ], + }); + }); + + it('Get /megavault/positions with 2 vault subaccount, 1 with no perpetual, 1 invalid', async () => { + await Promise.all([ + VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.defaultAddress, + clobPairId: testConstants.defaultPerpetualMarket.clobPairId, + }), + VaultTable.create({ + ...testConstants.defaultVault, + address: testConstants.vaultAddress, + clobPairId: testConstants.defaultPerpetualMarket2.clobPairId, + }), + VaultTable.create({ + ...testConstants.defaultVault, + address: 'invalid', + clobPairId: '999', + }), + ]); + const response: request.Response = await sendRequest({ + type: RequestMethod.GET, + path: '/v4/vault/v1/megavault/positions', + }); + + expect(response.body).toEqual({ + positions: [ + // Same position as test with a single vault subaccount. 
+ { + equity: getFixedRepresentation(159500), + perpetualPosition: { + market: testConstants.defaultPerpetualMarket.ticker, + size: testConstants.defaultPerpetualPosition.size, + side: testConstants.defaultPerpetualPosition.side, + entryPrice: getFixedRepresentation( + testConstants.defaultPerpetualPosition.entryPrice!, + ), + maxSize: testConstants.defaultPerpetualPosition.maxSize, + netFunding: getFixedRepresentation('199500'), + realizedPnl: getFixedRepresentation('199500'), + unrealizedPnl: getFixedRepresentation(-50000), + status: testConstants.defaultPerpetualPosition.status, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen, + sumClose: testConstants.defaultPerpetualPosition.sumClose, + createdAt: testConstants.defaultPerpetualPosition.createdAt, + createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, + exitPrice: null, + closedAt: null, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + assetPosition: { + symbol: testConstants.defaultAsset.symbol, + size: '9500', + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + ticker: testConstants.defaultPerpetualMarket.ticker, + }, + { + equity: getFixedRepresentation(10000), + perpetualPosition: undefined, + assetPosition: { + symbol: testConstants.defaultAsset.symbol, + size: testConstants.defaultAssetPosition.size, + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + ticker: testConstants.defaultPerpetualMarket2.ticker, + }, + ], + }); + }); + }); + + async function createPnlTicks( + createMainSubaccountPnlTicks: boolean = false, + ): Promise { + const createdTicks: PnlTicksFromDatabase[] = await Promise.all([ + PnlTicksTable.create(testConstants.defaultPnlTick), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + blockTime: twoDaysAgo.toISO(), 
+ createdAt: twoDaysAgo.toISO(), + blockHeight: twoDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + blockTime: twoHoursAgo.toISO(), + createdAt: twoHoursAgo.toISO(), + blockHeight: twoHourBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + blockTime: currentDay.toISO(), + createdAt: currentDay.toISO(), + blockHeight: currentDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + totalPnl: (2 * parseFloat(testConstants.defaultPnlTick.totalPnl)).toString(), + blockTime: currentHour.toISO(), + createdAt: currentHour.toISO(), + blockHeight: currentHourBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: testConstants.vaultSubaccountId, + }), + // Invalid pnl tick to be excluded as only a single pnl tick but 2 pnl ticks should exist. + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: testConstants.vaultSubaccountId, + blockTime: almostTwoDaysAgo.toISO(), + createdAt: almostTwoDaysAgo.toISO(), + blockHeight: almostTwoDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: testConstants.vaultSubaccountId, + blockTime: twoHoursAgo.toISO(), + createdAt: twoHoursAgo.toISO(), + blockHeight: twoHourBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: testConstants.vaultSubaccountId, + blockTime: currentDay.toISO(), + createdAt: currentDay.toISO(), + blockHeight: currentDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: testConstants.vaultSubaccountId, + blockTime: currentHour.toISO(), + createdAt: currentHour.toISO(), + blockHeight: currentHourBlockHeight, + }), + ]); + + if (createMainSubaccountPnlTicks) { + const mainSubaccountTicks: PnlTicksFromDatabase[] = await Promise.all([ + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + }), + 
PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + blockTime: twoDaysAgo.toISO(), + createdAt: twoDaysAgo.toISO(), + blockHeight: twoDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + blockTime: twoHoursAgo.toISO(), + createdAt: twoHoursAgo.toISO(), + blockHeight: twoHourBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + blockTime: currentDay.toISO(), + createdAt: currentDay.toISO(), + blockHeight: currentDayBlockHeight, + }), + PnlTicksTable.create({ + ...testConstants.defaultPnlTick, + subaccountId: MEGAVAULT_SUBACCOUNT_ID, + blockTime: currentHour.toISO(), + createdAt: currentHour.toISO(), + blockHeight: currentHourBlockHeight, + }), + ]); + createdTicks.push(...mainSubaccountTicks); + } + await VaultPnlTicksView.refreshDailyView(); + await VaultPnlTicksView.refreshHourlyView(); + await startVaultStartPnlCache(); + + return createdTicks; + } +}); diff --git a/indexer/services/comlink/__tests__/helpers/helpers.ts b/indexer/services/comlink/__tests__/helpers/helpers.ts index d5fe46f5f66..070541f392d 100644 --- a/indexer/services/comlink/__tests__/helpers/helpers.ts +++ b/indexer/services/comlink/__tests__/helpers/helpers.ts @@ -15,7 +15,7 @@ import request from 'supertest'; import IndexV4 from '../../src/controllers/api/index-v4'; import Server from '../../src/request-helpers/server'; -import { RequestMethod } from '../../src/types'; +import { RequestMethod, FillResponseObject, MarketType } from '../../src/types'; const app: e.Express = Server(IndexV4); @@ -145,6 +145,27 @@ export async function createMakerTakerOrderAndFill( return { makerFill, takerFill }; } +export function fillResponseObjectFromFillCreateObject( + fill: FillCreateObject, + subaccountNumber: number, +): Partial { + const fillResponse: Partial = { + side: fill.side, + liquidity: fill.liquidity, + 
marketType: MarketType.PERPETUAL, + price: fill.price, + size: fill.size, + fee: fill.fee, + affiliateRevShare: fill.affiliateRevShare, + type: fill.type, + orderId: fill.orderId, + createdAt: fill.createdAt, + createdAtHeight: fill.createdAtHeight, + subaccountNumber, + }; + return fillResponse; +} + function randomInt(range: number = 1000): number { return Math.floor(Math.random() * range); } diff --git a/indexer/services/comlink/__tests__/lib/compliance-and-geo-check.test.ts b/indexer/services/comlink/__tests__/lib/compliance-and-geo-check.test.ts index d154d32c9a7..fb2b0470ac2 100644 --- a/indexer/services/comlink/__tests__/lib/compliance-and-geo-check.test.ts +++ b/indexer/services/comlink/__tests__/lib/compliance-and-geo-check.test.ts @@ -18,6 +18,7 @@ import { INDEXER_COMPLIANCE_BLOCKED_PAYLOAD, INDEXER_GEOBLOCKED_PAYLOAD, isRestrictedCountryHeaders, + isWhitelistedAddress, } from '@dydxprotocol-indexer/compliance'; import config from '../../src/config'; @@ -69,6 +70,7 @@ export const complianceCheckApp = Server(router); describe('compliance-check', () => { let isRestrictedCountrySpy: jest.SpyInstance; + let isWhitelistedAddressSpy: jest.SpyInstance; beforeAll(async () => { config.INDEXER_LEVEL_GEOBLOCKING_ENABLED = true; @@ -77,6 +79,8 @@ describe('compliance-check', () => { beforeEach(async () => { isRestrictedCountrySpy = isRestrictedCountryHeaders as unknown as jest.Mock; + isWhitelistedAddressSpy = isWhitelistedAddress as jest.Mock; + isWhitelistedAddressSpy.mockReturnValue(false); await testMocks.seedData(); }); @@ -86,6 +90,7 @@ describe('compliance-check', () => { afterEach(async () => { jest.restoreAllMocks(); + config.WHITELISTED_ADDRESSES = ''; await dbHelpers.clearData(); }); @@ -189,6 +194,26 @@ describe('compliance-check', () => { }); }); + it.each([ + ['query', `/v4/check-compliance-query?address=${testConstants.defaultAddress}`], + ['param', `/v4/check-compliance-param/${testConstants.defaultAddress}`], + ])('does not return 403 if address in 
request is in FIRST_STRIKE_CLOSE_ONLY and from restricted country (%s)', async ( + _name: string, + path: string, + ) => { + isRestrictedCountrySpy.mockReturnValueOnce(true); + await ComplianceStatusTable.create({ + ...testConstants.compliantStatusData, + status: ComplianceStatus.FIRST_STRIKE_CLOSE_ONLY, + }); + await sendRequestToApp({ + type: RequestMethod.GET, + path, + expressApp: complianceCheckApp, + expectedStatus: 200, + }); + }); + it.each([ ['query', `/v4/check-compliance-query?address=${testConstants.defaultAddress}`], ['param', `/v4/check-compliance-param/${testConstants.defaultAddress}`], @@ -213,6 +238,23 @@ describe('compliance-check', () => { })); }); + it.each([ + ['query', `/v4/check-compliance-query?address=${testConstants.defaultAddress}`], + ['param', `/v4/check-compliance-param/${testConstants.defaultAddress}`], + ])('does not return 403 if address is whitelisted and request is from restricted country (%s)', async ( + _name: string, + path: string, + ) => { + isWhitelistedAddressSpy.mockReturnValue(true); + isRestrictedCountrySpy.mockReturnValueOnce(true); + await sendRequestToApp({ + type: RequestMethod.GET, + path, + expressApp: complianceCheckApp, + expectedStatus: 200, + }); + }); + it.each([ ['query', `/v4/check-compliance-query?address=${testConstants.blockedAddress}`], ['param', `/v4/check-compliance-param/${testConstants.blockedAddress}`], diff --git a/indexer/services/comlink/__tests__/lib/helpers.test.ts b/indexer/services/comlink/__tests__/lib/helpers.test.ts index 032c2e3ceb0..03d169db350 100644 --- a/indexer/services/comlink/__tests__/lib/helpers.test.ts +++ b/indexer/services/comlink/__tests__/lib/helpers.test.ts @@ -28,6 +28,9 @@ import { LiquidityTiersFromDatabase, LiquidityTiersTable, liquidityTierRefresher, + PnlTicksFromDatabase, + PnlTicksTable, + AssetFromDatabase, } from '@dydxprotocol-indexer/postgres'; import { adjustUSDCAssetPosition, @@ -40,6 +43,9 @@ import { getTotalUnsettledFunding, 
getPerpetualPositionsWithUpdatedFunding, initializePerpetualPositionsWithFunding, + getChildSubaccountNums, + aggregateHourlyPnlTicks, + getSubaccountResponse, } from '../../src/lib/helpers'; import _ from 'lodash'; import Big from 'big.js'; @@ -52,8 +58,11 @@ import { defaultTendermintEventId2, defaultTendermintEventId3, } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; -import { AssetPositionsMap, PerpetualPositionWithFunding } from '../../src/types'; +import { + AggregatedPnlTick, AssetPositionsMap, PerpetualPositionWithFunding, SubaccountResponseObject, +} from '../../src/types'; import { ZERO, ZERO_USDC_POSITION } from '../../src/lib/constants'; +import { DateTime } from 'luxon'; describe('helpers', () => { afterEach(async () => { @@ -203,7 +212,7 @@ describe('helpers', () => { }); const filteredPerpetualPositions: PerpetualPositionFromDatabase[ - ] = await filterPositionsByLatestEventIdPerPerpetual( + ] = filterPositionsByLatestEventIdPerPerpetual( initializePerpetualPositionsWithFunding([ perpetualPosition, perpetualPosition2, @@ -307,7 +316,6 @@ describe('helpers', () => { latestBlock!, ); - expect(Object.keys(lastUpdatedFundingIndexMap)).toHaveLength(3); expect( lastUpdatedFundingIndexMap[testConstants.defaultFundingIndexUpdate.perpetualId] .toString(), @@ -320,7 +328,6 @@ describe('helpers', () => { lastUpdatedFundingIndexMap[testConstants.defaultPerpetualMarket3.id] .toString(), ).toEqual(ZERO.toString()); - expect(Object.keys(latestFundingIndexMap)).toHaveLength(3); expect(latestFundingIndexMap[fundingIndexUpdate3.perpetualId].toString()) .toEqual(fundingIndexUpdate3.fundingIndex); expect(latestFundingIndexMap[testConstants.defaultPerpetualMarket2.id].toString()) @@ -392,6 +399,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }; const unsettledFunding: Big = Big('300'); @@ -401,7 +409,7 @@ describe('helpers', () => { adjustedUSDCAssetPositionSize, }: { 
assetPositionsMap: AssetPositionsMap, - adjustedUSDCAssetPositionSize: string + adjustedUSDCAssetPositionSize: string, } = adjustUSDCAssetPosition(assetPositions, unsettledFunding); // Original asset positions object should be unchanged @@ -416,6 +424,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(assetPositionsMap).toEqual({ @@ -429,6 +438,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(adjustedUSDCAssetPositionSize).toEqual(expectedAdjustedPositionSize); @@ -458,6 +468,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }; @@ -466,7 +477,7 @@ describe('helpers', () => { adjustedUSDCAssetPositionSize, }: { assetPositionsMap: AssetPositionsMap, - adjustedUSDCAssetPositionSize: string + adjustedUSDCAssetPositionSize: string, } = adjustUSDCAssetPosition(assetPositions, Big(unsettledFunding)); // Original asset positions object should be unchanged @@ -481,6 +492,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(assetPositionsMap).toEqual({ @@ -494,6 +506,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(adjustedUSDCAssetPositionSize).toEqual(expectedAdjustedPositionSize); @@ -513,6 +526,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }; @@ -521,7 +535,7 @@ describe('helpers', () => { adjustedUSDCAssetPositionSize, }: { assetPositionsMap: AssetPositionsMap, - adjustedUSDCAssetPositionSize: string + adjustedUSDCAssetPositionSize: string, } = adjustUSDCAssetPosition(assetPositions, Big(funding)); // Original asset positions object should be unchanged @@ -531,6 +545,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); 
expect(assetPositionsMap).toEqual({ @@ -544,6 +559,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(adjustedUSDCAssetPositionSize).toEqual(funding); @@ -569,6 +585,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }; @@ -577,7 +594,7 @@ describe('helpers', () => { adjustedUSDCAssetPositionSize, }: { assetPositionsMap: AssetPositionsMap, - adjustedUSDCAssetPositionSize: string + adjustedUSDCAssetPositionSize: string, } = adjustUSDCAssetPosition(assetPositions, Big(unsettledFunding)); // Original asset positions object should be unchanged @@ -592,6 +609,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(assetPositionsMap).toEqual({ @@ -600,6 +618,7 @@ describe('helpers', () => { side: PositionSide.LONG, assetId: '0', size: '1', + subaccountNumber: 0, }, }); expect(adjustedUSDCAssetPositionSize).toEqual(ZERO.toString()); @@ -687,4 +706,252 @@ describe('helpers', () => { .toEqual('0'); }); }); + + describe('getChildSubaccountNums', () => { + it('Gets a list of all possible child subaccount numbers for a parent subaccount 0', () => { + const childSubaccounts = getChildSubaccountNums(0); + expect(childSubaccounts.length).toEqual(1000); + expect(childSubaccounts[0]).toEqual(0); + expect(childSubaccounts[1]).toEqual(128); + expect(childSubaccounts[999]).toEqual(128 * 999); + }); + it('Gets a list of all possible child subaccount numbers for a parent subaccount 127', () => { + const childSubaccounts = getChildSubaccountNums(127); + expect(childSubaccounts.length).toEqual(1000); + expect(childSubaccounts[0]).toEqual(127); + expect(childSubaccounts[1]).toEqual(128 + 127); + expect(childSubaccounts[999]).toEqual(128 * 999 + 127); + }); + }); + + describe('getChildSubaccountNums', () => { + it('Throws an error if the parent subaccount number is greater than or equal to the maximum 
parent subaccount number', () => { + expect(() => getChildSubaccountNums(128)).toThrowError('Parent subaccount number must be less than 128'); + }); + }); + + describe('getSubaccountResponse', () => { + it('gets subaccount response with adjusted perpetual positions', () => { + // Helper function does not care about ids. + const id: string = 'mock-id'; + const perpetualPositions: PerpetualPositionFromDatabase[] = [{ + ...testConstants.defaultPerpetualPosition, + id, + entryPrice: '20000', + sumOpen: '10', + sumClose: '0', + }]; + const assetPositions: AssetPositionFromDatabase[] = [{ + ...testConstants.defaultAssetPosition, + id, + }]; + const lastUpdatedFundingIndexMap: FundingIndexMap = { + 0: Big('10000'), + 1: Big('0'), + 2: Big('0'), + 3: Big('0'), + 4: Big('0'), + }; + const latestUpdatedFundingIndexMap: FundingIndexMap = { + 0: Big('10050'), + 1: Big('0'), + 2: Big('0'), + 3: Big('0'), + 4: Big('0'), + }; + const assets: AssetFromDatabase[] = [{ + ...testConstants.defaultAsset, + id: '0', + }]; + const markets: MarketFromDatabase[] = [ + testConstants.defaultMarket, + ]; + const subaccount: SubaccountFromDatabase = { + ...testConstants.defaultSubaccount, + id, + }; + const perpetualMarketsMap: PerpetualMarketsMap = { + 0: { + ...testConstants.defaultPerpetualMarket, + }, + }; + + const response: SubaccountResponseObject = getSubaccountResponse( + subaccount, + perpetualPositions, + assetPositions, + assets, + markets, + perpetualMarketsMap, + '3', + latestUpdatedFundingIndexMap, + lastUpdatedFundingIndexMap, + ); + + expect(response).toEqual({ + address: testConstants.defaultAddress, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + equity: getFixedRepresentation(159500), + freeCollateral: getFixedRepresentation(152000), + marginEnabled: true, + updatedAtHeight: testConstants.defaultSubaccount.updatedAtHeight, + latestProcessedBlockHeight: '3', + openPerpetualPositions: { + [testConstants.defaultPerpetualMarket.ticker]: { + market: 
testConstants.defaultPerpetualMarket.ticker, + size: testConstants.defaultPerpetualPosition.size, + side: testConstants.defaultPerpetualPosition.side, + entryPrice: getFixedRepresentation( + testConstants.defaultPerpetualPosition.entryPrice!, + ), + maxSize: testConstants.defaultPerpetualPosition.maxSize, + // 200000 + 10*(10000-10050)=199500 + netFunding: getFixedRepresentation('199500'), + // sumClose=0, so realized Pnl is the same as the net funding of the position. + // Unsettled funding is funding payments that already "happened" but not reflected + // in the subaccount's balance yet, so it's considered a part of realizedPnl. + realizedPnl: getFixedRepresentation('199500'), + // size * (index-entry) = 10*(15000-20000) = -50000 + unrealizedPnl: getFixedRepresentation(-50000), + status: testConstants.defaultPerpetualPosition.status, + sumOpen: testConstants.defaultPerpetualPosition.sumOpen, + sumClose: testConstants.defaultPerpetualPosition.sumClose, + createdAt: testConstants.defaultPerpetualPosition.createdAt, + createdAtHeight: testConstants.defaultPerpetualPosition.createdAtHeight, + exitPrice: undefined, + closedAt: undefined, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + }, + assetPositions: { + [testConstants.defaultAsset.symbol]: { + symbol: testConstants.defaultAsset.symbol, + size: '9500', + side: PositionSide.LONG, + assetId: testConstants.defaultAssetPosition.assetId, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, + }, + }, + }); + }); + }); + + describe('aggregateHourlyPnlTicks', () => { + it('aggregates single pnl tick', () => { + const pnlTick: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + id: PnlTicksTable.uuid( + testConstants.defaultPnlTick.subaccountId, + testConstants.defaultPnlTick.createdAt, + ), + }; + + const aggregatedPnlTicks: AggregatedPnlTick[] = aggregateHourlyPnlTicks([pnlTick]); + expect( + aggregatedPnlTicks, + ).toEqual( + [expect.objectContaining( + { + 
pnlTick: expect.objectContaining(testConstants.defaultPnlTick), + numTicks: 1, + }, + )], + ); + }); + + it('aggregates multiple pnl ticks same height and de-dupes ticks', () => { + const pnlTick: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + id: PnlTicksTable.uuid( + testConstants.defaultPnlTick.subaccountId, + testConstants.defaultPnlTick.createdAt, + ), + }; + const pnlTick2: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + subaccountId: testConstants.defaultSubaccountId2, + id: PnlTicksTable.uuid( + testConstants.defaultSubaccountId2, + testConstants.defaultPnlTick.createdAt, + ), + }; + const blockHeight2: string = '80'; + const blockTime2: string = DateTime.fromISO(pnlTick.createdAt).plus({ hour: 1 }).toISO(); + const pnlTick3: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + id: PnlTicksTable.uuid( + testConstants.defaultPnlTick.subaccountId, + blockTime2, + ), + blockHeight: blockHeight2, + blockTime: blockTime2, + createdAt: blockTime2, + }; + const blockHeight3: string = '81'; + const blockTime3: string = DateTime.fromISO(pnlTick.createdAt).plus({ minute: 61 }).toISO(); + const pnlTick4: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + subaccountId: testConstants.defaultSubaccountId2, + id: PnlTicksTable.uuid( + testConstants.defaultSubaccountId2, + blockTime3, + ), + equity: '1', + totalPnl: '2', + netTransfers: '3', + blockHeight: blockHeight3, + blockTime: blockTime3, + createdAt: blockTime3, + }; + const blockHeight4: string = '82'; + const blockTime4: string = DateTime.fromISO(pnlTick.createdAt).startOf('hour').plus({ minute: 63 }).toISO(); + // should be de-duped + const pnlTick5: PnlTicksFromDatabase = { + ...testConstants.defaultPnlTick, + subaccountId: testConstants.defaultSubaccountId2, + id: PnlTicksTable.uuid( + testConstants.defaultSubaccountId2, + blockTime4, + ), + equity: '1', + totalPnl: '2', + netTransfers: '3', + blockHeight: blockHeight4, + blockTime: blockTime4, + createdAt: 
blockTime4, + }; + + const aggregatedPnlTicks: AggregatedPnlTick[] = aggregateHourlyPnlTicks( + [pnlTick, pnlTick2, pnlTick3, pnlTick4, pnlTick5], + ); + expect(aggregatedPnlTicks).toEqual( + expect.arrayContaining([ + // Combined pnl tick at initial hour + expect.objectContaining({ + pnlTick: expect.objectContaining({ + equity: (parseFloat(testConstants.defaultPnlTick.equity) + + parseFloat(pnlTick2.equity)).toString(), + totalPnl: (parseFloat(testConstants.defaultPnlTick.totalPnl) + + parseFloat(pnlTick2.totalPnl)).toString(), + netTransfers: (parseFloat(testConstants.defaultPnlTick.netTransfers) + + parseFloat(pnlTick2.netTransfers)).toString(), + }), + numTicks: 2, + }), + // Combined pnl tick at initial hour + 1 hour and initial hour + 1 hour, 1 minute + expect.objectContaining({ + pnlTick: expect.objectContaining({ + equity: (parseFloat(pnlTick3.equity) + + parseFloat(pnlTick4.equity)).toString(), + totalPnl: (parseFloat(pnlTick3.totalPnl) + + parseFloat(pnlTick4.totalPnl)).toString(), + netTransfers: (parseFloat(pnlTick3.netTransfers) + + parseFloat(pnlTick4.netTransfers)).toString(), + }), + numTicks: 2, + }), + ]), + ); + }); + }); }); diff --git a/indexer/services/comlink/__tests__/lib/request-helpers/request-transformer.test.ts b/indexer/services/comlink/__tests__/lib/request-helpers/request-transformer.test.ts index 07ebb2d930f..2f45bfd6937 100644 --- a/indexer/services/comlink/__tests__/lib/request-helpers/request-transformer.test.ts +++ b/indexer/services/comlink/__tests__/lib/request-helpers/request-transformer.test.ts @@ -77,6 +77,11 @@ describe('request-transformer', () => { stepSize: Big(10).pow(-9).toFixed(), // 10 * 1e-10 = 1e-9 stepBaseQuantums: perpetualMarket.stepBaseQuantums, subticksPerTick: perpetualMarket.subticksPerTick, + marketType: perpetualMarket.marketType, + openInterestLowerCap: liquidityTier.openInterestLowerCap, + openInterestUpperCap: liquidityTier.openInterestUpperCap, + baseOpenInterest: perpetualMarket.baseOpenInterest, + 
defaultFundingRate1H: perpetualMarket.defaultFundingRate1H, }, ); }); @@ -118,6 +123,10 @@ describe('request-transformer', () => { }; const responseObject: OrderResponseObject | undefined = postgresAndRedisOrderToResponseObject( filledOrder, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ); const expectedRedisOrderTIF: TimeInForce = protocolTranslations.protocolOrderTIFToTIF( @@ -126,13 +135,19 @@ describe('request-transformer', () => { expect(responseObject).not.toBeUndefined(); expect(responseObject).not.toEqual( - postgresOrderToResponseObject(filledOrder), + postgresOrderToResponseObject( + filledOrder, + testConstants.defaultSubaccount.subaccountNumber, + ), ); expect(responseObject).not.toEqual( redisOrderToResponseObject(redisTestConstants.defaultRedisOrder), ); expect(responseObject).toEqual({ - ...postgresOrderToResponseObject(filledOrder), + ...postgresOrderToResponseObject( + filledOrder, + testConstants.defaultSubaccount.subaccountNumber, + ), size: redisTestConstants.defaultRedisOrder.size, price: redisTestConstants.defaultRedisOrder.price, timeInForce: apiTranslations.orderTIFToAPITIF(expectedRedisOrderTIF), @@ -151,7 +166,10 @@ describe('request-transformer', () => { it('successfully converts a postgres order to a response object', () => { const responseObject: OrderResponseObject | undefined = postgresAndRedisOrderToResponseObject( order, - null, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, ); expect(responseObject).not.toBeUndefined(); @@ -159,19 +177,23 @@ describe('request-transformer', () => { redisOrderToResponseObject(redisTestConstants.defaultRedisOrder), ); expect(responseObject).toEqual( - postgresOrderToResponseObject(order), + postgresOrderToResponseObject(order, testConstants.defaultSubaccount.subaccountNumber), ); }); it('successfully converts a redis order to a response object', () => { const 
responseObject: OrderResponseObject | undefined = postgresAndRedisOrderToResponseObject( undefined, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, redisTestConstants.defaultRedisOrder, ); expect(responseObject).not.toBeUndefined(); expect(responseObject).not.toEqual( - postgresOrderToResponseObject(order), + postgresOrderToResponseObject(order, testConstants.defaultSubaccount.subaccountNumber), ); expect(responseObject).toEqual( redisOrderToResponseObject(redisTestConstants.defaultRedisOrder), @@ -181,6 +203,10 @@ describe('request-transformer', () => { it('successfully converts undefined postgres order and null redis orderto undefined', () => { const responseObject: OrderResponseObject | undefined = postgresAndRedisOrderToResponseObject( undefined, + { + [testConstants.defaultSubaccountId]: + testConstants.defaultSubaccount.subaccountNumber, + }, null, ); @@ -192,13 +218,17 @@ describe('request-transformer', () => { it( 'successfully converts a postgres order with null `goodTilBlockTime` to a response object', () => { - const responseObject: OrderResponseObject = postgresOrderToResponseObject(order); + const responseObject: OrderResponseObject = postgresOrderToResponseObject( + order, + testConstants.defaultSubaccount.subaccountNumber, + ); expect(responseObject).toEqual({ ...order, timeInForce: apiTranslations.orderTIFToAPITIF(order.timeInForce), postOnly: apiTranslations.isOrderTIFPostOnly(order.timeInForce), ticker, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }); }, ); @@ -212,6 +242,7 @@ describe('request-transformer', () => { }; const responseObject: OrderResponseObject = postgresOrderToResponseObject( orderWithGoodTilBlockTime, + testConstants.defaultSubaccount.subaccountNumber, ); expect(responseObject).toEqual({ @@ -219,6 +250,7 @@ describe('request-transformer', () => { timeInForce: apiTranslations.orderTIFToAPITIF(order.timeInForce), postOnly: 
apiTranslations.isOrderTIFPostOnly(order.timeInForce), ticker, + subaccountNumber: testConstants.defaultSubaccount.subaccountNumber, }); }, ); @@ -302,6 +334,7 @@ describe('request-transformer', () => { goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(redisOrder.order!), ticker, clientMetadata: redisOrder.order!.clientMetadata.toString(), + subaccountNumber: redisOrder.order!.orderId!.subaccountId!.number, }); }); }); diff --git a/indexer/services/comlink/__tests__/lib/validation/schemas.test.ts b/indexer/services/comlink/__tests__/lib/validation/schemas.test.ts index 7e61a5bbc55..399e7fe15c0 100644 --- a/indexer/services/comlink/__tests__/lib/validation/schemas.test.ts +++ b/indexer/services/comlink/__tests__/lib/validation/schemas.test.ts @@ -3,8 +3,11 @@ import { getQueryString, sendRequestToApp } from '../../helpers/helpers'; import { schemaTestApp } from './helpers'; import request from 'supertest'; import config from '../../../src/config'; -import { testConstants } from '@dydxprotocol-indexer/postgres'; -import { MAX_SUBACCOUNT_NUMBER } from '../../../src/constants'; +import { + testConstants, + MAX_PARENT_SUBACCOUNTS, + CHILD_SUBACCOUNT_MULTIPLIER, +} from '@dydxprotocol-indexer/postgres'; describe('schemas', () => { const positiveNonInteger: number = 3.2; @@ -14,30 +17,38 @@ describe('schemas', () => { const defaultAddress: string = testConstants.defaultSubaccount.address; describe('CheckSubaccountSchema', () => { it.each([ - ['missing address', { subaccountNumber: defaultSubaccountNumber }, 'address', 'Invalid value'], + [ + 'missingaddress', + { subaccountNumber: defaultSubaccountNumber }, + 'address', + 'address must be a valid dydx address', + ], [ 'missing subaccountNumber', { address: defaultAddress }, 'subaccountNumber', - 'subaccountNumber must be a non-negative integer less than 128', + 'subaccountNumber must be a non-negative integer less than 128001', ], [ 'non-integer subaccountNumber', { address: defaultAddress, subaccountNumber: 
positiveNonInteger }, 'subaccountNumber', - 'subaccountNumber must be a non-negative integer less than 128', + 'subaccountNumber must be a non-negative integer less than 128001', ], [ 'negative subaccountNumber', { address: defaultAddress, subaccountNumber: negativeInteger }, 'subaccountNumber', - 'subaccountNumber must be a non-negative integer less than 128', + 'subaccountNumber must be a non-negative integer less than 128001', ], [ 'subaccountNumber greater than maximum subaccount number', - { address: defaultAddress, subaccountNumber: MAX_SUBACCOUNT_NUMBER + 1 }, + { + address: defaultAddress, + subaccountNumber: MAX_PARENT_SUBACCOUNTS * CHILD_SUBACCOUNT_MULTIPLIER + 1, + }, 'subaccountNumber', - 'subaccountNumber must be a non-negative integer less than 128', + 'subaccountNumber must be a non-negative integer less than 128001', ], ])('Returns 400 when validation fails: %s', async ( _reason: string, diff --git a/indexer/services/comlink/package.json b/indexer/services/comlink/package.json index ef48f211e17..fa92b9548b7 100644 --- a/indexer/services/comlink/package.json +++ b/indexer/services/comlink/package.json @@ -25,19 +25,24 @@ "@bugsnag/node": "^7.18.0", "@bugsnag/plugin-express": "^7.18.0", "@cosmjs/crypto": "0.32.1", + "@cosmjs/encoding": "^0.32.3", "@dydxprotocol-indexer/base": "workspace:^0.0.1", "@dydxprotocol-indexer/compliance": "workspace:^0.0.1", + "@dydxprotocol-indexer/notifications": "workspace:^0.0.1", "@dydxprotocol-indexer/postgres": "workspace:^0.0.1", "@dydxprotocol-indexer/redis": "workspace:^0.0.1", "@dydxprotocol-indexer/v4-proto-parser": "workspace:^0.0.1", "@dydxprotocol-indexer/v4-protos": "workspace:^0.0.1", + "@keplr-wallet/cosmos": "^0.12.122", "@tsoa/runtime": "^5.0.0", + "bech32": "1.1.4", "big.js": "^6.2.1", + "binary-searching": "^2.0.5", "body-parser": "^1.20.0", "cors": "^2.8.5", "dd-trace": "^3.32.1", "dotenv-flow": "^3.2.0", - "dydx-widdershins": "^4.0.1", + "dydx-widdershins": "^4.0.8", "express": "^4.18.1", 
"express-request-id": "^1.4.0", "express-validator": "^6.14.2", diff --git a/indexer/services/comlink/public/api-documentation.md b/indexer/services/comlink/public/api-documentation.md index 46cbc57c80a..0aebabb2559 100644 --- a/indexer/services/comlink/public/api-documentation.md +++ b/indexer/services/comlink/public/api-documentation.md @@ -5,7 +5,10 @@ Base URLs: -* https://dydx-testnet.imperator.co/v4 +* For **the deployment by DYDX token holders**, use https://indexer.dydx.trade/v4 +* For **Testnet**, use https://indexer.v4testnet.dydx.exchange/v4 + +Note: Messages on Indexer WebSocket feeds are typically more recent than data fetched via Indexer's REST API, because the latter is backed by read replicas of the databases that feed the former. Ordinarily this difference is minimal (less than a second), but it might become prolonged under load. Please see [Indexer Architecture](https://dydx.exchange/blog/v4-deep-dive-indexer) for more information. # Authentication @@ -23,7 +26,13 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/addresses/{address}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/affiliates/address', params={ + 'referralCode': 'string' +}, headers = headers) print(r.json()) @@ -35,7 +44,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/addresses/{address}', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/affiliates/address?referralCode=string`, { method: 'GET', @@ -49,13 +62,13 @@ fetch('https://dydx-testnet.imperator.co/v4/addresses/{address}', ``` -`GET /addresses/{address}` +`GET /affiliates/address` ### Parameters |Name|In|Type|Required|Description| 
|---|---|---|---|---| -|address|path|string|true|none| +|referralCode|query|string|true|none| > Example responses @@ -63,66 +76,7 @@ fetch('https://dydx-testnet.imperator.co/v4/addresses/{address}', ```json { - "subaccounts": [ - { - "address": "string", - "subaccountNumber": 0, - "equity": "string", - "freeCollateral": "string", - "openPerpetualPositions": { - "property1": { - "market": "string", - "status": "OPEN", - "side": "LONG", - "size": "string", - "maxSize": "string", - "entryPrice": "string", - "realizedPnl": "string", - "createdAt": "string", - "createdAtHeight": "string", - "sumOpen": "string", - "sumClose": "string", - "netFunding": "string", - "unrealizedPnl": "string", - "closedAt": null, - "exitPrice": "string" - }, - "property2": { - "market": "string", - "status": "OPEN", - "side": "LONG", - "size": "string", - "maxSize": "string", - "entryPrice": "string", - "realizedPnl": "string", - "createdAt": "string", - "createdAtHeight": "string", - "sumOpen": "string", - "sumClose": "string", - "netFunding": "string", - "unrealizedPnl": "string", - "closedAt": null, - "exitPrice": "string" - } - }, - "assetPositions": { - "property1": { - "symbol": "string", - "side": "LONG", - "size": "string", - "assetId": "string" - }, - "property2": { - "symbol": "string", - "side": "LONG", - "size": "string", - "assetId": "string" - } - }, - "marginEnabled": true - } - ], - "totalTradingRewards": "string" + "address": "string" } ``` @@ -130,7 +84,7 @@ fetch('https://dydx-testnet.imperator.co/v4/addresses/{address}', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AddressResponse](#schemaaddressresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AffiliateAddressResponse](#schemaaffiliateaddressresponse)| -## GetAssetPositions +## GetParentSubaccount - + > Code samples @@ -269,9 +237,11 @@ headers = { 'Accept': 'application/json' } -r = 
requests.get('https://dydx-testnet.imperator.co/v4/assetPositions', params={ - 'address': 'string', 'subaccountNumber': '0' -}, headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/addresses/{address}/parentSubaccountNumber/{parentSubaccountNumber}', headers = headers) print(r.json()) @@ -283,7 +253,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/assetPositions?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/addresses/{address}/parentSubaccountNumber/{parentSubaccountNumber}`, { method: 'GET', @@ -297,14 +271,14 @@ fetch('https://dydx-testnet.imperator.co/v4/assetPositions?address=string&subacc ``` -`GET /assetPositions` +`GET /addresses/{address}/parentSubaccountNumber/{parentSubaccountNumber}` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|address|query|string|true|none| -|subaccountNumber|query|number(double)|true|none| +|address|path|string|true|none| +|parentSubaccountNumber|path|number(double)|true|none| > Example responses @@ -312,12 +286,73 @@ fetch('https://dydx-testnet.imperator.co/v4/assetPositions?address=string&subacc ```json { - "positions": [ + "address": "string", + "parentSubaccountNumber": 0, + "equity": "string", + "freeCollateral": "string", + "childSubaccounts": [ { - "symbol": "string", - "side": "LONG", - "size": "string", - "assetId": "string" + "address": "string", + "subaccountNumber": 0, + "equity": "string", + "freeCollateral": "string", + "openPerpetualPositions": { + "property1": { + "market": "string", + "status": "OPEN", + "side": "LONG", + "size": "string", + "maxSize": "string", + "entryPrice": "string", + "realizedPnl": "string", + 
"createdAt": "string", + "createdAtHeight": "string", + "sumOpen": "string", + "sumClose": "string", + "netFunding": "string", + "unrealizedPnl": "string", + "closedAt": null, + "exitPrice": "string", + "subaccountNumber": 0 + }, + "property2": { + "market": "string", + "status": "OPEN", + "side": "LONG", + "size": "string", + "maxSize": "string", + "entryPrice": "string", + "realizedPnl": "string", + "createdAt": "string", + "createdAtHeight": "string", + "sumOpen": "string", + "sumClose": "string", + "netFunding": "string", + "unrealizedPnl": "string", + "closedAt": null, + "exitPrice": "string", + "subaccountNumber": 0 + } + }, + "assetPositions": { + "property1": { + "symbol": "string", + "side": "LONG", + "size": "string", + "assetId": "string", + "subaccountNumber": 0 + }, + "property2": { + "symbol": "string", + "side": "LONG", + "size": "string", + "assetId": "string", + "subaccountNumber": 0 + } + }, + "marginEnabled": true, + "updatedAtHeight": "string", + "latestProcessedBlockHeight": "string" } ] } @@ -327,42 +362,51 @@ fetch('https://dydx-testnet.imperator.co/v4/assetPositions?address=string&subacc |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AssetPositionResponse](#schemaassetpositionresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[ParentSubaccountResponse](#schemaparentsubaccountresponse)| -## GetCandles +## RegisterToken - + > Code samples ```python import requests headers = { - 'Accept': 'application/json' + 'Content-Type': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/candles/perpetualMarkets/{ticker}', params={ - 'resolution': '1MIN' -}, headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.post(f'{baseURL}/addresses/{address}/registerToken', headers = headers) print(r.json()) ``` 
```javascript - +const inputBody = '{ + "language": "string", + "token": "string" +}'; const headers = { - 'Accept':'application/json' + 'Content-Type':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/candles/perpetualMarkets/{ticker}?resolution=1MIN', -{ - method: 'GET', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; +fetch(`${baseURL}/addresses/{address}/registerToken`, +{ + method: 'POST', + body: inputBody, headers: headers }) .then(function(res) { @@ -373,78 +417,50 @@ fetch('https://dydx-testnet.imperator.co/v4/candles/perpetualMarkets/{ticker}?re ``` -`GET /candles/perpetualMarkets/{ticker}` - -### Parameters - -|Name|In|Type|Required|Description| -|---|---|---|---|---| -|ticker|path|string|true|none| -|resolution|query|[CandleResolution](#schemacandleresolution)|true|none| -|limit|query|number(double)|false|none| -|fromISO|query|string|false|none| -|toISO|query|string|false|none| - -#### Enumerated Values - -|Parameter|Value| -|---|---| -|resolution|1MIN| -|resolution|5MINS| -|resolution|15MINS| -|resolution|30MINS| -|resolution|1HOUR| -|resolution|4HOURS| -|resolution|1DAY| - -> Example responses +`POST /addresses/{address}/registerToken` -> 200 Response +> Body parameter ```json { - "candles": [ - { - "startedAt": "string", - "ticker": "string", - "resolution": "1MIN", - "low": "string", - "high": "string", - "open": "string", - "close": "string", - "baseTokenVolume": "string", - "usdVolume": "string", - "trades": 0, - "startingOpenInterest": "string", - "id": "string" - } - ] + "language": "string", + "token": "string" } ``` +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|path|string|true|none| +|body|body|object|true|none| +|» language|body|string|true|none| +|» token|body|string|true|none| + ### Responses |Status|Meaning|Description|Schema| |---|---|---|---| 
-|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[CandleResponse](#schemacandleresponse)| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|No content|None| -## Screen +## TestNotification - + > Code samples ```python import requests -headers = { - 'Accept': 'application/json' -} -r = requests.get('https://dydx-testnet.imperator.co/v4/compliance/screen/{address}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.post(f'{baseURL}/addresses/{address}/testNotification') print(r.json()) @@ -452,15 +468,14 @@ print(r.json()) ```javascript -const headers = { - 'Accept':'application/json' -}; +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; -fetch('https://dydx-testnet.imperator.co/v4/compliance/screen/{address}', +fetch(`${baseURL}/addresses/{address}/testNotification`, { - method: 'GET', + method: 'POST' - headers: headers }) .then(function(res) { return res.json(); @@ -470,7 +485,7 @@ fetch('https://dydx-testnet.imperator.co/v4/compliance/screen/{address}', ``` -`GET /compliance/screen/{address}` +`POST /addresses/{address}/testNotification` ### Parameters @@ -478,30 +493,19 @@ fetch('https://dydx-testnet.imperator.co/v4/compliance/screen/{address}', |---|---|---|---|---| |address|path|string|true|none| -> Example responses - -> 200 Response - -```json -{ - "status": "COMPLIANT", - "reason": "MANUAL" -} -``` - ### Responses |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[ComplianceV2Response](#schemacompliancev2response)| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|No content|None| -## GetFills +## GetMetadata - + > Code samples @@ -511,8 +515,12 @@ headers = { 'Accept': 'application/json' 
} -r = requests.get('https://dydx-testnet.imperator.co/v4/fills', params={ - 'address': 'string', 'subaccountNumber': '0' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/affiliates/metadata', params={ + 'address': 'string' }, headers = headers) print(r.json()) @@ -525,7 +533,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/fills?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/affiliates/metadata?address=string`, { method: 'GET', @@ -539,26 +551,13 @@ fetch('https://dydx-testnet.imperator.co/v4/fills?address=string&subaccountNumbe ``` -`GET /fills` +`GET /affiliates/metadata` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| |address|query|string|true|none| -|subaccountNumber|query|number(double)|true|none| -|market|query|string|false|none| -|marketType|query|[MarketType](#schemamarkettype)|false|none| -|limit|query|number(double)|false|none| -|createdBeforeOrAtHeight|query|number(double)|false|none| -|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| - -#### Enumerated Values - -|Parameter|Value| -|---|---| -|marketType|PERPETUAL| -|marketType|SPOT| > Example responses @@ -566,24 +565,9 @@ fetch('https://dydx-testnet.imperator.co/v4/fills?address=string&subaccountNumbe ```json { - "fills": [ - { - "id": "string", - "side": "BUY", - "liquidity": "TAKER", - "type": "LIMIT", - "market": "string", - "marketType": "PERPETUAL", - "price": "string", - "size": "string", - "fee": "string", - "createdAt": "string", - "createdAtHeight": "string", - "orderId": "string", - "clientMetadata": "string", - "subaccountNumber": 0 - } - ] + "referralCode": "string", + "isVolumeEligible": true, 
+ "isAffiliate": true } ``` @@ -591,15 +575,15 @@ fetch('https://dydx-testnet.imperator.co/v4/fills?address=string&subaccountNumbe |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[FillResponse](#schemafillresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AffiliateMetadataResponse](#schemaaffiliatemetadataresponse)| -## GetHeight +## GetSnapshot - + > Code samples @@ -609,7 +593,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/height', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/affiliates/snapshot', headers = headers) print(r.json()) @@ -621,7 +609,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/height', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/affiliates/snapshot`, { method: 'GET', @@ -635,7 +627,16 @@ fetch('https://dydx-testnet.imperator.co/v4/height', ``` -`GET /height` +`GET /affiliates/snapshot` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|addressFilter|query|array[string]|false|none| +|offset|query|number(double)|false|none| +|limit|query|number(double)|false|none| +|sortByAffiliateEarning|query|boolean|false|none| > Example responses @@ -643,8 +644,23 @@ fetch('https://dydx-testnet.imperator.co/v4/height', ```json { - "height": "string", - "time": "string" + "affiliateList": [ + { + "affiliateAddress": "string", + "affiliateReferralCode": "string", + "affiliateEarnings": 0.1, + "affiliateReferredTrades": 0.1, + "affiliateTotalReferredFees": 0.1, + "affiliateReferredUsers": 0.1, + "affiliateReferredNetProtocolEarnings": 
0.1, + "affiliateReferredTotalVolume": 0.1, + "affiliateReferredMakerFees": 0.1, + "affiliateReferredTakerFees": 0.1, + "affiliateReferredMakerRebates": 0.1 + } + ], + "total": 0.1, + "currentOffset": 0.1 } ``` @@ -652,15 +668,15 @@ fetch('https://dydx-testnet.imperator.co/v4/height', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HeightResponse](#schemaheightresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AffiliateSnapshotResponse](#schemaaffiliatesnapshotresponse)| -## GetTradingRewards +## GetTotalVolume - + > Code samples @@ -670,7 +686,13 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/historicalBlockTradingRewards/{address}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/affiliates/total_volume', params={ + 'address': 'string' +}, headers = headers) print(r.json()) @@ -682,7 +704,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/historicalBlockTradingRewards/{address}', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/affiliates/total_volume?address=string`, { method: 'GET', @@ -696,16 +722,13 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalBlockTradingRewards/{addre ``` -`GET /historicalBlockTradingRewards/{address}` +`GET /affiliates/total_volume` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|address|path|string|true|none| -|limit|query|number(double)|false|none| -|startingBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| -|startingBeforeOrAtHeight|query|string|false|none| +|address|query|string|true|none| > Example 
responses @@ -713,13 +736,7 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalBlockTradingRewards/{addre ```json { - "rewards": [ - { - "tradingReward": "string", - "createdAt": "string", - "createdAtHeight": "string" - } - ] + "totalVolume": 0.1 } ``` @@ -727,15 +744,15 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalBlockTradingRewards/{addre |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalBlockTradingRewardsResponse](#schemahistoricalblocktradingrewardsresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AffiliateTotalVolumeResponse](#schemaaffiliatetotalvolumeresponse)| -## GetHistoricalFunding +## GetAssetPositions - + > Code samples @@ -745,7 +762,13 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/historicalFunding/{ticker}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/assetPositions', params={ + 'address': 'string', 'subaccountNumber': '0.1' +}, headers = headers) print(r.json()) @@ -757,7 +780,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/historicalFunding/{ticker}', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/assetPositions?address=string&subaccountNumber=0.1`, { method: 'GET', @@ -771,16 +798,14 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalFunding/{ticker}', ``` -`GET /historicalFunding/{ticker}` +`GET /assetPositions` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|ticker|path|string|true|none| -|limit|query|number(double)|false|none| -|effectiveBeforeOrAtHeight|query|number(double)|false|none| 
-|effectiveBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|address|query|string|true|none| +|subaccountNumber|query|number(double)|true|none| > Example responses @@ -788,13 +813,13 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalFunding/{ticker}', ```json { - "historicalFunding": [ + "positions": [ { - "ticker": "string", - "rate": "string", - "price": "string", - "effectiveAt": "string", - "effectiveAtHeight": "string" + "symbol": "string", + "side": "LONG", + "size": "string", + "assetId": "string", + "subaccountNumber": 0 } ] } @@ -804,15 +829,15 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalFunding/{ticker}', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalFundingResponse](#schemahistoricalfundingresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AssetPositionResponse](#schemaassetpositionresponse)| -## GetHistoricalPnl +## GetAssetPositionsForParentSubaccount - + > Code samples @@ -822,8 +847,12 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/historical-pnl', params={ - 'address': 'string', 'subaccountNumber': '0' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/assetPositions/parentSubaccountNumber', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' }, headers = headers) print(r.json()) @@ -836,7 +865,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/historical-pnl?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/assetPositions/parentSubaccountNumber?address=string&parentSubaccountNumber=0.1`, { method: 
'GET', @@ -850,19 +883,14 @@ fetch('https://dydx-testnet.imperator.co/v4/historical-pnl?address=string&subacc ``` -`GET /historical-pnl` +`GET /assetPositions/parentSubaccountNumber` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| |address|query|string|true|none| -|subaccountNumber|query|number(double)|true|none| -|limit|query|number(double)|false|none| -|createdBeforeOrAtHeight|query|number(double)|false|none| -|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| -|createdOnOrAfterHeight|query|number(double)|false|none| -|createdOnOrAfter|query|[IsoString](#schemaisostring)|false|none| +|parentSubaccountNumber|query|number(double)|true|none| > Example responses @@ -870,16 +898,13 @@ fetch('https://dydx-testnet.imperator.co/v4/historical-pnl?address=string&subacc ```json { - "historicalPnl": [ + "positions": [ { - "id": "string", - "subaccountId": "string", - "equity": "string", - "totalPnl": "string", - "netTransfers": "string", - "createdAt": "string", - "blockHeight": "string", - "blockTime": "string" + "symbol": "string", + "side": "LONG", + "size": "string", + "assetId": "string", + "subaccountNumber": 0 } ] } @@ -889,15 +914,15 @@ fetch('https://dydx-testnet.imperator.co/v4/historical-pnl?address=string&subacc |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalPnlResponse](#schemahistoricalpnlresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[AssetPositionResponse](#schemaassetpositionresponse)| -## GetAggregations +## GetCandles - + > Code samples @@ -907,8 +932,12 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/historicalTradingRewardAggregations/{address}', params={ - 'period': 'DAILY' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = 
requests.get(f'{baseURL}/candles/perpetualMarkets/{ticker}', params={ + 'resolution': '1MIN' }, headers = headers) print(r.json()) @@ -921,7 +950,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/historicalTradingRewardAggregations/{address}?period=DAILY', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/candles/perpetualMarkets/{ticker}?resolution=1MIN`, { method: 'GET', @@ -935,25 +968,29 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalTradingRewardAggregations/ ``` -`GET /historicalTradingRewardAggregations/{address}` +`GET /candles/perpetualMarkets/{ticker}` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|address|path|string|true|none| -|period|query|[TradingRewardAggregationPeriod](#schematradingrewardaggregationperiod)|true|none| +|ticker|path|string|true|none| +|resolution|query|[CandleResolution](#schemacandleresolution)|true|none| |limit|query|number(double)|false|none| -|startingBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| -|startingBeforeOrAtHeight|query|string|false|none| +|fromISO|query|string|false|none| +|toISO|query|string|false|none| #### Enumerated Values |Parameter|Value| |---|---| -|period|DAILY| -|period|WEEKLY| -|period|MONTHLY| +|resolution|1MIN| +|resolution|5MINS| +|resolution|15MINS| +|resolution|30MINS| +|resolution|1HOUR| +|resolution|4HOURS| +|resolution|1DAY| > Example responses @@ -961,14 +998,22 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalTradingRewardAggregations/ ```json { - "rewards": [ + "candles": [ { - "tradingReward": "string", "startedAt": "string", - "startedAtHeight": "string", - "endedAt": "string", - "endedAtHeight": "string", - "period": "DAILY" + "ticker": "string", + "resolution": "1MIN", + "low": "string", + "high": "string", + "open": "string", + "close": "string", + 
"baseTokenVolume": "string", + "usdVolume": "string", + "trades": 0.1, + "startingOpenInterest": "string", + "orderbookMidPriceOpen": "string", + "orderbookMidPriceClose": "string", + "id": "string" } ] } @@ -978,15 +1023,15 @@ fetch('https://dydx-testnet.imperator.co/v4/historicalTradingRewardAggregations/ |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalTradingRewardAggregationsResponse](#schemahistoricaltradingrewardaggregationsresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[CandleResponse](#schemacandleresponse)| -## GetPerpetualMarket +## Screen - + > Code samples @@ -996,7 +1041,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/orderbooks/perpetualMarket/{ticker}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/compliance/screen/{address}', headers = headers) print(r.json()) @@ -1008,7 +1057,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/orderbooks/perpetualMarket/{ticker}', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/compliance/screen/{address}`, { method: 'GET', @@ -1022,13 +1075,13 @@ fetch('https://dydx-testnet.imperator.co/v4/orderbooks/perpetualMarket/{ticker}' ``` -`GET /orderbooks/perpetualMarket/{ticker}` +`GET /compliance/screen/{address}` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|ticker|path|string|true|none| +|address|path|string|true|none| > Example responses @@ -1036,18 +1089,9 @@ fetch('https://dydx-testnet.imperator.co/v4/orderbooks/perpetualMarket/{ticker}' ```json { - "bids": [ - { - "price": "string", - 
"size": "string" - } - ], - "asks": [ - { - "price": "string", - "size": "string" - } - ] + "status": "COMPLIANT", + "reason": "MANUAL", + "updatedAt": "string" } ``` @@ -1055,15 +1099,15 @@ fetch('https://dydx-testnet.imperator.co/v4/orderbooks/perpetualMarket/{ticker}' |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[OrderbookResponseObject](#schemaorderbookresponseobject)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[ComplianceV2Response](#schemacompliancev2response)| -## ListOrders +## GetFills - + > Code samples @@ -1073,8 +1117,12 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/orders', params={ - 'address': 'string', 'subaccountNumber': '0' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/fills', params={ + 'address': 'string', 'subaccountNumber': '0.1' }, headers = headers) print(r.json()) @@ -1087,7 +1135,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/orders?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/fills?address=string&subaccountNumber=0.1`, { method: 'GET', @@ -1101,7 +1153,7 @@ fetch('https://dydx-testnet.imperator.co/v4/orders?address=string&subaccountNumb ``` -`GET /orders` +`GET /fills` ### Parameters @@ -1109,90 +1161,1116 @@ fetch('https://dydx-testnet.imperator.co/v4/orders?address=string&subaccountNumb |---|---|---|---|---| |address|query|string|true|none| |subaccountNumber|query|number(double)|true|none| +|market|query|string|false|none| +|marketType|query|[MarketType](#schemamarkettype)|false|none| |limit|query|number(double)|false|none| 
-|ticker|query|string|false|none| -|side|query|[OrderSide](#schemaorderside)|false|none| -|type|query|[OrderType](#schemaordertype)|false|none| -|status|query|array[any]|false|none| -|goodTilBlockBeforeOrAt|query|number(double)|false|none| -|goodTilBlockTimeBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| -|returnLatestOrders|query|boolean|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|page|query|number(double)|false|none| #### Enumerated Values |Parameter|Value| |---|---| -|side|BUY| -|side|SELL| -|type|LIMIT| -|type|MARKET| -|type|STOP_LIMIT| -|type|STOP_MARKET| -|type|TRAILING_STOP| -|type|TAKE_PROFIT| -|type|TAKE_PROFIT_MARKET| -|type|HARD_TRADE| -|type|FAILED_HARD_TRADE| -|type|TRANSFER_PLACEHOLDER| +|marketType|PERPETUAL| +|marketType|SPOT| > Example responses > 200 Response ```json -[ - { - "id": "string", - "subaccountId": "string", - "clientId": "string", - "clobPairId": "string", - "side": "BUY", - "size": "string", - "totalFilled": "string", - "price": "string", - "type": "LIMIT", - "reduceOnly": true, - "orderFlags": "string", - "goodTilBlock": "string", - "goodTilBlockTime": "string", - "createdAtHeight": "string", - "clientMetadata": "string", - "triggerPrice": "string", - "timeInForce": "GTT", - "status": "OPEN", - "postOnly": true, - "ticker": "string", - "updatedAt": "string", - "updatedAtHeight": "string" - } -] +{ + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "fills": [ + { + "id": "string", + "side": "BUY", + "liquidity": "TAKER", + "type": "LIMIT", + "market": "string", + "marketType": "PERPETUAL", + "price": "string", + "size": "string", + "fee": "string", + "affiliateRevShare": "string", + "createdAt": "string", + "createdAtHeight": "string", + "orderId": "string", + "clientMetadata": "string", + "subaccountNumber": 0 + } + ] +} ``` ### Responses |Status|Meaning|Description|Schema| |---|---|---|---| 
-|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|Inline| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[FillResponse](#schemafillresponse)| -### Response Schema + -Status Code **200** +## GetFillsForParentSubaccount -|Name|Type|Required|Restrictions|Description| -|---|---|---|---|---| -|*anonymous*|[[OrderResponseObject](#schemaorderresponseobject)]|false|none|none| -|» id|string|true|none|none| -|» subaccountId|string|true|none|none| -|» clientId|string|true|none|none| -|» clobPairId|string|true|none|none| -|» side|[OrderSide](#schemaorderside)|true|none|none| -|» size|string|true|none|none| -|» totalFilled|string|true|none|none| -|» price|string|true|none|none| -|» type|[OrderType](#schemaordertype)|true|none|none| -|» reduceOnly|boolean|true|none|none| -|» orderFlags|string|true|none|none| -|» goodTilBlock|string|false|none|none| + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/fills/parentSubaccount', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/fills/parentSubaccount?address=string&parentSubaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /fills/parentSubaccount` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|parentSubaccountNumber|query|number(double)|true|none| 
+|market|query|string|false|none| +|marketType|query|[MarketType](#schemamarkettype)|false|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|page|query|number(double)|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|marketType|PERPETUAL| +|marketType|SPOT| + +> Example responses + +> 200 Response + +```json +{ + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "fills": [ + { + "id": "string", + "side": "BUY", + "liquidity": "TAKER", + "type": "LIMIT", + "market": "string", + "marketType": "PERPETUAL", + "price": "string", + "size": "string", + "fee": "string", + "affiliateRevShare": "string", + "createdAt": "string", + "createdAtHeight": "string", + "orderId": "string", + "clientMetadata": "string", + "subaccountNumber": 0 + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[FillResponse](#schemafillresponse)| + + + +## GetHeight + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/height', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/height`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /height` + +> Example responses + +> 200 Response + +```json +{ + "height": "string", + "time": "string" +} +``` + +### Responses + 
+|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HeightResponse](#schemaheightresponse)| + + + +## GetTradingRewards + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/historicalBlockTradingRewards/{address}', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/historicalBlockTradingRewards/{address}`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /historicalBlockTradingRewards/{address}` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|path|string|true|none| +|limit|query|number(double)|false|none| +|startingBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|startingBeforeOrAtHeight|query|string|false|none| + +> Example responses + +> 200 Response + +```json +{ + "rewards": [ + { + "tradingReward": "string", + "createdAt": "string", + "createdAtHeight": "string" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalBlockTradingRewardsResponse](#schemahistoricalblocktradingrewardsresponse)| + + + +## GetHistoricalFunding + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 
'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/historicalFunding/{ticker}', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/historicalFunding/{ticker}`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /historicalFunding/{ticker}` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ticker|path|string|true|none| +|limit|query|number(double)|false|none| +|effectiveBeforeOrAtHeight|query|number(double)|false|none| +|effectiveBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| + +> Example responses + +> 200 Response + +```json +{ + "historicalFunding": [ + { + "ticker": "string", + "rate": "string", + "price": "string", + "effectiveAt": "string", + "effectiveAtHeight": "string" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalFundingResponse](#schemahistoricalfundingresponse)| + + + +## GetHistoricalPnl + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/historical-pnl', params={ + 'address': 'string', 'subaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 
'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/historical-pnl?address=string&subaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /historical-pnl` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|subaccountNumber|query|number(double)|true|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|createdOnOrAfterHeight|query|number(double)|false|none| +|createdOnOrAfter|query|[IsoString](#schemaisostring)|false|none| +|page|query|number(double)|false|none| + +> Example responses + +> 200 Response + +```json +{ + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "historicalPnl": [ + { + "id": "string", + "subaccountId": "string", + "equity": "string", + "totalPnl": "string", + "netTransfers": "string", + "createdAt": "string", + "blockHeight": "string", + "blockTime": "string" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalPnlResponse](#schemahistoricalpnlresponse)| + + + +## GetHistoricalPnlForParentSubaccount + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/historical-pnl/parentSubaccountNumber', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const 
baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/historical-pnl/parentSubaccountNumber?address=string&parentSubaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /historical-pnl/parentSubaccountNumber` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|parentSubaccountNumber|query|number(double)|true|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|createdOnOrAfterHeight|query|number(double)|false|none| +|createdOnOrAfter|query|[IsoString](#schemaisostring)|false|none| + +> Example responses + +> 200 Response + +```json +{ + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "historicalPnl": [ + { + "id": "string", + "subaccountId": "string", + "equity": "string", + "totalPnl": "string", + "netTransfers": "string", + "createdAt": "string", + "blockHeight": "string", + "blockTime": "string" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalPnlResponse](#schemahistoricalpnlresponse)| + + + +## GetAggregations + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/historicalTradingRewardAggregations/{address}', params={ + 'period': 'DAILY' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 
'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/historicalTradingRewardAggregations/{address}?period=DAILY`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /historicalTradingRewardAggregations/{address}` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|path|string|true|none| +|period|query|[TradingRewardAggregationPeriod](#schematradingrewardaggregationperiod)|true|none| +|limit|query|number(double)|false|none| +|startingBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|startingBeforeOrAtHeight|query|string|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|period|DAILY| +|period|WEEKLY| +|period|MONTHLY| + +> Example responses + +> 200 Response + +```json +{ + "rewards": [ + { + "tradingReward": "string", + "startedAt": "string", + "startedAtHeight": "string", + "endedAt": "string", + "endedAtHeight": "string", + "period": "DAILY" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[HistoricalTradingRewardAggregationsResponse](#schemahistoricaltradingrewardaggregationsresponse)| + + + +## GetPerpetualMarket + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/orderbooks/perpetualMarket/{ticker}', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/orderbooks/perpetualMarket/{ticker}`, +{ + 
method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /orderbooks/perpetualMarket/{ticker}` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ticker|path|string|true|none| + +> Example responses + +> 200 Response + +```json +{ + "bids": [ + { + "price": "string", + "size": "string" + } + ], + "asks": [ + { + "price": "string", + "size": "string" + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[OrderbookResponseObject](#schemaorderbookresponseobject)| + + + +## ListOrders + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/orders', params={ + 'address': 'string', 'subaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/orders?address=string&subaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /orders` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|subaccountNumber|query|number(double)|true|none| +|limit|query|number(double)|false|none| +|ticker|query|string|false|none| +|side|query|[OrderSide](#schemaorderside)|false|none| +|type|query|[OrderType](#schemaordertype)|false|none| +|status|query|array[any]|false|none| 
+|goodTilBlockBeforeOrAt|query|number(double)|false|none| +|goodTilBlockTimeBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|returnLatestOrders|query|boolean|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|side|BUY| +|side|SELL| +|type|LIMIT| +|type|MARKET| +|type|STOP_LIMIT| +|type|STOP_MARKET| +|type|TRAILING_STOP| +|type|TAKE_PROFIT| +|type|TAKE_PROFIT_MARKET| + +> Example responses + +> 200 Response + +```json +[ + { + "id": "string", + "subaccountId": "string", + "clientId": "string", + "clobPairId": "string", + "side": "BUY", + "size": "string", + "totalFilled": "string", + "price": "string", + "type": "LIMIT", + "reduceOnly": true, + "orderFlags": "string", + "goodTilBlock": "string", + "goodTilBlockTime": "string", + "createdAtHeight": "string", + "clientMetadata": "string", + "triggerPrice": "string", + "timeInForce": "GTT", + "status": "OPEN", + "postOnly": true, + "ticker": "string", + "updatedAt": "string", + "updatedAtHeight": "string", + "subaccountNumber": 0 + } +] +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|Inline| + +### Response Schema + +Status Code **200** + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[[OrderResponseObject](#schemaorderresponseobject)]|false|none|none| +|» id|string|true|none|none| +|» subaccountId|string|true|none|none| +|» clientId|string|true|none|none| +|» clobPairId|string|true|none|none| +|» side|[OrderSide](#schemaorderside)|true|none|none| +|» size|string|true|none|none| +|» totalFilled|string|true|none|none| +|» price|string|true|none|none| +|» type|[OrderType](#schemaordertype)|true|none|none| +|» reduceOnly|boolean|true|none|none| +|» orderFlags|string|true|none|none| +|» goodTilBlock|string|false|none|none| +|» goodTilBlockTime|string|false|none|none| +|» createdAtHeight|string|false|none|none| +|» clientMetadata|string|true|none|none| +|» 
triggerPrice|string|false|none|none| +|» timeInForce|[APITimeInForce](#schemaapitimeinforce)|true|none|none| +|» status|any|true|none|none| + +*anyOf* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|»» *anonymous*|[OrderStatus](#schemaorderstatus)|false|none|none| + +*or* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|»» *anonymous*|[BestEffortOpenedStatus](#schemabesteffortopenedstatus)|false|none|none| + +*continued* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|» postOnly|boolean|true|none|none| +|» ticker|string|true|none|none| +|» updatedAt|[IsoString](#schemaisostring)|false|none|none| +|» updatedAtHeight|string|false|none|none| +|» subaccountNumber|integer(int32)|true|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|side|BUY| +|side|SELL| +|type|LIMIT| +|type|MARKET| +|type|STOP_LIMIT| +|type|STOP_MARKET| +|type|TRAILING_STOP| +|type|TAKE_PROFIT| +|type|TAKE_PROFIT_MARKET| +|timeInForce|GTT| +|timeInForce|FOK| +|timeInForce|IOC| +|*anonymous*|OPEN| +|*anonymous*|FILLED| +|*anonymous*|CANCELED| +|*anonymous*|BEST_EFFORT_CANCELED| +|*anonymous*|UNTRIGGERED| +|*anonymous*|BEST_EFFORT_OPENED| + + + +## ListOrdersForParentSubaccount + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/orders/parentSubaccountNumber', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + 
+fetch(`${baseURL}/orders/parentSubaccountNumber?address=string&parentSubaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /orders/parentSubaccountNumber` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|parentSubaccountNumber|query|number(double)|true|none| +|limit|query|number(double)|false|none| +|ticker|query|string|false|none| +|side|query|[OrderSide](#schemaorderside)|false|none| +|type|query|[OrderType](#schemaordertype)|false|none| +|status|query|array[any]|false|none| +|goodTilBlockBeforeOrAt|query|number(double)|false|none| +|goodTilBlockTimeBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|returnLatestOrders|query|boolean|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|side|BUY| +|side|SELL| +|type|LIMIT| +|type|MARKET| +|type|STOP_LIMIT| +|type|STOP_MARKET| +|type|TRAILING_STOP| +|type|TAKE_PROFIT| +|type|TAKE_PROFIT_MARKET| + +> Example responses + +> 200 Response + +```json +[ + { + "id": "string", + "subaccountId": "string", + "clientId": "string", + "clobPairId": "string", + "side": "BUY", + "size": "string", + "totalFilled": "string", + "price": "string", + "type": "LIMIT", + "reduceOnly": true, + "orderFlags": "string", + "goodTilBlock": "string", + "goodTilBlockTime": "string", + "createdAtHeight": "string", + "clientMetadata": "string", + "triggerPrice": "string", + "timeInForce": "GTT", + "status": "OPEN", + "postOnly": true, + "ticker": "string", + "updatedAt": "string", + "updatedAtHeight": "string", + "subaccountNumber": 0 + } +] +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|Inline| + +### Response Schema + +Status Code **200** + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| 
+|*anonymous*|[[OrderResponseObject](#schemaorderresponseobject)]|false|none|none| +|» id|string|true|none|none| +|» subaccountId|string|true|none|none| +|» clientId|string|true|none|none| +|» clobPairId|string|true|none|none| +|» side|[OrderSide](#schemaorderside)|true|none|none| +|» size|string|true|none|none| +|» totalFilled|string|true|none|none| +|» price|string|true|none|none| +|» type|[OrderType](#schemaordertype)|true|none|none| +|» reduceOnly|boolean|true|none|none| +|» orderFlags|string|true|none|none| +|» goodTilBlock|string|false|none|none| |» goodTilBlockTime|string|false|none|none| |» createdAtHeight|string|false|none|none| |» clientMetadata|string|true|none|none| @@ -1200,60 +2278,732 @@ Status Code **200** |» timeInForce|[APITimeInForce](#schemaapitimeinforce)|true|none|none| |» status|any|true|none|none| -*anyOf* +*anyOf* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|»» *anonymous*|[OrderStatus](#schemaorderstatus)|false|none|none| + +*or* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|»» *anonymous*|[BestEffortOpenedStatus](#schemabesteffortopenedstatus)|false|none|none| + +*continued* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|» postOnly|boolean|true|none|none| +|» ticker|string|true|none|none| +|» updatedAt|[IsoString](#schemaisostring)|false|none|none| +|» updatedAtHeight|string|false|none|none| +|» subaccountNumber|integer(int32)|true|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|side|BUY| +|side|SELL| +|type|LIMIT| +|type|MARKET| +|type|STOP_LIMIT| +|type|STOP_MARKET| +|type|TRAILING_STOP| +|type|TAKE_PROFIT| +|type|TAKE_PROFIT_MARKET| +|timeInForce|GTT| +|timeInForce|FOK| +|timeInForce|IOC| +|*anonymous*|OPEN| +|*anonymous*|FILLED| +|*anonymous*|CANCELED| +|*anonymous*|BEST_EFFORT_CANCELED| +|*anonymous*|UNTRIGGERED| +|*anonymous*|BEST_EFFORT_OPENED| + + + +## GetOrder + + + +> Code samples + +```python +import requests +headers = { 
+ 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/orders/{orderId}', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/orders/{orderId}`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /orders/{orderId}` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|orderId|path|string|true|none| + +> Example responses + +> 200 Response + +```json +{ + "id": "string", + "subaccountId": "string", + "clientId": "string", + "clobPairId": "string", + "side": "BUY", + "size": "string", + "totalFilled": "string", + "price": "string", + "type": "LIMIT", + "reduceOnly": true, + "orderFlags": "string", + "goodTilBlock": "string", + "goodTilBlockTime": "string", + "createdAtHeight": "string", + "clientMetadata": "string", + "triggerPrice": "string", + "timeInForce": "GTT", + "status": "OPEN", + "postOnly": true, + "ticker": "string", + "updatedAt": "string", + "updatedAtHeight": "string", + "subaccountNumber": 0 +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[OrderResponseObject](#schemaorderresponseobject)| + + + +## ListPerpetualMarkets + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/perpetualMarkets', 
headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/perpetualMarkets`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /perpetualMarkets` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|limit|query|number(double)|false|none| +|ticker|query|string|false|none| + +> Example responses + +> 200 Response + +```json +{ + "markets": { + "property1": { + "clobPairId": "string", + "ticker": "string", + "status": "ACTIVE", + "oraclePrice": "string", + "priceChange24H": "string", + "volume24H": "string", + "trades24H": 0, + "nextFundingRate": "string", + "initialMarginFraction": "string", + "maintenanceMarginFraction": "string", + "openInterest": "string", + "atomicResolution": 0, + "quantumConversionExponent": 0, + "tickSize": "string", + "stepSize": "string", + "stepBaseQuantums": 0, + "subticksPerTick": 0, + "marketType": "CROSS", + "openInterestLowerCap": "string", + "openInterestUpperCap": "string", + "baseOpenInterest": "string", + "defaultFundingRate1H": "string" + }, + "property2": { + "clobPairId": "string", + "ticker": "string", + "status": "ACTIVE", + "oraclePrice": "string", + "priceChange24H": "string", + "volume24H": "string", + "trades24H": 0, + "nextFundingRate": "string", + "initialMarginFraction": "string", + "maintenanceMarginFraction": "string", + "openInterest": "string", + "atomicResolution": 0, + "quantumConversionExponent": 0, + "tickSize": "string", + "stepSize": "string", + "stepBaseQuantums": 0, + "subticksPerTick": 0, + "marketType": "CROSS", + "openInterestLowerCap": "string", + "openInterestUpperCap": "string", + "baseOpenInterest": "string", + 
"defaultFundingRate1H": "string" + } + } +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[PerpetualMarketResponse](#schemaperpetualmarketresponse)| + + + +## ListPositions + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/perpetualPositions', params={ + 'address': 'string', 'subaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/perpetualPositions?address=string&subaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /perpetualPositions` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|subaccountNumber|query|number(double)|true|none| +|status|query|array[string]|false|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|status|OPEN| +|status|CLOSED| +|status|LIQUIDATED| + +> Example responses + +> 200 Response + +```json +{ + "positions": [ + { + "market": "string", + "status": "OPEN", + "side": "LONG", + "size": "string", + "maxSize": "string", + "entryPrice": "string", + "realizedPnl": "string", + "createdAt": "string", + "createdAtHeight": "string", + "sumOpen": "string", + "sumClose": "string", + 
"netFunding": "string", + "unrealizedPnl": "string", + "closedAt": "string", + "exitPrice": "string", + "subaccountNumber": 0 + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[PerpetualPositionResponse](#schemaperpetualpositionresponse)| + + + +## ListPositionsForParentSubaccount + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/perpetualPositions/parentSubaccountNumber', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/perpetualPositions/parentSubaccountNumber?address=string&parentSubaccountNumber=0.1`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /perpetualPositions/parentSubaccountNumber` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|address|query|string|true|none| +|parentSubaccountNumber|query|number(double)|true|none| +|status|query|array[string]|false|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|status|OPEN| +|status|CLOSED| +|status|LIQUIDATED| + +> Example responses + +> 200 Response + +```json +{ + "positions": [ + { + "market": "string", + "status": "OPEN", + "side": 
"LONG", + "size": "string", + "maxSize": "string", + "entryPrice": "string", + "realizedPnl": "string", + "createdAt": "string", + "createdAtHeight": "string", + "sumOpen": "string", + "sumClose": "string", + "netFunding": "string", + "unrealizedPnl": "string", + "closedAt": "string", + "exitPrice": "string", + "subaccountNumber": 0 + } + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[PerpetualPositionResponse](#schemaperpetualpositionresponse)| + + + +## SearchTrader + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/trader/search', params={ + 'searchParam': 'string' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/trader/search?searchParam=string`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /trader/search` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|searchParam|query|string|true|none| + +> Example responses + +> 200 Response + +```json +{ + "result": { + "address": "string", + "subaccountNumber": 0.1, + "subaccountId": "string", + "username": "string" + } +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TraderSearchResponse](#schematradersearchresponse)| + + + +## Get + + + +> Code samples + +```python +import requests +headers = { 
+ 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/sparklines', params={ + 'timePeriod': 'ONE_DAY' +}, headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/sparklines?timePeriod=ONE_DAY`, +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +`GET /sparklines` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|timePeriod|query|[SparklineTimePeriod](#schemasparklinetimeperiod)|true|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|timePeriod|ONE_DAY| +|timePeriod|SEVEN_DAYS| + +> Example responses + +> 200 Response + +```json +{ + "property1": [ + "string" + ], + "property2": [ + "string" + ] +} +``` + +### Responses + +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[SparklineResponseObject](#schemasparklineresponseobject)| + + + +## GetTime + + + +> Code samples + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/time', headers = headers) + +print(r.json()) + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/time`, +{ + 
method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); -|Name|Type|Required|Restrictions|Description| -|---|---|---|---|---| -|»» *anonymous*|[OrderStatus](#schemaorderstatus)|false|none|none| +``` -*or* +`GET /time` -|Name|Type|Required|Restrictions|Description| -|---|---|---|---|---| -|»» *anonymous*|[BestEffortOpenedStatus](#schemabesteffortopenedstatus)|false|none|none| +> Example responses -*continued* +> 200 Response -|Name|Type|Required|Restrictions|Description| -|---|---|---|---|---| -|» postOnly|boolean|true|none|none| -|» ticker|string|true|none|none| -|» updatedAt|[IsoString](#schemaisostring)|false|none|none| -|» updatedAtHeight|string|false|none|none| +```json +{ + "iso": "string", + "epoch": 0.1 +} +``` -#### Enumerated Values +### Responses -|Property|Value| -|---|---| -|side|BUY| -|side|SELL| -|type|LIMIT| -|type|MARKET| -|type|STOP_LIMIT| -|type|STOP_MARKET| -|type|TRAILING_STOP| -|type|TAKE_PROFIT| -|type|TAKE_PROFIT_MARKET| -|type|HARD_TRADE| -|type|FAILED_HARD_TRADE| -|type|TRANSFER_PLACEHOLDER| -|timeInForce|GTT| -|timeInForce|FOK| -|timeInForce|IOC| -|*anonymous*|OPEN| -|*anonymous*|FILLED| -|*anonymous*|CANCELED| -|*anonymous*|BEST_EFFORT_CANCELED| -|*anonymous*|UNTRIGGERED| -|*anonymous*|BEST_EFFORT_OPENED| +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TimeResponse](#schematimeresponse)| -## GetOrder +## GetTrades - + > Code samples @@ -1263,7 +3013,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/orders/{orderId}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/trades/perpetualMarket/{ticker}', headers = headers) print(r.json()) @@ -1275,7 +3029,11 @@ const headers = { 
'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/orders/{orderId}', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/trades/perpetualMarket/{ticker}`, { method: 'GET', @@ -1289,13 +3047,17 @@ fetch('https://dydx-testnet.imperator.co/v4/orders/{orderId}', ``` -`GET /orders/{orderId}` +`GET /trades/perpetualMarket/{ticker}` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|orderId|path|string|true|none| +|ticker|path|string|true|none| +|limit|query|number(double)|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|page|query|number(double)|false|none| > Example responses @@ -1303,28 +3065,20 @@ fetch('https://dydx-testnet.imperator.co/v4/orders/{orderId}', ```json { - "id": "string", - "subaccountId": "string", - "clientId": "string", - "clobPairId": "string", - "side": "BUY", - "size": "string", - "totalFilled": "string", - "price": "string", - "type": "LIMIT", - "reduceOnly": true, - "orderFlags": "string", - "goodTilBlock": "string", - "goodTilBlockTime": "string", - "createdAtHeight": "string", - "clientMetadata": "string", - "triggerPrice": "string", - "timeInForce": "GTT", - "status": "OPEN", - "postOnly": true, - "ticker": "string", - "updatedAt": "string", - "updatedAtHeight": "string" + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "trades": [ + { + "id": "string", + "side": "BUY", + "size": "string", + "price": "string", + "type": "LIMIT", + "createdAt": "string", + "createdAtHeight": "string" + } + ] } ``` @@ -1332,15 +3086,15 @@ fetch('https://dydx-testnet.imperator.co/v4/orders/{orderId}', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[OrderResponseObject](#schemaorderresponseobject)| 
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TradeResponse](#schematraderesponse)| -## ListPerpetualMarkets +## GetTransfers - + > Code samples @@ -1350,7 +3104,13 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/perpetualMarkets', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/transfers', params={ + 'address': 'string', 'subaccountNumber': '0.1' +}, headers = headers) print(r.json()) @@ -1362,7 +3122,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/perpetualMarkets', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/transfers?address=string&subaccountNumber=0.1`, { method: 'GET', @@ -1376,14 +3140,18 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualMarkets', ``` -`GET /perpetualMarkets` +`GET /transfers` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| +|address|query|string|true|none| +|subaccountNumber|query|number(double)|true|none| |limit|query|number(double)|false|none| -|ticker|query|string|false|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|page|query|number(double)|false|none| > Example responses @@ -1391,46 +3159,28 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualMarkets', ```json { - "markets": { - "property1": { - "clobPairId": "string", - "ticker": "string", - "status": "ACTIVE", - "oraclePrice": "string", - "priceChange24H": "string", - "volume24H": "string", - "trades24H": 0, - "nextFundingRate": "string", - "initialMarginFraction": "string", - "maintenanceMarginFraction": "string", - "openInterest": "string", - 
"atomicResolution": 0, - "quantumConversionExponent": 0, - "tickSize": "string", - "stepSize": "string", - "stepBaseQuantums": 0, - "subticksPerTick": 0 - }, - "property2": { - "clobPairId": "string", - "ticker": "string", - "status": "ACTIVE", - "oraclePrice": "string", - "priceChange24H": "string", - "volume24H": "string", - "trades24H": 0, - "nextFundingRate": "string", - "initialMarginFraction": "string", - "maintenanceMarginFraction": "string", - "openInterest": "string", - "atomicResolution": 0, - "quantumConversionExponent": 0, - "tickSize": "string", - "stepSize": "string", - "stepBaseQuantums": 0, - "subticksPerTick": 0 + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "transfers": [ + { + "id": "string", + "sender": { + "subaccountNumber": 0, + "address": "string" + }, + "recipient": { + "subaccountNumber": 0, + "address": "string" + }, + "size": "string", + "createdAt": "string", + "createdAtHeight": "string", + "symbol": "string", + "type": "TRANSFER_IN", + "transactionHash": "string" } - } + ] } ``` @@ -1438,15 +3188,15 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualMarkets', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[PerpetualMarketResponse](#schemaperpetualmarketresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TransferResponse](#schematransferresponse)| -## ListPositions +## GetTransfersForParentSubaccount - + > Code samples @@ -1456,8 +3206,12 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/perpetualPositions', params={ - 'address': 'string', 'subaccountNumber': '0' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/transfers/parentSubaccountNumber', params={ + 'address': 'string', 'parentSubaccountNumber': '0.1' }, headers = headers) print(r.json()) 
@@ -1470,7 +3224,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/perpetualPositions?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/transfers/parentSubaccountNumber?address=string&parentSubaccountNumber=0.1`, { method: 'GET', @@ -1484,26 +3242,18 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualPositions?address=string&su ``` -`GET /perpetualPositions` +`GET /transfers/parentSubaccountNumber` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| |address|query|string|true|none| -|subaccountNumber|query|number(double)|true|none| -|status|query|array[string]|false|none| +|parentSubaccountNumber|query|number(double)|true|none| |limit|query|number(double)|false|none| |createdBeforeOrAtHeight|query|number(double)|false|none| |createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| - -#### Enumerated Values - -|Parameter|Value| -|---|---| -|status|OPEN| -|status|CLOSED| -|status|LIQUIDATED| +|page|query|number(double)|false|none| > Example responses @@ -1511,23 +3261,26 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualPositions?address=string&su ```json { - "positions": [ + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "transfers": [ { - "market": "string", - "status": "OPEN", - "side": "LONG", + "id": "string", + "sender": { + "subaccountNumber": 0, + "address": "string" + }, + "recipient": { + "subaccountNumber": 0, + "address": "string" + }, "size": "string", - "maxSize": "string", - "entryPrice": "string", - "realizedPnl": "string", "createdAt": "string", "createdAtHeight": "string", - "sumOpen": "string", - "sumClose": "string", - "netFunding": "string", - "unrealizedPnl": "string", - "closedAt": "string", - "exitPrice": "string" + "symbol": "string", + "type": "TRANSFER_IN", + 
"transactionHash": "string" } ] } @@ -1537,15 +3290,15 @@ fetch('https://dydx-testnet.imperator.co/v4/perpetualPositions?address=string&su |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[PerpetualPositionResponse](#schemaperpetualpositionresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[ParentSubaccountTransferResponse](#schemaparentsubaccounttransferresponse)| -## Get +## GetTransferBetween - + > Code samples @@ -1555,8 +3308,12 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/sparklines', params={ - 'timePeriod': 'ONE_DAY' +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/transfers/between', params={ + 'sourceAddress': 'string', 'sourceSubaccountNumber': '0.1', 'recipientAddress': 'string', 'recipientSubaccountNumber': '0.1' }, headers = headers) print(r.json()) @@ -1569,7 +3326,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/sparklines?timePeriod=ONE_DAY', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/transfers/between?sourceAddress=string&sourceSubaccountNumber=0.1&recipientAddress=string&recipientSubaccountNumber=0.1`, { method: 'GET', @@ -1583,20 +3344,18 @@ fetch('https://dydx-testnet.imperator.co/v4/sparklines?timePeriod=ONE_DAY', ``` -`GET /sparklines` +`GET /transfers/between` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|timePeriod|query|[SparklineTimePeriod](#schemasparklinetimeperiod)|true|none| - -#### Enumerated Values - -|Parameter|Value| -|---|---| -|timePeriod|ONE_DAY| -|timePeriod|SEVEN_DAYS| +|sourceAddress|query|string|true|none| 
+|sourceSubaccountNumber|query|number(double)|true|none| +|recipientAddress|query|string|true|none| +|recipientSubaccountNumber|query|number(double)|true|none| +|createdBeforeOrAtHeight|query|number(double)|false|none| +|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| > Example responses @@ -1604,12 +3363,29 @@ fetch('https://dydx-testnet.imperator.co/v4/sparklines?timePeriod=ONE_DAY', ```json { - "property1": [ - "string" + "pageSize": 0, + "totalResults": 0, + "offset": 0, + "transfersSubset": [ + { + "id": "string", + "sender": { + "subaccountNumber": 0, + "address": "string" + }, + "recipient": { + "subaccountNumber": 0, + "address": "string" + }, + "size": "string", + "createdAt": "string", + "createdAtHeight": "string", + "symbol": "string", + "type": "TRANSFER_IN", + "transactionHash": "string" + } ], - "property2": [ - "string" - ] + "totalNetTransfers": "string" } ``` @@ -1617,15 +3393,15 @@ fetch('https://dydx-testnet.imperator.co/v4/sparklines?timePeriod=ONE_DAY', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[SparklineResponseObject](#schemasparklineresponseobject)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TransferBetweenResponse](#schematransferbetweenresponse)| -## GetTime +## GetMegavaultHistoricalPnl - + > Code samples @@ -1635,7 +3411,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/time', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/vault/v1/megavault/historicalPnl', headers = headers) print(r.json()) @@ -1647,7 +3427,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/time', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; 
+const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/vault/v1/megavault/historicalPnl`, { method: 'GET', @@ -1659,9 +3443,22 @@ fetch('https://dydx-testnet.imperator.co/v4/time', console.log(body); }); -``` +``` + +`GET /vault/v1/megavault/historicalPnl` + +### Parameters + +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|resolution|query|[PnlTickInterval](#schemapnltickinterval)|false|none| -`GET /time` +#### Enumerated Values + +|Parameter|Value| +|---|---| +|resolution|hour| +|resolution|day| > Example responses @@ -1669,8 +3466,18 @@ fetch('https://dydx-testnet.imperator.co/v4/time', ```json { - "iso": "string", - "epoch": 0 + "megavaultPnl": [ + { + "id": "string", + "subaccountId": "string", + "equity": "string", + "totalPnl": "string", + "netTransfers": "string", + "createdAt": "string", + "blockHeight": "string", + "blockTime": "string" + } + ] } ``` @@ -1678,15 +3485,15 @@ fetch('https://dydx-testnet.imperator.co/v4/time', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TimeResponse](#schematimeresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[MegavaultHistoricalPnlResponse](#schemamegavaulthistoricalpnlresponse)| -## GetTrades +## GetVaultsHistoricalPnl - + > Code samples @@ -1696,7 +3503,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/trades/perpetualMarket/{ticker}', headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/vault/v1/vaults/historicalPnl', headers = headers) print(r.json()) @@ -1708,7 +3519,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/trades/perpetualMarket/{ticker}', +// For the deployment by DYDX token holders, use +// const baseURL = 
'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/vault/v1/vaults/historicalPnl`, { method: 'GET', @@ -1722,16 +3537,20 @@ fetch('https://dydx-testnet.imperator.co/v4/trades/perpetualMarket/{ticker}', ``` -`GET /trades/perpetualMarket/{ticker}` +`GET /vault/v1/vaults/historicalPnl` ### Parameters |Name|In|Type|Required|Description| |---|---|---|---|---| -|ticker|path|string|true|none| -|limit|query|number(double)|false|none| -|createdBeforeOrAtHeight|query|number(double)|false|none| -|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +|resolution|query|[PnlTickInterval](#schemapnltickinterval)|false|none| + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|resolution|hour| +|resolution|day| > Example responses @@ -1739,15 +3558,21 @@ fetch('https://dydx-testnet.imperator.co/v4/trades/perpetualMarket/{ticker}', ```json { - "trades": [ + "vaultsPnl": [ { - "id": "string", - "side": "BUY", - "size": "string", - "price": "string", - "type": "LIMIT", - "createdAt": "string", - "createdAtHeight": "string" + "ticker": "string", + "historicalPnl": [ + { + "id": "string", + "subaccountId": "string", + "equity": "string", + "totalPnl": "string", + "netTransfers": "string", + "createdAt": "string", + "blockHeight": "string", + "blockTime": "string" + } + ] } ] } @@ -1757,15 +3582,15 @@ fetch('https://dydx-testnet.imperator.co/v4/trades/perpetualMarket/{ticker}', |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TradeResponse](#schematraderesponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[VaultsHistoricalPnlResponse](#schemavaultshistoricalpnlresponse)| -## GetTransfers +## GetMegavaultPositions - + > Code samples @@ -1775,9 +3600,11 @@ headers = { 'Accept': 'application/json' } -r = requests.get('https://dydx-testnet.imperator.co/v4/transfers', params={ - 'address': 'string', 
'subaccountNumber': '0' -}, headers = headers) +# For the deployment by DYDX token holders, use +# baseURL = 'https://indexer.dydx.trade/v4' +baseURL = 'https://indexer.v4testnet.dydx.exchange/v4' + +r = requests.get(f'{baseURL}/vault/v1/megavault/positions', headers = headers) print(r.json()) @@ -1789,7 +3616,11 @@ const headers = { 'Accept':'application/json' }; -fetch('https://dydx-testnet.imperator.co/v4/transfers?address=string&subaccountNumber=0', +// For the deployment by DYDX token holders, use +// const baseURL = 'https://indexer.dydx.trade/v4'; +const baseURL = 'https://indexer.v4testnet.dydx.exchange/v4'; + +fetch(`${baseURL}/vault/v1/megavault/positions`, { method: 'GET', @@ -1803,17 +3634,7 @@ fetch('https://dydx-testnet.imperator.co/v4/transfers?address=string&subaccountN ``` -`GET /transfers` - -### Parameters - -|Name|In|Type|Required|Description| -|---|---|---|---|---| -|address|query|string|true|none| -|subaccountNumber|query|number(double)|true|none| -|limit|query|number(double)|false|none| -|createdBeforeOrAtHeight|query|number(double)|false|none| -|createdBeforeOrAt|query|[IsoString](#schemaisostring)|false|none| +`GET /vault/v1/megavault/positions` > Example responses @@ -1821,23 +3642,35 @@ fetch('https://dydx-testnet.imperator.co/v4/transfers?address=string&subaccountN ```json { - "transfers": [ + "positions": [ { - "id": "string", - "sender": { - "subaccountNumber": 0, - "address": "string" + "ticker": "string", + "assetPosition": { + "symbol": "string", + "side": "LONG", + "size": "string", + "assetId": "string", + "subaccountNumber": 0 }, - "recipient": { - "subaccountNumber": 0, - "address": "string" + "perpetualPosition": { + "market": "string", + "status": "OPEN", + "side": "LONG", + "size": "string", + "maxSize": "string", + "entryPrice": "string", + "realizedPnl": "string", + "createdAt": "string", + "createdAtHeight": "string", + "sumOpen": "string", + "sumClose": "string", + "netFunding": "string", + "unrealizedPnl": "string", + 
"closedAt": "string", + "exitPrice": "string", + "subaccountNumber": 0 }, - "size": "string", - "createdAt": "string", - "createdAtHeight": "string", - "symbol": "string", - "type": "TRANSFER_IN", - "transactionHash": "string" + "equity": "string" } ] } @@ -1847,7 +3680,7 @@ fetch('https://dydx-testnet.imperator.co/v4/transfers?address=string&subaccountN |Status|Meaning|Description|Schema| |---|---|---|---| -|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[TransferResponse](#schematransferresponse)| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Ok|[MegavaultPositionResponse](#schemamegavaultpositionresponse)|