From 0f28150b84ba0f0aac7c4a9ecc15f9ab4d36aebd Mon Sep 17 00:00:00 2001 From: adam-gf Date: Mon, 5 Aug 2024 11:40:28 +0200 Subject: [PATCH 01/31] wip - failing app context --- backend/app/extensions.py | 8 ++- .../app/infrastructure/contracts/epochs.py | 1 + backend/app/infrastructure/events.py | 64 +++++++++++------ backend/poetry.lock | 41 ++++++++++- backend/pyproject.toml | 1 + backend/startup.py | 70 +++++++++++++------ 6 files changed, 141 insertions(+), 44 deletions(-) diff --git a/backend/app/extensions.py b/backend/app/extensions.py index 8a749b7c8a..17db5d23ec 100644 --- a/backend/app/extensions.py +++ b/backend/app/extensions.py @@ -1,3 +1,4 @@ +import socketio from flask_apscheduler import APScheduler from flask_cors import CORS from flask_migrate import Migrate @@ -23,7 +24,12 @@ description="Octant REST API documentation", catch_all_404s=True, ) -socketio = SocketIO(cors_allowed_origins="*") +# from flask import current_app as app + +socketio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) + # async_mode="asgi", cors_allowed_origins="*", client_manager=mgr +# ) +# socketio = SocketIO(cors_allowed_origins="*") db = SQLAlchemy() migrate = Migrate() cors = CORS() diff --git a/backend/app/infrastructure/contracts/epochs.py b/backend/app/infrastructure/contracts/epochs.py index b2c95ada82..bb47df0f31 100644 --- a/backend/app/infrastructure/contracts/epochs.py +++ b/backend/app/infrastructure/contracts/epochs.py @@ -27,6 +27,7 @@ def get_current_epoch(self) -> int: def get_pending_epoch(self) -> Optional[int]: try: app.logger.debug("[Epochs contract] Getting pending epoch") + return 5 return self.contract.functions.getPendingEpoch().call() except exceptions.ContractLogicError: app.logger.warning("[Epochs contract] No pending epoch") diff --git a/backend/app/infrastructure/events.py b/backend/app/infrastructure/events.py index 86ba0f99c0..87af413dec 100644 --- a/backend/app/infrastructure/events.py +++ b/backend/app/infrastructure/events.py @@ -1,8 +1,8 @@ import json from typing import List -from flask import current_app as app -from flask_socketio import emit +from flask import current_app +# from flask_socketio import emit from app.engine.projects.rewards import ProjectRewardDTO from app.exceptions import OctantException @@ -17,25 +17,42 @@ @socketio.on("connect") -def handle_connect(): - app.logger.debug("Client connected") +async def handle_connect(sid: str, environ: dict): - if epochs.get_pending_epoch() is not None: - threshold = get_allocation_threshold() - emit("threshold", {"threshold": str(threshold)}) + print("Type of sid", type(sid)) + print("Type of environ", type(environ)) - project_rewards = get_estimated_project_rewards().rewards - emit("project_rewards", _serialize_project_rewards(project_rewards)) + # socketio.logger.debug("Client connected") + # app_instance = current_app._get_current_object() + with current_app.app_context(): + + current_app.logger.debug("Cl/ient connected") + + print("Epochs are here") + + await socketio.emit("epoch", {"epoch": "fuckup"}) + + if epochs.get_pending_epoch() is not None: + threshold = get_allocation_threshold() + await socketio.emit("threshold", {"threshold": str(threshold)}) + + project_rewards = get_estimated_project_rewards().rewards + await socketio.emit("project_rewards", _serialize_project_rewards(project_rewards)) @socketio.on("disconnect") -def handle_disconnect(): - app.logger.debug("Client disconnected") +async def handle_disconnect(sid): + socketio.logger.debug("Client disconnected") 
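For reference, a minimal sketch (outside the patch, with illustrative event names and payloads) of the python-socketio AsyncServer handler shape the hunk above is moving towards: handlers receive the session id and the ASGI environ instead of relying on Flask's request context, and every emit is awaited.

    import socketio

    sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")

    @sio.on("connect")
    async def handle_connect(sid: str, environ: dict):
        # Emit only to the connecting client; `to=sid` targets its private room.
        await sio.emit("threshold", {"threshold": "0"}, to=sid)

    @sio.on("disconnect")
    async def handle_disconnect(sid: str):
        # No Flask app context is available here; use module-level logging instead.
        pass
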
@socketio.on("allocate") -def handle_allocate(msg): +async def handle_allocate(sid, msg): + + print("message", msg) msg = json.loads(msg) + + print("MEssage", msg) + is_manually_edited = msg["isManuallyEdited"] if "isManuallyEdited" in msg else None user_address = msg["userAddress"] app.logger.info(f"User allocation payload: {msg}") @@ -44,7 +61,7 @@ def handle_allocate(msg): msg, is_manually_edited=is_manually_edited, ) - app.logger.info(f"User: {user_address} allocated successfully") + socketio.logger.info(f"User: {user_address} allocated successfully") threshold = get_allocation_threshold() emit("threshold", {"threshold": str(threshold)}, broadcast=True) @@ -64,16 +81,21 @@ def handle_allocate(msg): ) -@socketio.on("project_donors") -def handle_project_donors(project_address: str): - donors = controller.get_all_donations_by_project(project_address) - emit( - "project_donors", - {"project": project_address, "donors": _serialize_donors(donors)}, - ) +# @socketio.on("project_donors") +# def handle_project_donors(project_address: str): +# print("Project donors") +# emit( +# "project_donors", +# {"project": project_address, "donors": []}, +# ) +# donors = controller.get_all_donations_by_project(project_address) +# emit( +# "project_donors", +# {"project": project_address, "donors": _serialize_donors(donors)}, +# ) -@socketio.on_error_default +# @socketio. def default_error_handler(e): ExceptionHandler.print_stacktrace(e) if isinstance(e, OctantException): diff --git a/backend/poetry.lock b/backend/poetry.lock index 209ea55a9f..306715b6b7 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1103,6 +1103,26 @@ dnspython = ">=1.15.0" greenlet = ">=0.3" six = ">=1.10.0" +[[package]] +name = "fastapi" +version = "0.112.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.112.0-py3-none-any.whl", hash = "sha256:3487ded9778006a45834b8c816ec4a48d522e2631ca9e75ec5a774f1b052f821"}, + {file = "fastapi-0.112.0.tar.gz", hash = "sha256:d262bc56b7d101d1f4e8fc0ad2ac75bb9935fec504d2b7117686cec50710cf05"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] + [[package]] name = "flake8" version = "6.1.0" @@ -3202,6 +3222,23 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "starlette" +version = "0.37.2" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + [[package]] name = "toolz" version = "0.12.1" @@ -3521,4 +3558,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "8beb2e0b06481e87b431a937a21c11d21f06810f5b7497781c4be087d48e4b44" +content-hash = "07a0348e2a44a40f17ba9605b8de32af30fd335a64b6fd9559bce5a3cd821c76" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 683adf29af..12df28a42e 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -29,6 +29,7 @@ pandas = "^2.2.0" gmpy2 = "^2.1.5" sentry-sdk = {extras = ["flask"], version = "^2.5.1"} redis = "^5.0.7" +fastapi = "^0.112.0" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" diff --git a/backend/startup.py b/backend/startup.py index f100cc84d9..5603127e6f 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -1,21 +1,14 @@ -# !!! IMPORTANT: DO NOT REARRANGE IMPORTS IN THIS FILE !!! -# The eventlet monkey patch needs to be applied before importing the Flask application for the following reasons: -# 1. Enabling Asynchronous I/O: The monkey patch is required to activate eventlet’s asynchronous and non-blocking I/O capabilities. -# Without this patch, the app's I/O requests might be blocked, which is not desirable for our API's performance. -# 2. Import Order Significance: The monkey patch must be applied before importing the Flask application to ensure that the app utilizes -# the asynchronous versions of standard library modules that have been patched by eventlet. If not done in this order, we might experience issues similar to -# what is reported in the following eventlet issue: https://github.com/eventlet/eventlet/issues/371 -# This comment provides additional insight and helped resolve our specific problem: https://github.com/eventlet/eventlet/issues/371#issuecomment-779967181 -# 3. Issue with dnspython: If dnspython is present in the environment, eventlet monkeypatches socket.getaddrinfo(), -# which breaks dns functionality. 
By setting the EVENTLET_NO_GREENDNS environment variable before importing eventlet, -# we prevent this monkeypatching - +import asyncio +from concurrent.futures import ThreadPoolExecutor +import io import os +from fastapi import FastAPI, Request +from fastapi.middleware.wsgi import WSGIMiddleware +from fastapi.responses import JSONResponse, RedirectResponse, StreamingResponse +from starlette.responses import Response -os.environ["EVENTLET_NO_GREENDNS"] = "yes" -import eventlet # noqa +from starlette.middleware.base import BaseHTTPMiddleware -eventlet.monkey_patch() if os.getenv("SENTRY_DSN"): import sentry_sdk @@ -31,13 +24,50 @@ from app import create_app # noqa from app.extensions import db # noqa -app = create_app() - +# Create Flask app +flask_app = create_app() -@app.teardown_request +@flask_app.teardown_request def teardown_session(*args, **kwargs): db.session.remove() -if __name__ == "__main__": - eventlet.wsgi.server(eventlet.listen(("0.0.0.0", 5000)), app, log=app.logger) +# Create FastAPI app +fastapi_app = FastAPI() + +@fastapi_app.get("/fastapi-endpoint") +async def fastapi_endpoint(): + return {"message": "This is a FastAPI endpoint."} + +# Mount Flask app under a sub-path +fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) + + +# Middleware to check if the path exists in FastAPI +class PathCheckMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + path = request.url.path + # Check if the path exists in FastAPI routes + for route in fastapi_app.routes: + if path == route.path: + # If path exists, proceed with the request + return await call_next(request) + # If path does not exist, modify the request to forward to the Flask app + if path.startswith('/flask'): + return await call_next(request) + request.scope['path'] = '/flask' + path # Adjust the path as needed + response = await call_next(request) + return response + + +fastapi_app.add_middleware(PathCheckMiddleware) + + +from app.extensions import socketio as our_socketio +import socketio + +sio_asgi_app = socketio.ASGIApp(socketio_server=our_socketio, other_asgi_app=fastapi_app) + +# app.mount("/static", StaticFiles(directory="static"), name="static") +fastapi_app.add_route("/socket.io/", route=sio_asgi_app) +fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) From e2310d10e07114a78a9dc591eeaf70f7ae8c4f1b Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 13 Aug 2024 10:57:12 +0200 Subject: [PATCH 02/31] Throwing more wip --- backend/app/extensions.py | 1 + backend/app/logging.py | 5 + backend/startup.py | 22 ++-- backend/v2/__init__.py | 0 backend/v2/allocations/__init__.py | 0 backend/v2/allocations/repositories.py | 18 +++ backend/v2/allocations/socket.py | 163 +++++++++++++++++++++++++ backend/v2/core/__init__.py | 0 backend/v2/core/contracts.py | 11 ++ backend/v2/core/dependencies.py | 14 +++ backend/v2/epochs/__init__.py | 0 backend/v2/epochs/contracts.py | 155 +++++++++++++++++++++++ backend/v2/epochs/dependencies.py | 8 ++ backend/v2/epochs/repositories.py | 12 ++ backend/v2/main.py | 26 ++++ backend/v2/projects/__init__.py | 0 backend/v2/projects/contracts.py | 33 +++++ backend/v2/projects/depdendencies.py | 9 ++ backend/v2/projects/services.py | 123 +++++++++++++++++++ 19 files changed, 590 insertions(+), 10 deletions(-) create mode 100644 backend/v2/__init__.py create mode 100644 backend/v2/allocations/__init__.py create mode 100644 backend/v2/allocations/repositories.py create mode 100644 backend/v2/allocations/socket.py create mode 100644 
backend/v2/core/__init__.py create mode 100644 backend/v2/core/contracts.py create mode 100644 backend/v2/core/dependencies.py create mode 100644 backend/v2/epochs/__init__.py create mode 100644 backend/v2/epochs/contracts.py create mode 100644 backend/v2/epochs/dependencies.py create mode 100644 backend/v2/epochs/repositories.py create mode 100644 backend/v2/main.py create mode 100644 backend/v2/projects/__init__.py create mode 100644 backend/v2/projects/contracts.py create mode 100644 backend/v2/projects/depdendencies.py create mode 100644 backend/v2/projects/services.py diff --git a/backend/app/extensions.py b/backend/app/extensions.py index 17db5d23ec..40cf6528f6 100644 --- a/backend/app/extensions.py +++ b/backend/app/extensions.py @@ -27,6 +27,7 @@ # from flask import current_app as app socketio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) + # async_mode="asgi", cors_allowed_origins="*", client_manager=mgr # ) # socketio = SocketIO(cors_allowed_origins="*") diff --git a/backend/app/logging.py b/backend/app/logging.py index 958eb9c0eb..239b8ec059 100644 --- a/backend/app/logging.py +++ b/backend/app/logging.py @@ -58,6 +58,11 @@ def config(app_level): "apscheduler.executors.default": { "level": "WARNING", }, + "uvicorn": { # Adding for the uvicorn logger (FastAPI) + "level": app_level, + "handlers": ["stdout", "stderr"], + "propagate": 0, + }, }, } diff --git a/backend/startup.py b/backend/startup.py index 5603127e6f..f6a4b81596 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -32,12 +32,14 @@ def teardown_session(*args, **kwargs): db.session.remove() +from v2.main import fastapi_app + # Create FastAPI app -fastapi_app = FastAPI() +# fastapi_app = FastAPI() -@fastapi_app.get("/fastapi-endpoint") -async def fastapi_endpoint(): - return {"message": "This is a FastAPI endpoint."} +# @fastapi_app.get("/fastapi-endpoint") +# async def fastapi_endpoint(): +# return {"message": "This is a FastAPI endpoint."} # Mount Flask app under a sub-path fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) @@ -63,11 +65,11 @@ async def dispatch(self, request: Request, call_next): fastapi_app.add_middleware(PathCheckMiddleware) -from app.extensions import socketio as our_socketio -import socketio +# from app.extensions import socketio as our_socketio +# import socketio -sio_asgi_app = socketio.ASGIApp(socketio_server=our_socketio, other_asgi_app=fastapi_app) +# sio_asgi_app = socketio.ASGIApp(socketio_server=our_socketio, other_asgi_app=fastapi_app) -# app.mount("/static", StaticFiles(directory="static"), name="static") -fastapi_app.add_route("/socket.io/", route=sio_asgi_app) -fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) +# # app.mount("/static", StaticFiles(directory="static"), name="static") +# fastapi_app.add_route("/socket.io/", route=sio_asgi_app) +# fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) diff --git a/backend/v2/__init__.py b/backend/v2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/allocations/__init__.py b/backend/v2/allocations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py new file mode 100644 index 0000000000..0a0bb84dc6 --- /dev/null +++ b/backend/v2/allocations/repositories.py @@ -0,0 +1,18 @@ + + +from sqlalchemy import func, select +from sqlalchemy.ext.asyncio import AsyncSession + + +from backend.app.infrastructure.database.models import Allocation + + +async 
def sum_allocations_by_epoch(session: AsyncSession, epoch: int) -> int: + """Get the sum of all allocations for a given epoch. We only consider the allocations that have not been deleted. + """ + + result = await session.execute( + select(func.sum(Allocation.amount)).filter(Allocation.epoch == epoch).filter(Allocation.deleted_at.is_(None)) + ) + count = result.scalar() + return count \ No newline at end of file diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py new file mode 100644 index 0000000000..6b84064cb9 --- /dev/null +++ b/backend/v2/allocations/socket.py @@ -0,0 +1,163 @@ +import json +import logging +from typing import List + +from flask import current_app +import socketio +# from flask_socketio import emit + +from app.engine.projects.rewards import ProjectRewardDTO +from app.exceptions import OctantException +# from app.extensions import socketio, epochs +from app.infrastructure.exception_handler import UNEXPECTED_EXCEPTION, ExceptionHandler + +from app.modules.dto import ProjectDonationDTO +from app.modules.projects.rewards.controller import get_estimated_project_rewards +from app.modules.user.allocations import controller + + +from backend.v2.core.dependencies import get_w3 +from backend.v2.epochs.contracts import Epochs +from backend.v2.epochs.dependencies import get_epochs +from backend.v2.projects.contracts import Projects + +from backend.v2.projects.depdendencies import get_projects +from backend.v2.projects.services import get_projects_allocation_threshold + + +class AllocateNamespace(socketio.AsyncNamespace): + + def __init__(self, namespace: str): + super().__init__(namespace=namespace) + + self.w3 = get_w3() + self.settings = get_settings() + self.epochs = get_epochs(self.w3, self.settings.epochs_contract_address) + self.projects = get_projects(self.w3, self.settings.projects_contract_address) + + async def on_connect(self, sid: str, environ: dict): + """ + Handle client connection + """ + + print("Type of sid", type(sid)) + print("Type of environ", type(environ)) + + # socketio.logger.debug("Client connected") + # app_instance = current_app._get_current_object() + + logging.debug("Client connected") + + print("Epochs are here") + + await self.emit("epoch", {"epoch": "fuckup"}) + + # We send the data only in PENDING state + pending_epoch_number = await self.epochs.get_pending_epoch() + + # We do not handle requests outside of pending epoch state + if pending_epoch_number is None: + return + + + threshold = await get_projects_allocation_threshold( + session=self.session, # TODO: + projects=self.projects, + epoch_number=pending_epoch_number + ) + + await self.emit("threshold", {"threshold": str(threshold)}) + + project_rewards = get_estimated_project_rewards().rewards + await self.emit("project_rewards", _serialize_project_rewards(project_rewards)) + + + async def on_disconnect(self, sid): + + logging.debug("Client disconnected") + + + async def on_allocate(self, sid: str, environ: dict) -> None: + + + print("message", msg) + msg = json.loads(msg) + + print("MEssage", msg) + + is_manually_edited = msg["isManuallyEdited"] if "isManuallyEdited" in msg else None + user_address = msg["userAddress"] + logging.info(f"User allocation payload: {msg}") + controller.allocate( + user_address, + msg, + is_manually_edited=is_manually_edited, + ) + socketio.logger.info(f"User: {user_address} allocated successfully") + + threshold = get_projects_allocation_threshold() + await self.emit("threshold", {"threshold": str(threshold)}, broadcast=True) + + 
project_rewards = get_estimated_project_rewards().rewards + await self.emit( + "project_rewards", + _serialize_project_rewards(project_rewards), + broadcast=True, + ) + for project in project_rewards: + donors = controller.get_all_donations_by_project(project.address) + await self.emit( + "project_donors", + {"project": project.address, "donors": _serialize_donors(donors)}, + broadcast=True, + ) + + +# def state_context(epoch_state: EpochState) -> Context: +# epoch_num = get_epoch_number(epoch_state) +# return build_context(epoch_num, epoch_state, with_block_range) + + + +# @socketio.on("project_donors") +# def handle_project_donors(project_address: str): +# print("Project donors") +# emit( +# "project_donors", +# {"project": project_address, "donors": []}, +# ) +# donors = controller.get_all_donations_by_project(project_address) +# emit( +# "project_donors", +# {"project": project_address, "donors": _serialize_donors(donors)}, +# ) + + +# @socketio. +def default_error_handler(e): + ExceptionHandler.print_stacktrace(e) + if isinstance(e, OctantException): + emit("exception", {"message": str(e.message)}) + else: + emit("exception", {"message": UNEXPECTED_EXCEPTION}) + + +def _serialize_project_rewards(project_rewards: List[ProjectRewardDTO]) -> List[dict]: + return [ + { + "address": project_reward.address, + "allocated": str(project_reward.allocated), + "matched": str(project_reward.matched), + } + for project_reward in project_rewards + ] + + +def _serialize_donors(donors: List[ProjectDonationDTO]) -> List[dict]: + return [ + { + "address": donor.donor, + "amount": str(donor.amount), + } + for donor in donors + ] diff --git a/backend/v2/core/__init__.py b/backend/v2/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/core/contracts.py b/backend/v2/core/contracts.py new file mode 100644 index 0000000000..2ffe188ea7 --- /dev/null +++ b/backend/v2/core/contracts.py @@ -0,0 +1,11 @@ + +from web3 import AsyncWeb3 +from web3.contract import AsyncContract +from web3.types import ABI + + +class SmartContract: + def __init__(self, w3: AsyncWeb3, abi: ABI, address: str) -> None: + self.abi = abi + self.w3 = w3 + self.contract: AsyncContract = w3.eth.contract(address=address, abi=abi) diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py new file mode 100644 index 0000000000..0a7a0af65a --- /dev/null +++ b/backend/v2/core/dependencies.py @@ -0,0 +1,14 @@ +from web3 import AsyncWeb3 +from web3.middleware import async_geth_poa_middleware + + +# TODO: Cache? 
+def get_w3(web3_provider: str) -> AsyncWeb3: + + w3 = AsyncWeb3() + w3.provider = web3_provider + if async_geth_poa_middleware not in w3.middleware_onion: + w3.middleware_onion.inject(async_geth_poa_middleware, layer=0) + + return w3 + diff --git a/backend/v2/epochs/__init__.py b/backend/v2/epochs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/epochs/contracts.py b/backend/v2/epochs/contracts.py new file mode 100644 index 0000000000..a4614f7fd7 --- /dev/null +++ b/backend/v2/epochs/contracts.py @@ -0,0 +1,155 @@ +from typing import Dict, Literal, Optional, TypedDict + +from web3 import exceptions + +import logging + +from backend.v2.core.contracts import SmartContract + + +class Epochs(SmartContract): + + async def is_decision_window_open(self) -> bool: + logging.debug("[Epochs contract] Checking if decision window is open") + return await self.contract.functions.isDecisionWindowOpen().call() + + async def get_decision_window(self) -> bool: + logging.debug("[Epochs contract] Checking decision window length") + return await self.contract.functions.getDecisionWindow().call() + + async def get_current_epoch(self) -> int: + try: + logging.debug("[Epochs contract] Getting current epoch") + return await self.contract.functions.getCurrentEpoch().call() + except exceptions.ContractLogicError: + logging.warning("[Epochs contract] Current epoch not started yet") + # HN:Epochs/not-started-yet + return 0 + + async def get_pending_epoch(self) -> Optional[int]: + try: + logging.debug("[Epochs contract] Getting pending epoch") + # return 5 + return await self.contract.functions.getPendingEpoch().call() + except exceptions.ContractLogicError: + logging.warning("[Epochs contract] No pending epoch") + # HN:Epochs/not-pending + return None + + async def get_finalized_epoch(self) -> int: + try: + logging.debug("[Epochs contract] Getting finalized epoch") + return await self.contract.functions.getFinalizedEpoch().call() + except exceptions.ContractLogicError: + logging.warning("[Epochs contract] No finalized epoch") + # HN:Epochs/not-finalized + return 0 + + async def get_current_epoch_end(self) -> int: + logging.debug("[Epochs contract] Checking when current epoch ends") + return await self.contract.functions.getCurrentEpochEnd().call() + + async def get_epoch_duration(self) -> int: + logging.debug("[Epochs contract] Checking epoch duration") + return await self.contract.functions.getEpochDuration().call() + + async def get_future_epoch_props(self) -> Dict: + logging.debug("[Epochs contract] Getting epoch props index") + index = await self.contract.functions.epochPropsIndex().call() + logging.debug("[Epochs contract] Getting next epoch props") + return await self.contract.functions.epochProps(index).call() + + async def is_started(self) -> bool: + logging.debug("[Epochs contract] Checking if first epoch has started") + return await self.contract.functions.isStarted().call() + + async def start(self) -> int: + logging.debug("[Epochs contract] Checking when first epochs starts") + return await self.contract.functions.start().call() + + +EPOCHS_ABI = [ + { + "inputs": [], + "name": "getCurrentEpoch", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "getCurrentEpochEnd", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "getPendingEpoch", + "outputs": 
[{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "getFinalizedEpoch", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "getEpochDuration", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "getDecisionWindow", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "isDecisionWindowOpen", + "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "name": "epochProps", + "outputs": [ + {"internalType": "uint32", "name": "from", "type": "uint32"}, + {"internalType": "uint32", "name": "to", "type": "uint32"}, + {"internalType": "uint64", "name": "fromTs", "type": "uint64"}, + {"internalType": "uint64", "name": "duration", "type": "uint64"}, + {"internalType": "uint64", "name": "decisionWindow", "type": "uint64"}, + ], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "epochPropsIndex", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "isStarted", + "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "start", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, +] \ No newline at end of file diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py new file mode 100644 index 0000000000..a32f25b813 --- /dev/null +++ b/backend/v2/epochs/dependencies.py @@ -0,0 +1,8 @@ +from web3 import AsyncWeb3 +from .contracts import Epochs, EPOCHS_ABI + + +# TODO: cache +def get_epochs(w3: AsyncWeb3, epochs_contract_address: str) -> Epochs: + + return Epochs(w3, EPOCHS_ABI, epochs_contract_address) diff --git a/backend/v2/epochs/repositories.py b/backend/v2/epochs/repositories.py new file mode 100644 index 0000000000..33aa9178a2 --- /dev/null +++ b/backend/v2/epochs/repositories.py @@ -0,0 +1,12 @@ + + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from backend.app.infrastructure.database.models import PendingEpochSnapshot + + +async def get_pending_epoch_snapshot_by_epoch(session: AsyncSession, epoch: int) -> PendingEpochSnapshot | None: + + result = await session.execute(select(PendingEpochSnapshot).filter(PendingEpochSnapshot.epoch == epoch)) + return result.scalars().first() diff --git a/backend/v2/main.py b/backend/v2/main.py new file mode 100644 index 0000000000..d9d51cb01e --- /dev/null +++ b/backend/v2/main.py @@ -0,0 +1,26 @@ + + +# Create FastAPI app +from fastapi import FastAPI + +from v2.allocations.socket import AllocateNamespace + +import socketio + + + +fastapi_app = FastAPI() + +@fastapi_app.get("/fastapi-endpoint") +async def fastapi_endpoint(): + return {"message": "This is a FastAPI endpoint."} + + +sio=socketio.AsyncServer(cors_allowed_origins='*',async_mode='asgi') +sio.register_namespace(AllocateNamespace('/')) +sio_asgi_app = 
socketio.ASGIApp(socketio_server=sio, other_asgi_app=fastapi_app) + +# app.mount("/static", StaticFiles(directory="static"), name="static") +# fastapi_app.mount("/", sio_asgi_app) +fastapi_app.add_route("/socket.io/", route=sio_asgi_app) +fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) \ No newline at end of file diff --git a/backend/v2/projects/__init__.py b/backend/v2/projects/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/projects/contracts.py b/backend/v2/projects/contracts.py new file mode 100644 index 0000000000..09f28afe03 --- /dev/null +++ b/backend/v2/projects/contracts.py @@ -0,0 +1,33 @@ + +import logging +from backend.v2.core.contracts import SmartContract + + +class Projects(SmartContract): + async def get_project_addresses(self, epoch: int) -> list[str]: + logging.debug( + f"[Projects contract] Getting project addresses for epoch: {epoch}" + ) + return await self.contract.functions.getProposalAddresses(epoch).call() + + async def get_project_cid(self): + logging.debug("[Projects contract] Getting projects CID") + return await self.contract.functions.cid().call() + + +PROJECTS_ABI = [ + { + "inputs": [{"internalType": "uint256", "name": "_epoch", "type": "uint256"}], + "name": "getProposalAddresses", + "outputs": [{"internalType": "address[]", "name": "", "type": "address[]"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [], + "name": "cid", + "outputs": [{"internalType": "string", "name": "", "type": "string"}], + "stateMutability": "view", + "type": "function", + }, +] diff --git a/backend/v2/projects/depdendencies.py b/backend/v2/projects/depdendencies.py new file mode 100644 index 0000000000..a59bf553ca --- /dev/null +++ b/backend/v2/projects/depdendencies.py @@ -0,0 +1,9 @@ +from web3 import AsyncWeb3 +from .contracts import Projects, PROJECTS_ABI + + +# TODO: cache +def get_projects(w3: AsyncWeb3, projects_contract_address: str) -> Projects: + + # projects.init_web3(w3, app.config["PROJECTS_CONTRACT_ADDRESS"]) + return Projects(w3, PROJECTS_ABI, projects_contract_address) diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py new file mode 100644 index 0000000000..ae9984aef9 --- /dev/null +++ b/backend/v2/projects/services.py @@ -0,0 +1,123 @@ + + + +from sqlalchemy.ext.asyncio import AsyncSession + +from backend.v2.allocations.repositories import sum_allocations_by_epoch +from backend.v2.epochs.repositories import get_pending_epoch_snapshot_by_epoch +from backend.v2.projects.contracts import Projects + + + +async def get_projects_allocation_threshold( + # Dependencies + session: AsyncSession, + projects: Projects, + # Arguments + epoch_number: int, + project_count_multiplier: int = 1, +) -> int: + + # PROJECTS_COUNT_MULTIPLIER = 1 # TODO: from settings? 
+ + total_allocated = await sum_allocations_by_epoch(session, epoch_number) + project_addresses = await projects.get_project_addresses(epoch_number) + + return _calculate_threshold(total_allocated, len(project_addresses), project_count_multiplier) + + +def _calculate_threshold( + total_allocated: int, + projects_count: int, + project_count_multiplier: int, +) -> int: + return ( + int(total_allocated / (projects_count * project_count_multiplier)) + if projects_count + else 0 + ) + + + +async def get_estimated_project_rewards( + # Dependencies + session: AsyncSession, + projects: Projects, + + # Arguments + epoch_number: int, +) -> int: + pass + + all_projects = await projects.get_project_addresses(epoch_number) + + +async def get_estimated_project_matched_rewards_pending( + # Dependencies + session: AsyncSession, + projects: Projects, + + # Arguments + epoch_number: int, +) -> int: + +# pending_snapshot = await get_pending_epoch_snapshot_by_epoch(session, epoch_number) + +# patrons_rewards = + +# def get_patrons_rewards(self, context: Context) -> int: +# epoch = context.epoch_details +# patrons = database.patrons.get_all_patrons_at_timestamp( +# epoch.finalized_timestamp.datetime() +# ) +# return database.budgets.get_sum_by_users_addresses_and_epoch( +# patrons, epoch.epoch_num +# ) + + +# def get_matched_rewards() +# patrons_mode: UserPatronMode + +# def get_matched_rewards(self, context: Context) -> int: +# pending_snapshot = database.pending_epoch_snapshot.get_by_epoch( +# context.epoch_details.epoch_num +# ) +# patrons_rewards = self.patrons_mode.get_patrons_rewards(context) +# matched_rewards_settings = context.epoch_settings.octant_rewards.matched_rewards + +# return matched_rewards_settings.calculate_matched_rewards( +# MatchedRewardsPayload( +# total_rewards=int(pending_snapshot.total_rewards), +# vanilla_individual_rewards=int( +# pending_snapshot.vanilla_individual_rewards +# ), +# patrons_rewards=patrons_rewards, +# staking_proceeds=int(pending_snapshot.eth_proceeds), +# locked_ratio=Decimal(pending_snapshot.locked_ratio), +# ire_percent=context.epoch_settings.octant_rewards.total_and_vanilla_individual_rewards.IRE_PERCENT, +# tr_percent=context.epoch_settings.octant_rewards.total_and_vanilla_individual_rewards.TR_PERCENT, +# ) +# ) + + +# project_rewards = get_estimated_project_rewards().rewards + +# def get_project_rewards(self, context: Context) -> ProjectRewardsResult: +# project_settings = context.epoch_settings.project # .rewards CappedQuadraticFundingProjectRewards +# # all_projects = context.projects_details.projects +# matched_rewards = self.octant_rewards.get_matched_rewards(context) +# allocations = database.allocations.get_all_with_uqs( +# context.epoch_details.epoch_num +# ) + +# allocations_payload = AllocationsPayload( +# before_allocations=allocations, user_new_allocations=[] +# ) +# projects_rewards = get_projects_rewards( +# project_settings, +# allocations_payload, +# all_projects, +# matched_rewards, +# ) + +# return projects_rewards From 591ef920e7033ee36149ef9e43064e7947360790 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 27 Aug 2024 17:54:14 +0200 Subject: [PATCH 03/31] Adds more migrated code. 
Allocation and rewards look ok --- backend/app/__init__.py | 2 +- backend/app/extensions.py | 7 +- backend/app/infrastructure/events.py | 69 ++-- backend/app/settings.py | 2 +- backend/poetry.lock | 160 ++++++++- backend/pyproject.toml | 8 + backend/socket_client.py | 85 +++++ backend/startup.py | 7 +- backend/v2/allocations/models.py | 38 +++ backend/v2/allocations/repositories.py | 265 ++++++++++++++- backend/v2/allocations/services.py | 315 ++++++++++++++++++ backend/v2/allocations/socket.py | 254 ++++++++++---- backend/v2/core/contracts.py | 4 +- backend/v2/core/dependencies.py | 45 ++- backend/v2/crypto/__init__.py | 0 backend/v2/crypto/contracts.py | 40 +++ backend/v2/crypto/signatures.py | 63 ++++ backend/v2/epoch_snapshots/__init__.py | 0 backend/v2/epoch_snapshots/repositories.py | 12 + backend/v2/epochs/contracts.py | 13 +- backend/v2/epochs/dependencies.py | 34 +- backend/v2/epochs/repositories.py | 12 - backend/v2/epochs/subgraphs.py | 136 ++++++++ backend/v2/gitcoin_passport/__init__.py | 0 backend/v2/gitcoin_passport/repositories.py | 19 ++ backend/v2/gitcoin_passport/services.py | 25 ++ backend/v2/main.py | 19 +- backend/v2/project_rewards/__init__.py | 0 .../v2/project_rewards/capped_quadriatic.py | 138 ++++++++ backend/v2/projects/contracts.py | 12 +- backend/v2/projects/depdendencies.py | 21 +- backend/v2/projects/services.py | 171 +++++----- backend/v2/uniqueness_quotients/__init__.py | 0 .../v2/uniqueness_quotients/repositories.py | 45 +++ backend/v2/uniqueness_quotients/services.py | 60 ++++ backend/v2/user_patron_mode/__init__.py | 0 backend/v2/user_patron_mode/repositories.py | 116 +++++++ backend/v2/users/__init__.py | 0 backend/v2/users/repositories.py | 11 + 39 files changed, 1943 insertions(+), 265 deletions(-) create mode 100644 backend/socket_client.py create mode 100644 backend/v2/allocations/models.py create mode 100644 backend/v2/allocations/services.py create mode 100644 backend/v2/crypto/__init__.py create mode 100644 backend/v2/crypto/contracts.py create mode 100644 backend/v2/crypto/signatures.py create mode 100644 backend/v2/epoch_snapshots/__init__.py create mode 100644 backend/v2/epoch_snapshots/repositories.py delete mode 100644 backend/v2/epochs/repositories.py create mode 100644 backend/v2/epochs/subgraphs.py create mode 100644 backend/v2/gitcoin_passport/__init__.py create mode 100644 backend/v2/gitcoin_passport/repositories.py create mode 100644 backend/v2/gitcoin_passport/services.py create mode 100644 backend/v2/project_rewards/__init__.py create mode 100644 backend/v2/project_rewards/capped_quadriatic.py create mode 100644 backend/v2/uniqueness_quotients/__init__.py create mode 100644 backend/v2/uniqueness_quotients/repositories.py create mode 100644 backend/v2/uniqueness_quotients/services.py create mode 100644 backend/v2/user_patron_mode/__init__.py create mode 100644 backend/v2/user_patron_mode/repositories.py create mode 100644 backend/v2/users/__init__.py create mode 100644 backend/v2/users/repositories.py diff --git a/backend/app/__init__.py b/backend/app/__init__.py index 1608020af3..7d01ad42e7 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -47,7 +47,7 @@ def register_extensions(app): cors.init_app(app) db.init_app(app) migrate.init_app(app, db) - socketio.init_app(app) + # socketio.init_app(app) cache.init_app(app) init_scheduler(app) init_logger(app) diff --git a/backend/app/extensions.py b/backend/app/extensions.py index 40cf6528f6..78e0dc2f6b 100644 --- a/backend/app/extensions.py +++ 
b/backend/app/extensions.py @@ -1,4 +1,3 @@ -import socketio from flask_apscheduler import APScheduler from flask_cors import CORS from flask_migrate import Migrate @@ -26,11 +25,11 @@ ) # from flask import current_app as app -socketio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) +# socketio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) - # async_mode="asgi", cors_allowed_origins="*", client_manager=mgr +# async_mode="asgi", cors_allowed_origins="*", client_manager=mgr # ) -# socketio = SocketIO(cors_allowed_origins="*") +socketio = SocketIO(cors_allowed_origins="*") db = SQLAlchemy() migrate = Migrate() cors = CORS() diff --git a/backend/app/infrastructure/events.py b/backend/app/infrastructure/events.py index 87af413dec..78cc07ded7 100644 --- a/backend/app/infrastructure/events.py +++ b/backend/app/infrastructure/events.py @@ -1,8 +1,8 @@ import json from typing import List -from flask import current_app -# from flask_socketio import emit +from flask import current_app as app +from flask_socketio import emit from app.engine.projects.rewards import ProjectRewardDTO from app.exceptions import OctantException @@ -17,42 +17,32 @@ @socketio.on("connect") -async def handle_connect(sid: str, environ: dict): +def handle_connect(): + app.logger.debug("Client connected") - print("Type of sid", type(sid)) - print("Type of environ", type(environ)) + if epochs.get_pending_epoch() is not None: + threshold = get_allocation_threshold() + emit("threshold", {"threshold": str(threshold)}) - # socketio.logger.debug("Client connected") - # app_instance = current_app._get_current_object() - with current_app.app_context(): + project_rewards = get_estimated_project_rewards().rewards + emit("project_rewards", _serialize_project_rewards(project_rewards)) - current_app.logger.debug("Cl/ient connected") - - print("Epochs are here") - - await socketio.emit("epoch", {"epoch": "fuckup"}) - - if epochs.get_pending_epoch() is not None: - threshold = get_allocation_threshold() - await socketio.emit("threshold", {"threshold": str(threshold)}) - - project_rewards = get_estimated_project_rewards().rewards - await socketio.emit("project_rewards", _serialize_project_rewards(project_rewards)) + for project in project_rewards: + donors = controller.get_all_donations_by_project(project.address) + emit( + "project_donors", + {"project": project.address, "donors": _serialize_donors(donors)}, + ) @socketio.on("disconnect") -async def handle_disconnect(sid): - socketio.logger.debug("Client disconnected") +def handle_disconnect(): + app.logger.debug("Client disconnected") @socketio.on("allocate") -async def handle_allocate(sid, msg): - - print("message", msg) +def handle_allocate(msg): msg = json.loads(msg) - - print("MEssage", msg) - is_manually_edited = msg["isManuallyEdited"] if "isManuallyEdited" in msg else None user_address = msg["userAddress"] app.logger.info(f"User allocation payload: {msg}") @@ -61,7 +51,7 @@ async def handle_allocate(sid, msg): msg, is_manually_edited=is_manually_edited, ) - socketio.logger.info(f"User: {user_address} allocated successfully") + app.logger.info(f"User: {user_address} allocated successfully") threshold = get_allocation_threshold() emit("threshold", {"threshold": str(threshold)}, broadcast=True) @@ -81,21 +71,16 @@ async def handle_allocate(sid, msg): ) -# @socketio.on("project_donors") -# def handle_project_donors(project_address: str): -# print("Project donors") -# emit( -# "project_donors", -# {"project": 
project_address, "donors": []}, -# ) -# donors = controller.get_all_donations_by_project(project_address) -# emit( -# "project_donors", -# {"project": project_address, "donors": _serialize_donors(donors)}, -# ) +@socketio.on("project_donors") +def handle_project_donors(project_address: str): + donors = controller.get_all_donations_by_project(project_address) + emit( + "project_donors", + {"project": project_address, "donors": _serialize_donors(donors)}, + ) -# @socketio. +@socketio.on_error_default def default_error_handler(e): ExceptionHandler.print_stacktrace(e) if isinstance(e, OctantException): diff --git a/backend/app/settings.py b/backend/app/settings.py index 75c37cb388..d60eacf8df 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -104,7 +104,7 @@ class DevConfig(Config): ENV = "dev" DEBUG = True - LOG_LVL = os.getenv("OCTANT_LOG_LEVEL", "DEBUG") + LOG_LVL = os.getenv("OCTANT_LOG_LEVEL", "INFO") DB_NAME = "dev.db" CHAIN_ID = int(os.getenv("CHAIN_ID", 1337)) # Put the db file in project root diff --git a/backend/poetry.lock b/backend/poetry.lock index 306715b6b7..3d379525a8 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -109,6 +109,24 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "aiosqlite" +version = "0.20.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, +] + +[package.dependencies] +typing_extensions = ">=4.0" + +[package.extras] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] + [[package]] name = "alembic" version = "1.13.1" @@ -1664,6 +1682,20 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.2.0" @@ -2022,6 +2054,52 @@ url = "https://github.com/stakewise/multiproof.git" reference = "v0.1.2" resolved_reference = "e1f3633a10cb5929cc08d4f261effd170976e7b9" +[[package]] +name = "mypy" +version = "1.11.2" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -2499,6 +2577,26 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pydantic-settings" +version = "2.4.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, + {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pyflakes" version = "3.1.0" @@ -3017,6 +3115,33 @@ docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme rust-backend = ["rusty-rlp (>=0.2.1)"] test = ["hypothesis (==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] +[[package]] +name = "ruff" +version = "0.6.2" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c"}, + {file = "ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570"}, + {file = "ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56"}, + {file = "ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da"}, + {file = "ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2"}, + {file = "ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9"}, + {file = "ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be"}, +] + [[package]] name = "sentry-sdk" version = "2.6.0" @@ -3306,24 +3431,43 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "uvicorn" +version = "0.30.6" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + [[package]] name = "web3" -version = "6.19.0" +version = "6.20.3" description = "web3.py" optional = false python-versions = ">=3.7.2" files = [ - {file = "web3-6.19.0-py3-none-any.whl", hash = "sha256:fb39683d6aa7586ce0ab0be4be392f8acb62c2503958079d61b59f2a0b883718"}, - {file = "web3-6.19.0.tar.gz", hash = "sha256:d27fbd4ac5aa70d0e0c516bd3e3b802fbe74bc159b407c34052d9301b400f757"}, + {file = "web3-6.20.3-py3-none-any.whl", hash = "sha256:529fbb33f2476ce8185f7a2ed7e2e07c4c28621b0e89b845fbfdcaea9571286d"}, + {file = "web3-6.20.3.tar.gz", hash = "sha256:c69dbf1a61ace172741d06990e60afc7f55f303eac087e7235f382df3047d017"}, ] [package.dependencies] aiohttp = ">=3.7.4.post0" +ckzg = "<2" eth-abi = ">=4.0.0" eth-account = ">=0.8.0,<0.13" eth-hash = {version = ">=0.5.1", extras = ["pycryptodome"]} -eth-typing = ">=3.0.0,<4.2.0 || >4.2.0" -eth-utils = ">=2.1.0" +eth-typing = ">=3.0.0,<4.2.0 || >4.2.0,<5.0.0" +eth-utils = ">=2.1.0,<5" hexbytes = ">=0.1.0,<0.4.0" jsonschema = ">=4.0.0" lru-dict = ">=1.1.6,<1.3.0" @@ -3335,10 +3479,10 @@ typing-extensions = ">=4.0.1" websockets = ">=10.0.0" [package.extras] -dev = ["build (>=0.9.0)", "bumpversion", "eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "importlib-metadata (<5.0)", "ipfshttpclient (==0.8.0a2)", "pre-commit (>=2.21.0)", "py-geth (>=3.14.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.21.2,<0.23)", "pytest-mock (>=1.10)", "pytest-watch (>=4.2)", "pytest-xdist (>=1.29)", "setuptools (>=38.6.0)", "sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=3.18.0)", "tqdm (>4.32)", "twine (>=1.13)", "when-changed (>=0.3.0)"] +dev = ["build (>=0.9.0)", "bumpversion", "eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "importlib-metadata (<5.0)", "ipfshttpclient (==0.8.0a2)", "pre-commit (>=2.21.0)", "py-geth (>=3.14.0,<4)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.21.2,<0.23)", "pytest-mock (>=1.10)", "pytest-watch (>=4.2)", "pytest-xdist (>=1.29)", "setuptools (>=38.6.0)", "sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=3.18.0)", "tqdm (>4.32)", "twine (>=1.13)", "when-changed (>=0.3.0)"] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] ipfs = ["ipfshttpclient (==0.8.0a2)"] -tester = ["eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "py-geth (>=3.14.0)"] +tester = ["eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "py-geth (>=3.14.0,<4)"] [[package]] name = "websockets" @@ -3558,4 +3702,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "07a0348e2a44a40f17ba9605b8de32af30fd335a64b6fd9559bce5a3cd821c76" +content-hash = "16f114d4cb7ff5c5e93c64c3feb4a4a27ca7e7998c556380dbec82b6d2668d77" diff --git 
a/backend/pyproject.toml b/backend/pyproject.toml index 12df28a42e..b57281164e 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -30,6 +30,10 @@ gmpy2 = "^2.1.5" sentry-sdk = {extras = ["flask"], version = "^2.5.1"} redis = "^5.0.7" fastapi = "^0.112.0" +mypy = "^1.11.2" +isort = "^5.13.2" +pydantic-settings = "^2.4.0" +uvicorn = "^0.30.6" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" @@ -44,6 +48,10 @@ pyright = "^1.1.366" pylookup = "^0.2.2" importmagic = "^0.1.7" epc = "^0.0.5" +isort = "^5.13.2" +mypy = "^1.11.2" +ruff = "^0.6.2" +aiosqlite = "^0.20.0" [tool.poetry.group.prod] optional = true diff --git a/backend/socket_client.py b/backend/socket_client.py new file mode 100644 index 0000000000..b49fff7767 --- /dev/null +++ b/backend/socket_client.py @@ -0,0 +1,85 @@ +import asyncio +import socketio + +# Create a Socket.IO client +sio = socketio.AsyncClient(logger=True, engineio_logger=True) + + +# Define event handlers +@sio.event +async def connect(): + print(">>>Connected to the server") + + +@sio.event +async def connect_error(data): + print(">>>The connection failed with error:", data) + + +@sio.event +async def disconnect(): + print(">>>I'm disconnected!") + + +# A handler for any event with a wildcard (not all implementations of Socket.IO support this feature directly) +@sio.on("*") +async def catch_all(event, data): + print(f">>>Received an event of type '{event}' with data:", data) + + +@sio.event +async def epoch(data): + print(f">>>Epoch received: {data}") + + +@sio.event +async def project_rewards(data): + print(f"Message received: {data}") + + +@sio.event +async def threshold(data): + print(f"Custom event received: {data}") + + +# Connect to the server +async def main(): + print("Connecting to the server...") + await sio.connect("http://localhost:8000/", wait_timeout=10) + print("Connected. 
Waiting for events...") + # This line will keep the client running and listening for events + + # Emit events + + # Emit a custom event + data = { + "userAddress": "0xb429d71F676f6e804010D8B699EefbF1ed050420", + "payload": { + "allocations": [ + { + "proposalAddress": "0x1c01595f9534E33d411035AE99a4317faeC4f6Fe", + "amount": 100, + }, + { + "proposalAddress": "0x6e8873085530406995170Da467010565968C7C62", + "amount": 200, + }, + ], + "nonce": 0, + "signature": "0x03c0e67cdc612bf1c0a690346805c5f461fbc0a8fe3041b4849c9ddbc939553a53997dfb6578200192e071618d9f054ae68513f134206149acf70ff04cea02931c", + }, + "isManuallyEdited": False, + } + await sio.emit("allocate", data) + + await sio.wait() + + +# Emit events +async def emit_event(event_name, data): + await sio.emit(event_name, data) + + +# Run the client +if __name__ == "__main__": + asyncio.run(main()) diff --git a/backend/startup.py b/backend/startup.py index f6a4b81596..5e3a7b9dc7 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -27,6 +27,7 @@ # Create Flask app flask_app = create_app() + @flask_app.teardown_request def teardown_session(*args, **kwargs): db.session.remove() @@ -55,12 +56,12 @@ async def dispatch(self, request: Request, call_next): # If path exists, proceed with the request return await call_next(request) # If path does not exist, modify the request to forward to the Flask app - if path.startswith('/flask'): + if path.startswith("/flask"): return await call_next(request) - request.scope['path'] = '/flask' + path # Adjust the path as needed + request.scope["path"] = "/flask" + path # Adjust the path as needed response = await call_next(request) return response - + fastapi_app.add_middleware(PathCheckMiddleware) diff --git a/backend/v2/allocations/models.py b/backend/v2/allocations/models.py new file mode 100644 index 0000000000..8f1a4a9fe6 --- /dev/null +++ b/backend/v2/allocations/models.py @@ -0,0 +1,38 @@ +from decimal import Decimal + +from pydantic import BaseModel, ConfigDict + + +class AllocationWithUserUQScore(BaseModel): + model_config = ConfigDict(frozen=True) + + project_address: str + amount: int + user_address: str + user_uq_score: Decimal + + +class AllocationRequest(BaseModel): + model_config = ConfigDict(frozen=True) + + project_address: str + amount: int + + +class UserAllocationRequest(BaseModel): + model_config = ConfigDict(frozen=True) + + user_address: str + allocations: list[AllocationRequest] + nonce: int + signature: str + + is_manually_edited: bool + + +class ProjectDonation(BaseModel): + model_config = ConfigDict(frozen=True) + + amount: int + donor_address: str # user address + project_address: str diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 0a0bb84dc6..48d2c6200e 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -1,18 +1,265 @@ +from datetime import datetime +from decimal import Decimal - -from sqlalchemy import func, select +from app.infrastructure.database.models import Allocation +from app.infrastructure.database.models import AllocationRequest as AllocationRequestDB +from app.infrastructure.database.models import UniquenessQuotient, User +from eth_utils import to_checksum_address +from sqlalchemy import func, select, update from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload +from sqlalchemy.sql.functions import coalesce +from v2.users.repositories import get_user_by_address - -from backend.app.infrastructure.database.models import Allocation 
+from .models import AllocationWithUserUQScore, ProjectDonation, UserAllocationRequest -async def sum_allocations_by_epoch(session: AsyncSession, epoch: int) -> int: - """Get the sum of all allocations for a given epoch. We only consider the allocations that have not been deleted. - """ +async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> int: + """Get the sum of all allocations for a given epoch. We only consider the allocations that have not been deleted.""" result = await session.execute( - select(func.sum(Allocation.amount)).filter(Allocation.epoch == epoch).filter(Allocation.deleted_at.is_(None)) + select(coalesce(func.sum(Allocation.amount), 0)) + .filter(Allocation.epoch == epoch_number) + .filter(Allocation.deleted_at.is_(None)) ) count = result.scalar() - return count \ No newline at end of file + + if count is None: + return 0 + + return count + + +async def get_allocations_with_user_uqs( + session: AsyncSession, epoch_number: int +) -> list[AllocationWithUserUQScore]: + """Get all allocations for a given epoch, including the uniqueness quotients of the users.""" + + # result = await session.execute( + # select(Allocation) + # .filter(Allocation.epoch == epoch) + # .filter(Allocation.deleted_at.is_(None)) + # .options(joinedload(Allocation.user).joinedload(User.uniqueness_quotients)) + # ) + # allocations = result.scalars().all() + + # return [ + # AllocationWithUserUQScore( + # project_address=a.project_address, + # amount=int(a.amount), + # user_address=a.user.address, + # user_uq_score=next( + # ( + # uq.validated_score + # for uq in a.user.uniqueness_quotients + # if uq.epoch == epoch + # ), + # None, + # ), + # ) + # for a in allocations + # ] + + result = await session.execute( + select( + Allocation.project_address, + Allocation.amount, + User.address.label("user_address"), + UniquenessQuotient.score, + ) + .join(User, Allocation.user_id == User.id) + .join(UniquenessQuotient, UniquenessQuotient.user_id == User.id) + .filter(Allocation.epoch == epoch_number) + .filter(Allocation.deleted_at.is_(None)) + .filter(UniquenessQuotient.epoch == epoch_number) + ) + + # result = await session.execute( + # select( + # Allocation.id.label('allocation_id'), + # Allocation.amount.label('allocation_amount'), + # User.id.label('user_id'), + # User.name.label('user_name'), + # UniquenessQuotient.id.label('uq_id'), + # UniquenessQuotient.score.label('uq_score') + # ) + # .join(User, Allocation.user_id == User.id) + # .join(UniquenessQuotient, UniquenessQuotient.user_id == User.id) + # .filter(Allocation.epoch == epoch_number) + # .filter(Allocation.deleted_at.is_(None)) + # .filter(UniquenessQuotient.epoch == epoch_number) + # ) + + rows = result.all() + + return [ + AllocationWithUserUQScore( + project_address=project_address, + amount=amount, + user_address=user_address, + user_uq_score=Decimal(uq_score), + ) + for project_address, amount, user_address, uq_score in rows + ] + + +# allocations = database.allocations.get_all_with_uqs( +# context.epoch_details.epoch_num +# ) + +# def get_all_allocations_with_uqs(epoch: int) -> List[AllocationDTO]: +# allocations = ( +# Allocation.query.filter_by(epoch=epoch) +# .filter(Allocation.deleted_at.is_(None)) +# .options(joinedload(Allocation.user).joinedload(User.uniqueness_quotients)) +# .all() +# ) + +# return [ +# AllocationDTO( +# amount=int(a.amount), +# project_address=a.project_address, +# user_address=a.user.address, +# uq_score=next( +# ( +# uq.validated_score +# for uq in a.user.uniqueness_quotients +# if 
uq.epoch == epoch +# ), +# None, +# ), +# ) +# for a in allocations +# ] + + +async def soft_delete_user_allocations_by_epoch( + session: AsyncSession, + user_address: str, + epoch_number: int, +) -> None: + """Soft delete all user allocations for a given epoch.""" + + # Find all the allocations for the user and epoch that have not been deleted + user = await get_user_by_address(session, user_address) + + if user is None: + return None + + now = datetime.utcnow() + + # Perform a batch update to soft delete the allocations + await session.execute( + update(Allocation) + .where( + Allocation.epoch == epoch_number, + Allocation.user_id == user.id, + Allocation.deleted_at.is_(None), + ) + .values(deleted_at=now) + ) + + +async def store_allocation_request( + session: AsyncSession, + user_address: str, + epoch_number: int, + request: UserAllocationRequest, + leverage: float, +) -> None: + """Store an allocation request in the database.""" + + user = await get_user_by_address(session, user_address) + if user is None: + return None + + new_allocations = [ + Allocation( + epoch=epoch_number, + user_id=user.id, + nonce=request.nonce, + project_address=to_checksum_address(a.project_address), + amount=str(a.amount), + ) + for a in request.allocations + ] + + allocation_request = AllocationRequestDB( + user_id=user.id, + epoch=epoch_number, + nonce=request.nonce, + signature=request.signature, + is_manually_edited=request.is_manually_edited, + leverage=leverage, + ) + + session.add(allocation_request) + session.add_all(new_allocations) + + +async def get_last_allocation_request_nonce( + session: AsyncSession, + user_address: str, +) -> int | None: + """Get the last nonce of the allocation requests for a user.""" + + user = await get_user_by_address(session, user_address) + if user is None: + return None + + result = await session.execute( + select(func.max(AllocationRequestDB.nonce)).filter( + AllocationRequestDB.user_id == user.id + ) + ) + + return result.scalar() + + +async def get_donations_by_project( + session: AsyncSession, + project_address: str, + epoch_number: int, +) -> list[ProjectDonation]: + result = await session.execute( + select(Allocation) + .filter(Allocation.project_address == project_address) + .filter(Allocation.epoch == epoch_number) + .filter(Allocation.deleted_at.is_(None)) + .options(joinedload(Allocation.user)) + ) + + allocations = result.all() + + return [ + ProjectDonation( + amount=int(a.amount), + donor_address=a.user.address, + project_address=a.project_address, + ) + for a in allocations + ] + + # query: Query = Allocation.query.filter_by( + # project_address=to_checksum_address(project_address), epoch=epoch + # ).options(joinedload(Allocation.user)) + + # if not with_deleted: + # query = query.filter(Allocation.deleted_at.is_(None)) + + # return query.all() + + # def get_allocations_by_project( + # self, context: Context, project_address: str + # ) -> List[ProjectDonationDTO]: + # allocations = database.allocations.get_all_by_project_addr_and_epoch( + # project_address, context.epoch_details.epoch_num + # ) + + # return [ + # ProjectDonationDTO( + # donor=a.user.address, amount=int(a.amount), project=project_address + # ) + # for a in allocations + # if int(a.amount) != 0 + # ] diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py new file mode 100644 index 0000000000..5fdea2ea4d --- /dev/null +++ b/backend/v2/allocations/services.py @@ -0,0 +1,315 @@ +from decimal import Decimal + +from app import exceptions +from 
app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing +from sqlalchemy.ext.asyncio import AsyncSession +from v2.crypto.signatures import verify_signed_message +from v2.epochs.subgraphs import EpochsSubgraph +from v2.project_rewards.capped_quadriatic import ( + capped_quadriatic_funding, + cqf_calculate_individual_leverage, +) +from v2.projects.contracts import ProjectsContracts +from v2.projects.services import get_estimated_project_matched_rewards_pending +from v2.uniqueness_quotients.services import get_or_calculate_uq_score +from v2.user_patron_mode.repositories import ( + get_budget_by_user_address_and_epoch, + user_is_patron_with_budget, +) +from v2.users.repositories import get_user_by_address +from web3 import AsyncWeb3 + +from .models import AllocationWithUserUQScore, UserAllocationRequest +from .repositories import ( + get_allocations_with_user_uqs, + get_last_allocation_request_nonce, + soft_delete_user_allocations_by_epoch, + store_allocation_request, +) + + +async def allocate( + # Component dependencies + session: AsyncSession, + projects_contracts: ProjectsContracts, + epochs_subgraph: EpochsSubgraph, + # Arguments + epoch_number: int, + request: UserAllocationRequest, + # Settings + uq_score_threshold: float = 21.0, + low_uq_score: Decimal = Decimal("0.2"), + max_uq_score: Decimal = Decimal("1.0"), + chain_id: int = 11155111, +) -> str: + await verify_logic( + session=session, + epoch_subgraph=epochs_subgraph, + projects_contracts=projects_contracts, + epoch_number=epoch_number, + payload=request, + ) + await verify_signature( + w3=projects_contracts.w3, + chain_id=chain_id, + user_address=request.user_address, + payload=request, + ) + + # Get user + # ? Do we need to get the user here ? + # user = await get_user_by_address(session, request.user_address) + + # Get or calculate UQ score of the user + # TODO: k=v arguments + user_uq_score = await get_or_calculate_uq_score( + session=session, + user_address=request.user_address, + epoch_number=epoch_number, + uq_score_threshold=uq_score_threshold, + max_uq_score=max_uq_score, + low_uq_score=low_uq_score, + ) + + # Calculate leverage by simulating the allocation + new_allocations = [ + AllocationWithUserUQScore( + project_address=a.project_address, + amount=a.amount, + user_address=request.user_address, + user_uq_score=user_uq_score, + ) + for a in request.allocations + ] + leverage = await calculate_leverage( + session=session, + projects=projects_contracts, + epochs_subgraph=epochs_subgraph, + epoch_number=epoch_number, + user_address=request.user_address, + new_allocations=new_allocations, + ) + + await soft_delete_user_allocations_by_epoch( + session, + user_address=request.user_address, + epoch_number=epoch_number, + ) + + # Get user and update allocation nonce + user = await get_user_by_address(session, request.user_address) + if user is None: + raise exceptions.UserNotFound(request.user_address) + + user.allocation_nonce = request.nonce + + await store_allocation_request( + session, + request.user_address, + epoch_number, + request, + leverage=leverage, + ) + + # Commit the transaction + await session.commit() + + return request.user_address + + +async def calculate_leverage( + # Component dependencies + session: AsyncSession, + projects: ProjectsContracts, + epochs_subgraph: EpochsSubgraph, + # Arguments + epoch_number: int, + user_address: str, + new_allocations: list[AllocationWithUserUQScore], +) -> float: + """ + Calculate leverage of the allocation made by the user. 
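+
+    The leverage here is obtained by running capped quadratic funding twice,
+    once without and once with the user's new allocations, and dividing the
+    sum of absolute changes in matched rewards across the affected projects
+    by the total amount the user allocated.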
+ """ + + all_projects = await projects.get_project_addresses(epoch_number) + + matched_rewards = await get_estimated_project_matched_rewards_pending( + session=session, + epochs_subgraph=epochs_subgraph, + epoch_number=epoch_number, + ) + + # Get all allocations before user's allocation + existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) + # Remove allocations made by this user (as they will be removed in a second) + allocations_without_user = [ + a for a in existing_allocations if a.user_address != user_address + ] + + # Calculate funding without user's allocations + before = capped_quadriatic_funding( + allocations=allocations_without_user, + matched_rewards=matched_rewards, + project_addresses=all_projects, + ) + + # Calculate funding with user's allocations + after = capped_quadriatic_funding( + allocations=allocations_without_user + new_allocations, + matched_rewards=matched_rewards, + project_addresses=all_projects, + ) + + # Calculate leverage based on the difference in funding + return cqf_calculate_individual_leverage( + new_allocations_amount=sum(a.amount for a in new_allocations), + project_addresses=[a.project_address for a in new_allocations], + before_allocation_matched=before.matched_by_project, + after_allocation_matched=after.matched_by_project, + ) + + +async def verify_logic( + # Component dependencies + session: AsyncSession, + epoch_subgraph: EpochsSubgraph, + projects_contracts: ProjectsContracts, + # Arguments + epoch_number: int, + payload: UserAllocationRequest, +): + # Check if the epoch is in the decision window + # epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) + # if epoch_details.state != "PENDING": + # raise exceptions.NotInDecision + + # Check if the allocations are not empty + if not payload.allocations: + raise exceptions.EmptyAllocations() + + # Check if the nonce is as expected + expected_nonce = await get_next_user_nonce(session, payload.user_address) + if payload.nonce != expected_nonce: + raise exceptions.WrongAllocationsNonce(payload.nonce, expected_nonce) + + # Check if the user is not a patron + epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) + is_patron = await user_is_patron_with_budget( + session, + payload.user_address, + epoch_number, + epoch_details.finalized_timestamp.datetime(), + ) + if is_patron: + raise exceptions.NotAllowedInPatronMode(payload.user_address) + + # Check if the user is not a project + all_projects = await projects_contracts.get_project_addresses(epoch_number) + if payload.user_address in all_projects: + raise exceptions.ProjectAllocationToSelf() + + project_addresses = [a.project_address for a in payload.allocations] + + # Check if the projects are valid + invalid_projects = set(project_addresses) - set(all_projects) + if invalid_projects: + raise exceptions.InvalidProjects(invalid_projects) + + # Check if there are no duplicates + duplicates = [p for p in project_addresses if project_addresses.count(p) > 1] + if duplicates: + raise exceptions.DuplicatedProjects(duplicates) + + # Get the user's budget + user_budget = await get_budget_by_user_address_and_epoch( + session, payload.user_address, epoch_number + ) + + # if user_budget is None: + # raise exceptions.BudgetNotFound(payload.user_address, epoch_number) + + # # Check if the allocations are within the budget + # if sum(a.amount for a in payload.allocations) > user_budget: + # raise exceptions.RewardsBudgetExceeded() + + +async def get_next_user_nonce( + # Component dependencies + session: 
AsyncSession, + # Arguments + user_address: str, +) -> int: + """ + Get the next expected nonce for the user. + It's a simple increment of the last nonce, or 0 if there is no previous nonce. + """ + # Get the last allocation request of the user + last_allocation_request = await get_last_allocation_request_nonce( + session, user_address + ) + + # Calculate the next nonce + if last_allocation_request is None: + return 0 + + # Increment the last nonce + return last_allocation_request + 1 + + +async def verify_signature( + w3: AsyncWeb3, chain_id: int, user_address: str, payload: UserAllocationRequest +) -> None: + eip712_encoded = build_allocations_eip712_structure(chain_id, payload) + encoded_msg = encode_for_signing(EncodingStandardFor.DATA, eip712_encoded) + + # Verify the signature + is_valid = await verify_signed_message( + w3, user_address, encoded_msg, payload.signature + ) + if not is_valid: + raise exceptions.InvalidSignature(user_address, payload.signature) + + +def build_allocations_eip712_structure(chain_id: int, payload: UserAllocationRequest): + message = {} + message["allocations"] = [ + {"proposalAddress": a.project_address, "amount": a.amount} + for a in payload.allocations + ] + message["nonce"] = payload.nonce # type: ignore + return build_allocations_eip712_data(chain_id, message) + + +def build_allocations_eip712_data(chain_id: int, message: dict) -> dict: + # Convert amount value to int + message["allocations"] = [ + {**allocation, "amount": int(allocation["amount"])} + for allocation in message["allocations"] + ] + + allocation_types = { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + ], + "Allocation": [ + {"name": "proposalAddress", "type": "address"}, + {"name": "amount", "type": "uint256"}, + ], + "AllocationPayload": [ + {"name": "allocations", "type": "Allocation[]"}, + {"name": "nonce", "type": "uint256"}, + ], + } + + return { + "types": allocation_types, + "domain": { + "name": "Octant", + "version": "1.0.0", + "chainId": chain_id, + }, + "primaryType": "AllocationPayload", + "message": message, + } diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 6b84064cb9..508d1a086f 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -1,40 +1,37 @@ -import json import logging from typing import List -from flask import current_app import socketio -# from flask_socketio import emit - from app.engine.projects.rewards import ProjectRewardDTO from app.exceptions import OctantException + # from app.extensions import socketio, epochs from app.infrastructure.exception_handler import UNEXPECTED_EXCEPTION, ExceptionHandler - from app.modules.dto import ProjectDonationDTO -from app.modules.projects.rewards.controller import get_estimated_project_rewards -from app.modules.user.allocations import controller - +from eth_utils import to_checksum_address +from v2.allocations.repositories import get_donations_by_project +from v2.allocations.services import allocate +from v2.core.dependencies import db_getter, get_w3, w3_getter +from v2.epochs.dependencies import epochs_getter, epochs_subgraph_getter, get_epochs +from v2.projects.depdendencies import get_projects, projects_getter +from v2.projects.services import ( + get_estimated_project_rewards, + get_projects_allocation_threshold, +) -from backend.v2.core.dependencies import get_w3 -from backend.v2.epochs.contracts import Epochs -from backend.v2.epochs.dependencies import 
get_epochs -from backend.v2.projects.contracts import Projects - -from backend.v2.projects.depdendencies import get_projects -from backend.v2.projects.services import get_projects_allocation_threshold +from .models import AllocationRequest, UserAllocationRequest class AllocateNamespace(socketio.AsyncNamespace): - def __init__(self, namespace: str): super().__init__(namespace=namespace) - self.w3 = get_w3() - self.settings = get_settings() - self.epochs = get_epochs(self.w3, self.settings.epochs_contract_address) - self.projects = get_projects(self.w3, self.settings.projects_contract_address) - + # self.w3 = w3_getter() + self.epochs_contracts = epochs_getter() + self.epochs_subgraph = epochs_subgraph_getter() + self.projects_contracts = projects_getter() + self.db_session = db_getter() + async def on_connect(self, sid: str, environ: dict): """ Handle client connection @@ -43,9 +40,6 @@ async def on_connect(self, sid: str, environ: dict): print("Type of sid", type(sid)) print("Type of environ", type(environ)) - # socketio.logger.debug("Client connected") - # app_instance = current_app._get_current_object() - logging.debug("Client connected") print("Epochs are here") @@ -53,72 +47,200 @@ async def on_connect(self, sid: str, environ: dict): await self.emit("epoch", {"epoch": "fuckup"}) # We send the data only in PENDING state - pending_epoch_number = await self.epochs.get_pending_epoch() - + pending_epoch_number = await self.epochs_contracts.get_pending_epoch() + + epoch_end = await self.epochs_contracts.get_current_epoch_end() + + print("epocg_end", epoch_end) + print("Pending epoch =", pending_epoch_number) + # We do not handle requests outside of pending epoch state - if pending_epoch_number is None: - return + # if pending_epoch_number is None: + # return + pending_epoch_number = 124 - threshold = await get_projects_allocation_threshold( - session=self.session, # TODO: - projects=self.projects, - epoch_number=pending_epoch_number - ) + async with self.db_session() as session: + threshold = await get_projects_allocation_threshold( + session=session, + projects=self.projects_contracts, + epoch_number=pending_epoch_number, + ) - await self.emit("threshold", {"threshold": str(threshold)}) + await self.emit("threshold", {"threshold": str(threshold)}) - project_rewards = get_estimated_project_rewards().rewards - await self.emit("project_rewards", _serialize_project_rewards(project_rewards)) + project_rewards = await get_estimated_project_rewards( + session=session, + projects=self.projects_contracts, + epochs_subgraph=self.epochs_subgraph, + epoch_number=pending_epoch_number, + ) + rewards = [ + { + "address": project_address, + "allocated": str(project_rewards.amounts_by_project[project_address]), + "matched": str(project_rewards.matched_by_project[project_address]), + } + for project_address in project_rewards.amounts_by_project.keys() + ] - async def on_disconnect(self, sid): + # project_rewards = get_estimated_project_rewards().rewards + await self.emit("project_rewards", rewards) + async def on_disconnect(self, sid): logging.debug("Client disconnected") + async def on_allocate(self, sid: str, data: dict): + """ + Handle allocation request + """ - async def on_allocate(self, sid: str, environ: dict) -> None: + # # We do not handle requests outside of pending epoch state (Allocation Window) + # pending_epoch_number = await self.epochs_contracts.get_pending_epoch() + # if pending_epoch_number is None: + # return - - print("message", msg) - msg = json.loads(msg) + print("message", data, 
type(data)) - print("MEssage", msg) + request = from_dict(data) + pending_epoch_number = 124 - is_manually_edited = msg["isManuallyEdited"] if "isManuallyEdited" in msg else None - user_address = msg["userAddress"] - logging.info(f"User allocation payload: {msg}") - controller.allocate( - user_address, - msg, - is_manually_edited=is_manually_edited, - ) - socketio.logger.info(f"User: {user_address} allocated successfully") + async with self.db_session() as session: + await allocate( + session=session, + projects_contracts=self.projects_contracts, + epochs_subgraph=self.epochs_subgraph, + epoch_number=pending_epoch_number, + request=request, + ) - threshold = get_projects_allocation_threshold() - await self.emit("threshold", {"threshold": str(threshold)}, broadcast=True) + threshold = await get_projects_allocation_threshold( + session=session, + projects=self.projects_contracts, + epoch_number=pending_epoch_number, + ) - project_rewards = get_estimated_project_rewards().rewards - await self.emit( - "project_rewards", - _serialize_project_rewards(project_rewards), - broadcast=True, - ) - for project in project_rewards: - donors = controller.get_all_donations_by_project(project.address) - await self.emit( - "project_donors", - {"project": project.address, "donors": _serialize_donors(donors)}, - broadcast=True, + await self.emit("threshold", {"threshold": str(threshold)}) + project_rewards = await get_estimated_project_rewards( + session=session, + projects=self.projects_contracts, + epochs_subgraph=self.epochs_subgraph, + epoch_number=pending_epoch_number, ) + rewards = [ + { + "address": project_address, + "allocated": str( + project_rewards.amounts_by_project[project_address] + ), + "matched": str(project_rewards.matched_by_project[project_address]), + } + for project_address in project_rewards.amounts_by_project.keys() + ] + + await self.emit("project_rewards", rewards) + + for project_address in project_rewards.amounts_by_project.keys(): + donations = await get_donations_by_project( + session=session, + project_address=project_address, + epoch_number=pending_epoch_number, + ) + + await self.emit( + "project_donors", + {"project": project_address, "donors": donations}, + ) + + # msg = json.loads(msg) + + # print("MEssage", msg) + + # is_manually_edited = data.get("isManuallyEdited", None) + # user_address = data["userAddress"] + # # is_manually_edited = ( + # # msg["isManuallyEdited"] if "isManuallyEdited" in msg else None + # # ) + # logging.info(f"User allocation payload: {msg}") + + # controller.allocate( + # user_address, + # msg, + # is_manually_edited=is_manually_edited, + # ) + # socketio.logger.info(f"User: {user_address} allocated successfully") + + # threshold = get_projects_allocation_threshold() + # await self.emit("threshold", {"threshold": str(threshold)}, broadcast=True) + + # project_rewards = get_estimated_project_rewards().rewards + # await self.emit( + # "project_rewards", + # _serialize_project_rewards(project_rewards), + # broadcast=True, + # ) + # for project in project_rewards: + # donors = controller.get_all_donations_by_project(project.address) + # await self.emit( + # "project_donors", + # {"project": project.address, "donors": _serialize_donors(donors)}, + # broadcast=True, + # ) + + +def from_dict(data: dict) -> UserAllocationRequest: + """ + Example of data: + { + "userAddress": "0x123", + "payload": { + "allocations": [ + { + "proposalAddress": "0x456", + "amount": 100 + }, + { + "proposalAddress": "0x789", + "amount": 200 + } + ], + "nonce": 1, + "signature": 
"0xabc" + }, + "isManuallyEdited": False + } + """ + user_address = to_checksum_address(data["userAddress"]) + payload = data["payload"] + allocations = [ + AllocationRequest( + project_address=to_checksum_address(allocation_data["proposalAddress"]), + amount=allocation_data["amount"], + ) + for allocation_data in payload["allocations"] + ] + nonce = int(payload["nonce"]) + signature = payload.get("signature") + is_manually_edited = data.get("isManuallyEdited", False) + + # fmt: off + return UserAllocationRequest( + user_address = user_address, + allocations = allocations, + nonce = nonce, + signature = signature, + is_manually_edited = is_manually_edited, + ) + # fmt: on + # def state_context(epoch_state: EpochState) -> Context: # epoch_num = get_epoch_number(epoch_state) # return build_context(epoch_num, epoch_state, with_block_range) - # @socketio.on("project_donors") # def handle_project_donors(project_address: str): # print("Project donors") diff --git a/backend/v2/core/contracts.py b/backend/v2/core/contracts.py index 2ffe188ea7..e73c01d134 100644 --- a/backend/v2/core/contracts.py +++ b/backend/v2/core/contracts.py @@ -1,11 +1,11 @@ - +from eth_typing import ChecksumAddress from web3 import AsyncWeb3 from web3.contract import AsyncContract from web3.types import ABI class SmartContract: - def __init__(self, w3: AsyncWeb3, abi: ABI, address: str) -> None: + def __init__(self, w3: AsyncWeb3, abi: ABI, address: ChecksumAddress) -> None: self.abi = abi self.w3 = w3 self.contract: AsyncContract = w3.eth.contract(address=address, abi=abi) diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index 0a7a0af65a..b19f65cdad 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -1,14 +1,49 @@ -from web3 import AsyncWeb3 +from app.infrastructure.database.models import BaseModel +from pydantic import Field +from pydantic_settings import BaseSettings + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.orm import sessionmaker +from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware -# TODO: Cache? -def get_w3(web3_provider: str) -> AsyncWeb3: +class Web3ProviderSettings(BaseSettings): + eth_rpc_provider_url: str - w3 = AsyncWeb3() - w3.provider = web3_provider + +# TODO: Cache? 
+def get_w3(eth_rpc_provider_url: str) -> AsyncWeb3: + w3 = AsyncWeb3(provider=AsyncHTTPProvider(eth_rpc_provider_url)) if async_geth_poa_middleware not in w3.middleware_onion: w3.middleware_onion.inject(async_geth_poa_middleware, layer=0) return w3 + +def w3_getter() -> AsyncWeb3: + settings = Web3ProviderSettings() + return get_w3(settings.eth_rpc_provider_url) + + +class DatabaseSettings(BaseSettings): + sqlalchemy_database_uri: str = Field(validation_alias="db_uri") + # TODO other settings of the database + + +async def create_tables(): + settings = DatabaseSettings() + engine = create_async_engine(settings.sqlalchemy_database_uri) + async with engine.begin() as conn: + await conn.run_sync(BaseModel.metadata.create_all) + + +def get_db_engine(database_uri: str) -> async_sessionmaker[AsyncSession]: + engine = create_async_engine(database_uri) + + return sessionmaker(bind=engine, class_=AsyncSession) + + +def db_getter() -> async_sessionmaker[AsyncSession]: + settings = DatabaseSettings() + return get_db_engine(settings.sqlalchemy_database_uri) diff --git a/backend/v2/crypto/__init__.py b/backend/v2/crypto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/crypto/contracts.py b/backend/v2/crypto/contracts.py new file mode 100644 index 0000000000..101d1584b4 --- /dev/null +++ b/backend/v2/crypto/contracts.py @@ -0,0 +1,40 @@ +import logging + +from app.constants import EIP1271_MAGIC_VALUE_BYTES +from v2.core.contracts import SmartContract + + +class GnosisSafeContracts(SmartContract): + async def is_valid_signature(self, msg_hash: str, signature: str) -> bool: + logging.info( + f"[Gnosis Safe Contract] checking if a message with hash: {msg_hash} is already signed by {self.contract.address}" + ) + + result = await self.contract.functions.isValidSignature( + msg_hash, signature + ).call() + return result == bytes.fromhex(EIP1271_MAGIC_VALUE_BYTES) + + async def get_message_hash(self, message: bytes) -> str: + return await self.contract.functions.getMessageHash(message).call() + + +GNOSIS_SAFE = [ + { + "inputs": [ + {"internalType": "bytes", "name": "_data", "type": "bytes"}, + {"internalType": "bytes", "name": "_signature", "type": "bytes"}, + ], + "name": "isValidSignature", + "outputs": [{"internalType": "bytes4", "name": "", "type": "bytes4"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [{"internalType": "bytes", "name": "message", "type": "bytes"}], + "name": "getMessageHash", + "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], + "stateMutability": "view", + "type": "function", + }, +] diff --git a/backend/v2/crypto/signatures.py b/backend/v2/crypto/signatures.py new file mode 100644 index 0000000000..85409203d2 --- /dev/null +++ b/backend/v2/crypto/signatures.py @@ -0,0 +1,63 @@ +from eth_account import Account +from eth_account.messages import SignableMessage, _hash_eip191_message +from eth_keys.exceptions import BadSignature +from eth_utils import to_checksum_address +from v2.crypto.contracts import GNOSIS_SAFE, GnosisSafeContracts +from web3 import AsyncWeb3 +from web3.exceptions import ContractLogicError + + +async def verify_signed_message( + w3: AsyncWeb3, + user_address: str, + encoded_msg: SignableMessage, + signature: str, +) -> bool: + contract = await is_contract(w3, user_address) + if contract: + return await _verify_multisig(w3, user_address, encoded_msg, signature) + + return _verify_eoa(user_address, encoded_msg, signature) + + +async def is_contract(w3: AsyncWeb3, address: str) -> 
bool: + """ + Check if the given address is a contract. + + Args: + - address (str): Ethereum address to check. + """ + address = to_checksum_address(address) + is_address = w3.is_address(address) + + if not is_address: + raise ValueError(f"{address} is not a valid Ethereum address!") + + code = await w3.eth.get_code(address) + + return code.hex() != "0x" + + +def hash_signable_message(encoded_msg: SignableMessage) -> str: + return "0x" + _hash_eip191_message(encoded_msg).hex() + + +async def _verify_multisig( + w3: AsyncWeb3, user_address: str, encoded_msg: SignableMessage, signature: str +) -> bool: + msg_hash = hash_signable_message(encoded_msg) + try: + gnosis_safe = GnosisSafeContracts(w3=w3, abi=GNOSIS_SAFE, address=user_address) + return await gnosis_safe.is_valid_signature(msg_hash, signature) + except ContractLogicError: + return False + + +def _verify_eoa( + user_address: str, encoded_msg: SignableMessage, signature: str +) -> bool: + try: + recovered_address = Account.recover_message(encoded_msg, signature=signature) + except BadSignature: + return False + return recovered_address == user_address diff --git a/backend/v2/epoch_snapshots/__init__.py b/backend/v2/epoch_snapshots/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/epoch_snapshots/repositories.py b/backend/v2/epoch_snapshots/repositories.py new file mode 100644 index 0000000000..b1e3117d90 --- /dev/null +++ b/backend/v2/epoch_snapshots/repositories.py @@ -0,0 +1,12 @@ +from app.infrastructure.database.models import PendingEpochSnapshot +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + + +async def get_pending_epoch_snapshot( + session: AsyncSession, epoch_number: int +) -> PendingEpochSnapshot | None: + result = await session.execute( + select(PendingEpochSnapshot).filter(PendingEpochSnapshot.epoch == epoch_number) + ) + return result.scalar_one_or_none() diff --git a/backend/v2/epochs/contracts.py b/backend/v2/epochs/contracts.py index a4614f7fd7..565d353c20 100644 --- a/backend/v2/epochs/contracts.py +++ b/backend/v2/epochs/contracts.py @@ -1,14 +1,11 @@ -from typing import Dict, Literal, Optional, TypedDict - -from web3 import exceptions - import logging +from typing import Dict, Optional -from backend.v2.core.contracts import SmartContract - +from v2.core.contracts import SmartContract +from web3 import exceptions -class Epochs(SmartContract): +class EpochsContracts(SmartContract): async def is_decision_window_open(self) -> bool: logging.debug("[Epochs contract] Checking if decision window is open") return await self.contract.functions.isDecisionWindowOpen().call() @@ -152,4 +149,4 @@ async def start(self) -> int: "stateMutability": "view", "type": "function", }, -] \ No newline at end of file +] diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py index a32f25b813..712f2d9da6 100644 --- a/backend/v2/epochs/dependencies.py +++ b/backend/v2/epochs/dependencies.py @@ -1,8 +1,36 @@ +from typing import Callable + +from pydantic_settings import BaseSettings +from v2.core.dependencies import w3_getter from web3 import AsyncWeb3 -from .contracts import Epochs, EPOCHS_ABI + +from .contracts import EPOCHS_ABI, EpochsContracts +from .subgraphs import EpochsSubgraph + + +class EpochsSettings(BaseSettings): + epochs_contract_address: str # TODO: cache -def get_epochs(w3: AsyncWeb3, epochs_contract_address: str) -> Epochs: +def get_epochs(w3: AsyncWeb3, epochs_contract_address: str) -> EpochsContracts: + return EpochsContracts(w3, 
EPOCHS_ABI, epochs_contract_address) # type: ignore + + +def epochs_getter() -> EpochsContracts: + settings = EpochsSettings() # type: ignore + return get_epochs(w3_getter(), settings.epochs_contract_address) + + +getter = Callable[[], EpochsContracts] + + +class EpochsSubgraphSettings(BaseSettings): + subgraph_endpoint: str + + # url = config["SUBGRAPH_ENDPOINT"] + - return Epochs(w3, EPOCHS_ABI, epochs_contract_address) +def epochs_subgraph_getter() -> EpochsSubgraph: + settings = EpochsSubgraphSettings() # type: ignore + return EpochsSubgraph(settings.subgraph_endpoint) diff --git a/backend/v2/epochs/repositories.py b/backend/v2/epochs/repositories.py deleted file mode 100644 index 33aa9178a2..0000000000 --- a/backend/v2/epochs/repositories.py +++ /dev/null @@ -1,12 +0,0 @@ - - -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - -from backend.app.infrastructure.database.models import PendingEpochSnapshot - - -async def get_pending_epoch_snapshot_by_epoch(session: AsyncSession, epoch: int) -> PendingEpochSnapshot | None: - - result = await session.execute(select(PendingEpochSnapshot).filter(PendingEpochSnapshot.epoch == epoch)) - return result.scalars().first() diff --git a/backend/v2/epochs/subgraphs.py b/backend/v2/epochs/subgraphs.py new file mode 100644 index 0000000000..d7f8e6cf72 --- /dev/null +++ b/backend/v2/epochs/subgraphs.py @@ -0,0 +1,136 @@ +import logging +from dataclasses import dataclass +from typing import Callable, Sequence, Type, Union + +import backoff +from app import exceptions +from app.context.epoch.details import EpochDetails +from gql import Client, gql +from gql.transport.aiohttp import AIOHTTPTransport +from gql.transport.exceptions import TransportQueryError + +# def lookup_max_time(): +# return config.SUBGRAPH_RETRY_TIMEOUT_SEC + + +exception_type = TransportQueryError + + +def is_graph_error_permanent(error: TransportQueryError) -> bool: + # TODO: if we differentiate between reasons for the error, + # we can differentiate between transient and permanent ones, + # so we can return True for permanent ones saving + # up to SUBGRAPH_RETRY_TIMEOUT_SEC. + # Look for these prints in logs and find + # "the chain was reorganized while executing the query" line. 
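+    # One possible refinement (a sketch, not implemented here): classify only
+    # the reorg case as transient and everything else as permanent, e.g.
+    #   messages = [e.get("message", "") for e in (error.errors or [])]
+    #   return not any("reorganized" in m for m in messages)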
+ logging.debug("going through giveup...") + logging.debug(f"got TransportQueryError.query_id: {error.query_id}") + logging.debug(f"got TransportQueryError.errors: {error.errors}") + logging.debug(f"got TransportQueryError.data: {error.data}") + logging.debug(f"got TransportQueryError.extensions: {error.extensions}") + return False + + +# url = config["SUBGRAPH_ENDPOINT"] + + +@dataclass +class BackoffParams: + exception: Union[Type[Exception], Sequence[Type[Exception]]] + max_time: int + giveup: Callable[[Exception], bool] = lambda e: False + + +class EpochsSubgraph: + def __init__( + self, + url: str, + backoff_params: BackoffParams | None = None, + ): + self.url = url + self.gql_client = Client( + transport=AIOHTTPTransport(url=self.url, timeout=2), + fetch_schema_from_transport=False, + ) + + if backoff_params is not None: + backoff_decorator = backoff.on_exception( + backoff.expo, + backoff_params.exception, + max_time=backoff_params.max_time, + giveup=backoff_params.giveup, + ) + + self.gql_client.execute_async = backoff_decorator( + self.gql_client.execute_async + ) + + async def get_epoch_by_number(self, epoch_number: int) -> EpochDetails: + """Get EpochDetails from the subgraph for a given epoch number.""" + + logging.debug( + f"[Subgraph] Getting epoch properties for epoch number: {epoch_number}" + ) + + # Prepare query and variables + query = gql( + """\ + query GetEpoch($epochNo: Int!) { + epoches(where: {epoch: $epochNo}) { + epoch + fromTs + toTs + duration + decisionWindow + } + } + """ + ) + variables = {"epochNo": epoch_number} + + # Execute query + response = await self.gql_client.execute_async(query, variable_values=variables) + + # Raise exception if no data received + data = response["epoches"] + if not data: + logging.warning( + f"[Subgraph] No epoch properties received for epoch number: {epoch_number}" + ) + raise exceptions.EpochNotIndexed(epoch_number) + + # Parse response and return result + logging.debug(f"[Subgraph] Received epoch properties: {data[0]}") + + epoch_details = data[0] + + return EpochDetails( + epoch_num=epoch_details["epoch"], + start=epoch_details["fromTs"], + duration=epoch_details["duration"], + decision_window=epoch_details["decisionWindow"], + remaining_sec=0, + ) + + +# def get_epochs(): +# query = gql( +# """ +# query { +# epoches(first: 1000) { +# epoch +# fromTs +# toTs +# } +# _meta { +# block { +# number +# } +# } +# } +# """ +# ) + +# app.logger.debug("[Subgraph] Getting list of all epochs") +# data = gql_factory.build().execute(query) +# return data diff --git a/backend/v2/gitcoin_passport/__init__.py b/backend/v2/gitcoin_passport/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/gitcoin_passport/repositories.py b/backend/v2/gitcoin_passport/repositories.py new file mode 100644 index 0000000000..272a129908 --- /dev/null +++ b/backend/v2/gitcoin_passport/repositories.py @@ -0,0 +1,19 @@ +from app.infrastructure.database.models import GPStamps, User +from eth_utils import to_checksum_address +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + + +async def get_gp_stamps_by_address( + session: AsyncSession, user_address: str +) -> GPStamps | None: + """Gets the latest GitcoinPassport Stamps record for a user.""" + + result = await session.execute( + select(GPStamps) + .join(User) + .filter(User.address == to_checksum_address(user_address)) + .order_by(GPStamps.created_at.desc()) + ) + + return result.scalar_one_or_none() diff --git a/backend/v2/gitcoin_passport/services.py 
b/backend/v2/gitcoin_passport/services.py new file mode 100644 index 0000000000..786c3cbe9f --- /dev/null +++ b/backend/v2/gitcoin_passport/services.py @@ -0,0 +1,25 @@ +from app.constants import GUEST_LIST +from app.modules.user.antisybil.service.initial import _has_guest_stamp_applied_by_gp +from eth_utils import to_checksum_address +from sqlalchemy.ext.asyncio import AsyncSession + +from .repositories import get_gp_stamps_by_address + + +async def get_gitcoin_passport_score(session: AsyncSession, user_address: str) -> float: + """Gets saved Gitcoin Passport score for a user. + Returns None if the score is not saved. + If the user is in the GUEST_LIST, the score will be adjusted to include the guest stamp. + """ + + user_address = to_checksum_address(user_address) + + stamps = await get_gp_stamps_by_address(session, user_address) + + if stamps is None: + return 0.0 + + if user_address in GUEST_LIST and not _has_guest_stamp_applied_by_gp(stamps): + return stamps.score + 21.0 + + return stamps.score diff --git a/backend/v2/main.py b/backend/v2/main.py index d9d51cb01e..4fb429fed3 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -1,26 +1,25 @@ - - # Create FastAPI app +import socketio from fastapi import FastAPI - from v2.allocations.socket import AllocateNamespace - -import socketio - - +from v2.core.dependencies import create_tables fastapi_app = FastAPI() + @fastapi_app.get("/fastapi-endpoint") async def fastapi_endpoint(): return {"message": "This is a FastAPI endpoint."} -sio=socketio.AsyncServer(cors_allowed_origins='*',async_mode='asgi') -sio.register_namespace(AllocateNamespace('/')) +sio = socketio.AsyncServer(cors_allowed_origins="*", async_mode="asgi") +sio.register_namespace(AllocateNamespace("/")) sio_asgi_app = socketio.ASGIApp(socketio_server=sio, other_asgi_app=fastapi_app) # app.mount("/static", StaticFiles(directory="static"), name="static") # fastapi_app.mount("/", sio_asgi_app) fastapi_app.add_route("/socket.io/", route=sio_asgi_app) -fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) \ No newline at end of file +fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) + + +fastapi_app.add_event_handler("startup", create_tables) diff --git a/backend/v2/project_rewards/__init__.py b/backend/v2/project_rewards/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/project_rewards/capped_quadriatic.py b/backend/v2/project_rewards/capped_quadriatic.py new file mode 100644 index 0000000000..4e5d1db12b --- /dev/null +++ b/backend/v2/project_rewards/capped_quadriatic.py @@ -0,0 +1,138 @@ +from collections import defaultdict +from decimal import Decimal +from math import sqrt +from typing import Dict, NamedTuple + +from v2.allocations.models import AllocationWithUserUQScore + + +class CappedQuadriaticFunding(NamedTuple): + amounts_by_project: Dict[ + str, Decimal + ] # Sum of all allocation amounts for each project + matched_by_project: Dict[str, Decimal] # Sum of matched rewards for each project + amounts_total: Decimal # Sum of all allocation amounts for all projects + matched_total: Decimal # Sum of all matched rewards for all projects + + +MR_FUNDING_CAP_PERCENT = Decimal("0.2") + + +def capped_quadriatic_funding( + allocations: list[AllocationWithUserUQScore], + matched_rewards: int, + project_addresses: list[str], + MR_FUNDING_CAP_PERCENT: Decimal = MR_FUNDING_CAP_PERCENT, +) -> CappedQuadriaticFunding: + """ + Calculate capped quadratic funding based on a list of allocations. 
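+
+    For each project, the quadratic funding score is
+    (sum_i sqrt(uq_i * amount_i)) ** 2 over its allocations; matched rewards
+    are then split proportionally to these scores, with each project's share
+    capped at MR_FUNDING_CAP_PERCENT of the total matched rewards.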
+ + Args: + allocations (list[AllocationItem]): A list of allocation items, each containing a project address and an amount. + matched_rewards (int): The total amount of matched rewards available for distribution. + project_addresses (list[str] | None, optional): A list of project addresses to consider. If None, all projects in allocations are considered. Defaults to None. + MR_FUNDING_CAP_PERCENT (float, optional): The maximum percentage of matched rewards that any single project can receive. Defaults to MR_FUNDING_CAP_PERCENT. + + Returns: + CappedQuadriaticFunding: A named tuple containing the total and per-project amounts and matched rewards. + """ + + # Group allocations by project + per_project_allocations: Dict[str, list[AllocationWithUserUQScore]] = defaultdict( + list + ) + for allocation in allocations: + per_project_allocations[allocation.project_address].append(allocation) + + # Variables necessary for calculation of quadratic funding + total_qf = Decimal(0) + qf_by_project: Dict[str, Decimal] = {} + + # Aggregate variables for amounts & matched rewards + amount_by_project: Dict[str, Decimal] = { + project_address: Decimal(0) for project_address in project_addresses + } + matched_by_project: Dict[str, Decimal] = { + project_address: Decimal(0) for project_address in project_addresses + } + matched_total = Decimal(0) + amounts_total = Decimal(0) + + # Calculate quadratic funding for each project + for project_address, allocations in per_project_allocations.items(): + qf = ( + sum( + ( + Decimal(sqrt(allocation.user_uq_score * allocation.amount)) + for allocation in allocations + ), + start=Decimal(0), + ) + ** 2 + ) + + total_qf += qf + qf_by_project[project_address] = qf + + # Aggregate amount by project + sum_amount = sum( + (Decimal(allocation.amount) for allocation in allocations), start=Decimal(0) + ) + amount_by_project[project_address] = sum_amount + amounts_total += sum_amount + + # Calculate funding cap + max_matched_reward = matched_rewards * MR_FUNDING_CAP_PERCENT + + # Calculate matched rewards for each project + for project_address, qf in qf_by_project.items(): + # Calculate matched rewards as proportion of quadratic funding + matched = qf / total_qf * matched_rewards if total_qf != 0 else Decimal(0) + + # Apply funding cap + matched_capped = min(matched, max_matched_reward) + + # Update matched rewards and total rewards + matched_by_project[project_address] = matched_capped + matched_total += matched_capped + + return CappedQuadriaticFunding( + amounts_by_project=amount_by_project, + matched_by_project=matched_by_project, + amounts_total=total_qf, + matched_total=matched_total, + ) + + +def cqf_calculate_total_leverage(matched_rewards: int, total_allocated: int) -> float: + if total_allocated == 0: + return 0.0 + + return matched_rewards / total_allocated + + +def cqf_calculate_individual_leverage( + new_allocations_amount: int, + project_addresses: list[str], + before_allocation_matched: Dict[str, Decimal], + after_allocation_matched: Dict[str, Decimal], +) -> float: + """Calculate the leverage of a user's new allocations in capped quadratic funding. + + This is a ratio of the sum of the absolute differences between the capped matched rewards before and after the user's allocation, to the total amount of the user's new allocations. 
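+
+    Illustrative example (made-up numbers): a user allocates 100 to a single
+    project and that project's capped matched rewards move from 400 to 650;
+    the leverage is abs(650 - 400) / 100 = 2.5.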
+ """ + + if new_allocations_amount == 0: + return 0.0 + + total_difference = Decimal(0) + for project_address in project_addresses: + before = before_allocation_matched.get(project_address, 0) + after = after_allocation_matched[project_address] + + difference = abs(before - after) + total_difference += difference + + leverage = total_difference / new_allocations_amount + + return float(leverage) diff --git a/backend/v2/projects/contracts.py b/backend/v2/projects/contracts.py index 09f28afe03..e2ae34df55 100644 --- a/backend/v2/projects/contracts.py +++ b/backend/v2/projects/contracts.py @@ -1,14 +1,14 @@ - import logging -from backend.v2.core.contracts import SmartContract + +from v2.core.contracts import SmartContract -class Projects(SmartContract): - async def get_project_addresses(self, epoch: int) -> list[str]: +class ProjectsContracts(SmartContract): + async def get_project_addresses(self, epoch_number: int) -> list[str]: logging.debug( - f"[Projects contract] Getting project addresses for epoch: {epoch}" + f"[Projects contract] Getting project addresses for epoch: {epoch_number}" ) - return await self.contract.functions.getProposalAddresses(epoch).call() + return await self.contract.functions.getProposalAddresses(epoch_number).call() async def get_project_cid(self): logging.debug("[Projects contract] Getting projects CID") diff --git a/backend/v2/projects/depdendencies.py b/backend/v2/projects/depdendencies.py index a59bf553ca..49869386d8 100644 --- a/backend/v2/projects/depdendencies.py +++ b/backend/v2/projects/depdendencies.py @@ -1,9 +1,22 @@ +from pydantic import Field +from pydantic_settings import BaseSettings +from v2.core.dependencies import w3_getter from web3 import AsyncWeb3 -from .contracts import Projects, PROJECTS_ABI + +from .contracts import PROJECTS_ABI, ProjectsContracts + + +class ProjectsSettings(BaseSettings): + projects_contract_address: str = Field( + validation_alias="proposals_contract_address" + ) # TODO: cache -def get_projects(w3: AsyncWeb3, projects_contract_address: str) -> Projects: +def get_projects(w3: AsyncWeb3, projects_contract_address: str) -> ProjectsContracts: + return ProjectsContracts(w3, PROJECTS_ABI, projects_contract_address) # type: ignore + - # projects.init_web3(w3, app.config["PROJECTS_CONTRACT_ADDRESS"]) - return Projects(w3, PROJECTS_ABI, projects_contract_address) +def projects_getter() -> ProjectsContracts: + settings = ProjectsSettings() # type: ignore + return get_projects(w3_getter(), settings.projects_contract_address) diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py index ae9984aef9..7db0286f96 100644 --- a/backend/v2/projects/services.py +++ b/backend/v2/projects/services.py @@ -1,34 +1,44 @@ - - +from decimal import Decimal from sqlalchemy.ext.asyncio import AsyncSession - -from backend.v2.allocations.repositories import sum_allocations_by_epoch -from backend.v2.epochs.repositories import get_pending_epoch_snapshot_by_epoch -from backend.v2.projects.contracts import Projects - +from v2.allocations.repositories import ( + get_allocations_with_user_uqs, + sum_allocations_by_epoch, +) +from v2.epoch_snapshots.repositories import get_pending_epoch_snapshot +from v2.epochs.subgraphs import EpochsSubgraph +from v2.project_rewards.capped_quadriatic import ( + CappedQuadriaticFunding, + capped_quadriatic_funding, +) +from v2.projects.contracts import ProjectsContracts +from v2.user_patron_mode.repositories import get_patrons_rewards async def get_projects_allocation_threshold( # Dependencies session: 
AsyncSession, - projects: Projects, + projects: ProjectsContracts, # Arguments epoch_number: int, project_count_multiplier: int = 1, ) -> int: - # PROJECTS_COUNT_MULTIPLIER = 1 # TODO: from settings? total_allocated = await sum_allocations_by_epoch(session, epoch_number) project_addresses = await projects.get_project_addresses(epoch_number) - return _calculate_threshold(total_allocated, len(project_addresses), project_count_multiplier) + print("total_allocated", total_allocated) + print("project_addresses", project_addresses) + + return _calculate_threshold( + total_allocated, len(project_addresses), project_count_multiplier + ) def _calculate_threshold( total_allocated: int, - projects_count: int, + projects_count: int, project_count_multiplier: int, ) -> int: return ( @@ -38,86 +48,85 @@ def _calculate_threshold( ) - -async def get_estimated_project_rewards( +async def get_estimated_project_rewards( # Dependencies session: AsyncSession, - projects: Projects, - + projects: ProjectsContracts, + epochs_subgraph: EpochsSubgraph, # Arguments epoch_number: int, -) -> int: - pass - +) -> CappedQuadriaticFunding: + # project_settings project is ProjectSettings all_projects = await projects.get_project_addresses(epoch_number) + matched_rewards = await get_estimated_project_matched_rewards_pending( + session, + epochs_subgraph=epochs_subgraph, + epoch_number=epoch_number, + ) + allocations = await get_allocations_with_user_uqs(session, epoch_number) + + return capped_quadriatic_funding( + project_addresses=all_projects, + allocations=allocations, + matched_rewards=matched_rewards, + ) + + +TR_PERCENT = Decimal("0.7") +IRE_PERCENT = Decimal("0.35") +MATCHED_REWARDS_PERCENT = Decimal("0.35") + async def get_estimated_project_matched_rewards_pending( # Dependencies session: AsyncSession, - projects: Projects, - + epochs_subgraph: EpochsSubgraph, + # projects: Projects, # Arguments epoch_number: int, ) -> int: - -# pending_snapshot = await get_pending_epoch_snapshot_by_epoch(session, epoch_number) - -# patrons_rewards = - -# def get_patrons_rewards(self, context: Context) -> int: -# epoch = context.epoch_details -# patrons = database.patrons.get_all_patrons_at_timestamp( -# epoch.finalized_timestamp.datetime() -# ) -# return database.budgets.get_sum_by_users_addresses_and_epoch( -# patrons, epoch.epoch_num -# ) - - -# def get_matched_rewards() -# patrons_mode: UserPatronMode - -# def get_matched_rewards(self, context: Context) -> int: -# pending_snapshot = database.pending_epoch_snapshot.get_by_epoch( -# context.epoch_details.epoch_num -# ) -# patrons_rewards = self.patrons_mode.get_patrons_rewards(context) -# matched_rewards_settings = context.epoch_settings.octant_rewards.matched_rewards - -# return matched_rewards_settings.calculate_matched_rewards( -# MatchedRewardsPayload( -# total_rewards=int(pending_snapshot.total_rewards), -# vanilla_individual_rewards=int( -# pending_snapshot.vanilla_individual_rewards -# ), -# patrons_rewards=patrons_rewards, -# staking_proceeds=int(pending_snapshot.eth_proceeds), -# locked_ratio=Decimal(pending_snapshot.locked_ratio), -# ire_percent=context.epoch_settings.octant_rewards.total_and_vanilla_individual_rewards.IRE_PERCENT, -# tr_percent=context.epoch_settings.octant_rewards.total_and_vanilla_individual_rewards.TR_PERCENT, -# ) -# ) - - -# project_rewards = get_estimated_project_rewards().rewards - -# def get_project_rewards(self, context: Context) -> ProjectRewardsResult: -# project_settings = context.epoch_settings.project # .rewards 
CappedQuadraticFundingProjectRewards -# # all_projects = context.projects_details.projects -# matched_rewards = self.octant_rewards.get_matched_rewards(context) -# allocations = database.allocations.get_all_with_uqs( -# context.epoch_details.epoch_num -# ) - -# allocations_payload = AllocationsPayload( -# before_allocations=allocations, user_new_allocations=[] -# ) -# projects_rewards = get_projects_rewards( -# project_settings, -# allocations_payload, -# all_projects, -# matched_rewards, -# ) - -# return projects_rewards + pending_snapshot = await get_pending_epoch_snapshot(session, epoch_number) + # if pending_snapshot is None: + # raise ValueError(f"No pending snapshot for epoch {epoch_number}") + + from app.infrastructure.database.models import PendingEpochSnapshot + + pending_snapshot = PendingEpochSnapshot( + eth_proceeds="412042049081445321216", + locked_ratio="0.094755727584613854218098688", + ) + + epoch_details = await epochs_subgraph.get_epoch_by_number(epoch_number) + patrons_rewards = await get_patrons_rewards( + session, epoch_details.finalized_timestamp.datetime(), epoch_number + ) + + # fmt: off + return _calculate_percentage_matched_rewards( + locked_ratio = Decimal(pending_snapshot.locked_ratio), + tr_percent = TR_PERCENT, + ire_percent = IRE_PERCENT, + staking_proceeds = int(pending_snapshot.eth_proceeds), + patrons_rewards = patrons_rewards, + matched_rewards_percent = MATCHED_REWARDS_PERCENT, + ) + # fmt: on + + +def _calculate_percentage_matched_rewards( + locked_ratio: Decimal, + tr_percent: Decimal, + ire_percent: Decimal, + staking_proceeds: int, + patrons_rewards: int, + matched_rewards_percent: Decimal, # Config +) -> int: + if locked_ratio > tr_percent: + raise ValueError("Invalid Strategy - locked_ratio > tr_percent") + + if locked_ratio < ire_percent: + return int(matched_rewards_percent * staking_proceeds + patrons_rewards) + elif ire_percent <= locked_ratio < tr_percent: + return int((tr_percent - locked_ratio) * staking_proceeds + patrons_rewards) + return patrons_rewards diff --git a/backend/v2/uniqueness_quotients/__init__.py b/backend/v2/uniqueness_quotients/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py new file mode 100644 index 0000000000..14e485fc28 --- /dev/null +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -0,0 +1,45 @@ +from decimal import Decimal +from typing import Optional + +from app.infrastructure.database.models import UniquenessQuotient, User +from eth_utils import to_checksum_address +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from v2.users.repositories import get_user_by_address + + +async def get_uq_score_by_user_address( + session: AsyncSession, user_address: str, epoch_number: int +) -> Optional[Decimal]: + """Returns saved UQ score for a user in a given epoch. + None if the UQ score is not saved (allocation not made yet). 
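+
+    A minimal usage sketch; the session object and the address below are
+    illustrative placeholders, not values taken from this codebase:
+
+        score = await get_uq_score_by_user_address(session, "0x1234...", 4)
+        if score is None:
+            ...  # the user has not allocated in this epoch yet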
+ """ + + result = await session.execute( + select(UniquenessQuotient) + .join(User) + .filter(User.address == to_checksum_address(user_address)) + .filter(UniquenessQuotient.epoch == epoch_number) + ) + + uq = result.scalars().first() + return uq.validated_score if uq else None + + +async def save_uq_score_for_user_address( + session: AsyncSession, user_address: str, epoch_number: int, score: Decimal +): + """Saves UQ score for a user in a given epoch.""" + + user = await get_user_by_address(session, user_address) + + if not user: + return None + + uq_score = UniquenessQuotient( + epoch=epoch_number, + user_id=user.id, + score=str(score), + ) + + session.add(uq_score) diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py new file mode 100644 index 0000000000..aaafd05228 --- /dev/null +++ b/backend/v2/uniqueness_quotients/services.py @@ -0,0 +1,60 @@ +from decimal import Decimal + +from sqlalchemy.ext.asyncio import AsyncSession +from v2.gitcoin_passport.services import get_gitcoin_passport_score + +from .repositories import get_uq_score_by_user_address, save_uq_score_for_user_address + +LOW_UQ_SCORE = Decimal("0.2") +MAX_UQ_SCORE = Decimal("1.0") + + +def calculate_uq_score( + gp_score: float, + uq_score_threshold: float, + max_uq_score: Decimal = MAX_UQ_SCORE, + low_uq_score: Decimal = LOW_UQ_SCORE, +) -> Decimal: + """Calculate UQ score (multiplier) based on the GP score and the UQ score threshold. + If the GP score is greater than or equal to the UQ score threshold, the UQ score is set to the maximum UQ score. + Otherwise, the UQ score is set to the low UQ score. + + Args: + gp_score (float): The GitcoinPassport antisybil score. + uq_score_threshold (int): Anything below this threshold will be considered low UQ score, and anything above will be considered maximum UQ score. + """ + + if gp_score >= uq_score_threshold: + return max_uq_score + + return low_uq_score + + +async def get_or_calculate_uq_score( + session: AsyncSession, + user_address: str, + epoch_number: int, + uq_score_threshold: float, + max_uq_score: Decimal = MAX_UQ_SCORE, + low_uq_score: Decimal = LOW_UQ_SCORE, +) -> Decimal: + """Get or calculate the UQ score for a user in a given epoch. + If the UQ score is already calculated, it will be returned. + Otherwise, it will be calculated based on the Gitcoin Passport score and saved for future reference. 
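+
+    Illustrative call; the address and threshold value below are assumptions
+    used only for this example:
+
+        uq_score = await get_or_calculate_uq_score(
+            session, "0xabc...", epoch_number=4, uq_score_threshold=15.0
+        )
+        # returns MAX_UQ_SCORE (1.0) or LOW_UQ_SCORE (0.2) as a Decimal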
+ """ + + # Check if the UQ score is already calculated and saved + uq_score = await get_uq_score_by_user_address(session, user_address, epoch_number) + if uq_score: + return uq_score + + # Otherwise, calculate the UQ score based on the gitcoin passport score + gp_score = await get_gitcoin_passport_score(session, user_address) + uq_score = calculate_uq_score( + gp_score, uq_score_threshold, max_uq_score, low_uq_score + ) + + # and save the UQ score for future reference + await save_uq_score_for_user_address(session, user_address, epoch_number, uq_score) + + return uq_score diff --git a/backend/v2/user_patron_mode/__init__.py b/backend/v2/user_patron_mode/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py new file mode 100644 index 0000000000..38c16c174f --- /dev/null +++ b/backend/v2/user_patron_mode/repositories.py @@ -0,0 +1,116 @@ +from datetime import datetime +from typing import List + +from app.infrastructure.database.models import Budget, PatronModeEvent, User +from sqlalchemy import Integer, cast, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import aliased +from v2.users.repositories import get_user_by_address + + +async def get_all_patrons_at_timestamp( + session: AsyncSession, dt: datetime +) -> List[str]: + """ + From PatronModeEvent table, get all the user addresses that have patron_mode_enabled=True at a given timestamp. + """ + + subquery = ( + select( + PatronModeEvent.user_address, + PatronModeEvent.patron_mode_enabled, + PatronModeEvent.created_at, + ) + .filter(PatronModeEvent.created_at <= dt) + .order_by(PatronModeEvent.user_address, PatronModeEvent.created_at.desc()) + .subquery() + ) + + alias = aliased(PatronModeEvent, subquery) + + result = await session.execute( + select(alias.user_address) + .filter(alias.patron_mode_enabled == True) + .group_by(alias.user_address) + ) + + patrons = [row[0] for row in result.fetchall()] + return patrons + + +async def get_budget_sum_by_users_addresses_and_epoch( + session: AsyncSession, users_addresses: List[str], epoch_number: int +) -> int: + """ + Sum the budgets of given users for a given epoch. + """ + result = await session.execute( + select(func.sum(cast(Budget.budget, Integer))) + .join(User) + .filter(User.address.in_(users_addresses), Budget.epoch == epoch_number) + ) + total_budget = result.scalar() + + if total_budget is None: + return 0 + + return total_budget + + +async def get_patrons_rewards( + session: AsyncSession, finalized_timestamp: datetime, epoch_number: int +) -> int: + """ + Patron rewards are the sum of budgets of all patrons for a given epoch. + """ + + patrons = await get_all_patrons_at_timestamp(session, finalized_timestamp) + return await get_budget_sum_by_users_addresses_and_epoch( + session, patrons, epoch_number + ) + + +async def get_budget_by_user_address_and_epoch( + session: AsyncSession, user_address: str, epoch: int +) -> int | None: + """ + Get the budget of a user for a given epoch. 
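+
+    Usage sketch; the address and epoch number are placeholder values:
+
+        budget = await get_budget_by_user_address_and_epoch(session, "0xabc...", 3)
+        # an integer budget when one exists for this user and epoch, otherwise None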
+ """ + + user = await get_user_by_address(session, user_address) + if user is None: + return None + + result = await session.execute( + select(Budget.budget) + .filter(Budget.user_id == user.id) + .filter(Budget.epoch == epoch) + ) + + budget = result.scalar() + + if budget is None: + return None + + return int(budget) + + +async def user_is_patron_with_budget( + session: AsyncSession, + user_address: str, + epoch_number: int, + finalized_timestamp: datetime, +) -> bool: + """ + Check if a user is a patron with a budget for a given epoch. + """ + + patrons = await get_all_patrons_at_timestamp(session, finalized_timestamp) + if user_address not in patrons: + return False + + budget = await get_budget_by_user_address_and_epoch( + session, user_address, epoch_number + ) + return budget is not None diff --git a/backend/v2/users/__init__.py b/backend/v2/users/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/users/repositories.py b/backend/v2/users/repositories.py new file mode 100644 index 0000000000..b28dc4777b --- /dev/null +++ b/backend/v2/users/repositories.py @@ -0,0 +1,11 @@ +from app.infrastructure.database.models import User +from eth_utils import to_checksum_address +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + + +async def get_user_by_address(session: AsyncSession, user_address: str) -> User | None: + user_address = to_checksum_address(user_address) + + result = await session.execute(select(User).filter(User.address == user_address)) + return result.scalar_one_or_none() From 56a5f6ed33952440b31887e6dfa7317873fe781c Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 27 Aug 2024 21:08:26 +0200 Subject: [PATCH 04/31] Small fixes after local testing --- backend/v2/allocations/repositories.py | 95 ------------------ backend/v2/allocations/services.py | 10 +- backend/v2/allocations/socket.py | 127 ++----------------------- backend/v2/epochs/contracts.py | 1 - backend/v2/main.py | 6 +- backend/v2/projects/services.py | 15 ++- 6 files changed, 22 insertions(+), 232 deletions(-) diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 48d2c6200e..3dafd1311f 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -35,31 +35,6 @@ async def get_allocations_with_user_uqs( ) -> list[AllocationWithUserUQScore]: """Get all allocations for a given epoch, including the uniqueness quotients of the users.""" - # result = await session.execute( - # select(Allocation) - # .filter(Allocation.epoch == epoch) - # .filter(Allocation.deleted_at.is_(None)) - # .options(joinedload(Allocation.user).joinedload(User.uniqueness_quotients)) - # ) - # allocations = result.scalars().all() - - # return [ - # AllocationWithUserUQScore( - # project_address=a.project_address, - # amount=int(a.amount), - # user_address=a.user.address, - # user_uq_score=next( - # ( - # uq.validated_score - # for uq in a.user.uniqueness_quotients - # if uq.epoch == epoch - # ), - # None, - # ), - # ) - # for a in allocations - # ] - result = await session.execute( select( Allocation.project_address, @@ -74,22 +49,6 @@ async def get_allocations_with_user_uqs( .filter(UniquenessQuotient.epoch == epoch_number) ) - # result = await session.execute( - # select( - # Allocation.id.label('allocation_id'), - # Allocation.amount.label('allocation_amount'), - # User.id.label('user_id'), - # User.name.label('user_name'), - # UniquenessQuotient.id.label('uq_id'), - # 
UniquenessQuotient.score.label('uq_score') - # ) - # .join(User, Allocation.user_id == User.id) - # .join(UniquenessQuotient, UniquenessQuotient.user_id == User.id) - # .filter(Allocation.epoch == epoch_number) - # .filter(Allocation.deleted_at.is_(None)) - # .filter(UniquenessQuotient.epoch == epoch_number) - # ) - rows = result.all() return [ @@ -103,36 +62,6 @@ async def get_allocations_with_user_uqs( ] -# allocations = database.allocations.get_all_with_uqs( -# context.epoch_details.epoch_num -# ) - -# def get_all_allocations_with_uqs(epoch: int) -> List[AllocationDTO]: -# allocations = ( -# Allocation.query.filter_by(epoch=epoch) -# .filter(Allocation.deleted_at.is_(None)) -# .options(joinedload(Allocation.user).joinedload(User.uniqueness_quotients)) -# .all() -# ) - -# return [ -# AllocationDTO( -# amount=int(a.amount), -# project_address=a.project_address, -# user_address=a.user.address, -# uq_score=next( -# ( -# uq.validated_score -# for uq in a.user.uniqueness_quotients -# if uq.epoch == epoch -# ), -# None, -# ), -# ) -# for a in allocations -# ] - - async def soft_delete_user_allocations_by_epoch( session: AsyncSession, user_address: str, @@ -239,27 +168,3 @@ async def get_donations_by_project( ) for a in allocations ] - - # query: Query = Allocation.query.filter_by( - # project_address=to_checksum_address(project_address), epoch=epoch - # ).options(joinedload(Allocation.user)) - - # if not with_deleted: - # query = query.filter(Allocation.deleted_at.is_(None)) - - # return query.all() - - # def get_allocations_by_project( - # self, context: Context, project_address: str - # ) -> List[ProjectDonationDTO]: - # allocations = database.allocations.get_all_by_project_addr_and_epoch( - # project_address, context.epoch_details.epoch_num - # ) - - # return [ - # ProjectDonationDTO( - # donor=a.user.address, amount=int(a.amount), project=project_address - # ) - # for a in allocations - # if int(a.amount) != 0 - # ] diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index 5fdea2ea4d..02cd2b1d1c 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -225,12 +225,12 @@ async def verify_logic( session, payload.user_address, epoch_number ) - # if user_budget is None: - # raise exceptions.BudgetNotFound(payload.user_address, epoch_number) + if user_budget is None: + raise exceptions.BudgetNotFound(payload.user_address, epoch_number) - # # Check if the allocations are within the budget - # if sum(a.amount for a in payload.allocations) > user_budget: - # raise exceptions.RewardsBudgetExceeded() + # Check if the allocations are within the budget + if sum(a.amount for a in payload.allocations) > user_budget: + raise exceptions.RewardsBudgetExceeded() async def get_next_user_nonce( diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 508d1a086f..7ea9718ee9 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -1,19 +1,14 @@ import logging -from typing import List import socketio -from app.engine.projects.rewards import ProjectRewardDTO -from app.exceptions import OctantException # from app.extensions import socketio, epochs -from app.infrastructure.exception_handler import UNEXPECTED_EXCEPTION, ExceptionHandler -from app.modules.dto import ProjectDonationDTO from eth_utils import to_checksum_address from v2.allocations.repositories import get_donations_by_project from v2.allocations.services import allocate -from v2.core.dependencies import db_getter, get_w3, 
w3_getter -from v2.epochs.dependencies import epochs_getter, epochs_subgraph_getter, get_epochs -from v2.projects.depdendencies import get_projects, projects_getter +from v2.core.dependencies import db_getter +from v2.epochs.dependencies import epochs_getter, epochs_subgraph_getter +from v2.projects.depdendencies import projects_getter from v2.projects.services import ( get_estimated_project_rewards, get_projects_allocation_threshold, @@ -37,28 +32,12 @@ async def on_connect(self, sid: str, environ: dict): Handle client connection """ - print("Type of sid", type(sid)) - print("Type of environ", type(environ)) - logging.debug("Client connected") - print("Epochs are here") - - await self.emit("epoch", {"epoch": "fuckup"}) - # We send the data only in PENDING state pending_epoch_number = await self.epochs_contracts.get_pending_epoch() - - epoch_end = await self.epochs_contracts.get_current_epoch_end() - - print("epocg_end", epoch_end) - print("Pending epoch =", pending_epoch_number) - - # We do not handle requests outside of pending epoch state - # if pending_epoch_number is None: - # return - - pending_epoch_number = 124 + if pending_epoch_number is None: + return async with self.db_session() as session: threshold = await get_projects_allocation_threshold( @@ -85,7 +64,6 @@ async def on_connect(self, sid: str, environ: dict): for project_address in project_rewards.amounts_by_project.keys() ] - # project_rewards = get_estimated_project_rewards().rewards await self.emit("project_rewards", rewards) async def on_disconnect(self, sid): @@ -96,15 +74,12 @@ async def on_allocate(self, sid: str, data: dict): Handle allocation request """ - # # We do not handle requests outside of pending epoch state (Allocation Window) - # pending_epoch_number = await self.epochs_contracts.get_pending_epoch() - # if pending_epoch_number is None: - # return - - print("message", data, type(data)) + # We do not handle requests outside of pending epoch state (Allocation Window) + pending_epoch_number = await self.epochs_contracts.get_pending_epoch() + if pending_epoch_number is None: + return request = from_dict(data) - pending_epoch_number = 124 async with self.db_session() as session: await allocate( @@ -154,41 +129,6 @@ async def on_allocate(self, sid: str, data: dict): {"project": project_address, "donors": donations}, ) - # msg = json.loads(msg) - - # print("MEssage", msg) - - # is_manually_edited = data.get("isManuallyEdited", None) - # user_address = data["userAddress"] - # # is_manually_edited = ( - # # msg["isManuallyEdited"] if "isManuallyEdited" in msg else None - # # ) - # logging.info(f"User allocation payload: {msg}") - - # controller.allocate( - # user_address, - # msg, - # is_manually_edited=is_manually_edited, - # ) - # socketio.logger.info(f"User: {user_address} allocated successfully") - - # threshold = get_projects_allocation_threshold() - # await self.emit("threshold", {"threshold": str(threshold)}, broadcast=True) - - # project_rewards = get_estimated_project_rewards().rewards - # await self.emit( - # "project_rewards", - # _serialize_project_rewards(project_rewards), - # broadcast=True, - # ) - # for project in project_rewards: - # donors = controller.get_all_donations_by_project(project.address) - # await self.emit( - # "project_donors", - # {"project": project.address, "donors": _serialize_donors(donors)}, - # broadcast=True, - # ) - def from_dict(data: dict) -> UserAllocationRequest: """ @@ -234,52 +174,3 @@ def from_dict(data: dict) -> UserAllocationRequest: is_manually_edited = 
is_manually_edited, ) # fmt: on - - -# def state_context(epoch_state: EpochState) -> Context: -# epoch_num = get_epoch_number(epoch_state) -# return build_context(epoch_num, epoch_state, with_block_range) - - -# @socketio.on("project_donors") -# def handle_project_donors(project_address: str): -# print("Project donors") -# emit( -# "project_donors", -# {"project": project_address, "donors": []}, -# ) -# donors = controller.get_all_donations_by_project(project_address) -# emit( -# "project_donors", -# {"project": project_address, "donors": _serialize_donors(donors)}, -# ) - - -# @socketio. -def default_error_handler(e): - ExceptionHandler.print_stacktrace(e) - if isinstance(e, OctantException): - emit("exception", {"message": str(e.message)}) - else: - emit("exception", {"message": UNEXPECTED_EXCEPTION}) - - -def _serialize_project_rewards(project_rewards: List[ProjectRewardDTO]) -> List[dict]: - return [ - { - "address": project_reward.address, - "allocated": str(project_reward.allocated), - "matched": str(project_reward.matched), - } - for project_reward in project_rewards - ] - - -def _serialize_donors(donors: List[ProjectDonationDTO]) -> List[dict]: - return [ - { - "address": donor.donor, - "amount": str(donor.amount), - } - for donor in donors - ] diff --git a/backend/v2/epochs/contracts.py b/backend/v2/epochs/contracts.py index 565d353c20..58d919369f 100644 --- a/backend/v2/epochs/contracts.py +++ b/backend/v2/epochs/contracts.py @@ -26,7 +26,6 @@ async def get_current_epoch(self) -> int: async def get_pending_epoch(self) -> Optional[int]: try: logging.debug("[Epochs contract] Getting pending epoch") - # return 5 return await self.contract.functions.getPendingEpoch().call() except exceptions.ContractLogicError: logging.warning("[Epochs contract] No pending epoch") diff --git a/backend/v2/main.py b/backend/v2/main.py index 4fb429fed3..162aeb8338 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -2,7 +2,6 @@ import socketio from fastapi import FastAPI from v2.allocations.socket import AllocateNamespace -from v2.core.dependencies import create_tables fastapi_app = FastAPI() @@ -16,10 +15,9 @@ async def fastapi_endpoint(): sio.register_namespace(AllocateNamespace("/")) sio_asgi_app = socketio.ASGIApp(socketio_server=sio, other_asgi_app=fastapi_app) -# app.mount("/static", StaticFiles(directory="static"), name="static") -# fastapi_app.mount("/", sio_asgi_app) fastapi_app.add_route("/socket.io/", route=sio_asgi_app) fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) -fastapi_app.add_event_handler("startup", create_tables) +# from v2.core.dependencies import create_tables +# fastapi_app.add_event_handler("startup", create_tables) diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py index 7db0286f96..c9f2d04633 100644 --- a/backend/v2/projects/services.py +++ b/backend/v2/projects/services.py @@ -86,16 +86,13 @@ async def get_estimated_project_matched_rewards_pending( # Arguments epoch_number: int, ) -> int: - pending_snapshot = await get_pending_epoch_snapshot(session, epoch_number) - # if pending_snapshot is None: - # raise ValueError(f"No pending snapshot for epoch {epoch_number}") - - from app.infrastructure.database.models import PendingEpochSnapshot + """ + Get the estimated matched rewards for the pending epoch. 
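+
+    The estimate follows _calculate_percentage_matched_rewards defined below:
+    with locked_ratio below IRE_PERCENT the result is
+    MATCHED_REWARDS_PERCENT * staking_proceeds + patrons_rewards; between
+    IRE_PERCENT and TR_PERCENT it is
+    (TR_PERCENT - locked_ratio) * staking_proceeds + patrons_rewards; at
+    TR_PERCENT only patrons_rewards remain (values above TR_PERCENT are
+    rejected).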
+ """ - pending_snapshot = PendingEpochSnapshot( - eth_proceeds="412042049081445321216", - locked_ratio="0.094755727584613854218098688", - ) + pending_snapshot = await get_pending_epoch_snapshot(session, epoch_number) + if pending_snapshot is None: + raise ValueError(f"No pending snapshot for epoch {epoch_number}") epoch_details = await epochs_subgraph.get_epoch_by_number(epoch_number) patrons_rewards = await get_patrons_rewards( From 501c1eb328615e0c4b251119e4b94509619c1496 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 27 Aug 2024 21:13:28 +0200 Subject: [PATCH 05/31] Additional fixes to alighn with what was before --- backend/app/extensions.py | 6 ----- .../app/infrastructure/contracts/epochs.py | 1 - backend/startup.py | 26 ++++--------------- 3 files changed, 5 insertions(+), 28 deletions(-) diff --git a/backend/app/extensions.py b/backend/app/extensions.py index 78e0dc2f6b..8a749b7c8a 100644 --- a/backend/app/extensions.py +++ b/backend/app/extensions.py @@ -23,12 +23,6 @@ description="Octant REST API documentation", catch_all_404s=True, ) -# from flask import current_app as app - -# socketio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) - -# async_mode="asgi", cors_allowed_origins="*", client_manager=mgr -# ) socketio = SocketIO(cors_allowed_origins="*") db = SQLAlchemy() migrate = Migrate() diff --git a/backend/app/infrastructure/contracts/epochs.py b/backend/app/infrastructure/contracts/epochs.py index bb47df0f31..b2c95ada82 100644 --- a/backend/app/infrastructure/contracts/epochs.py +++ b/backend/app/infrastructure/contracts/epochs.py @@ -27,7 +27,6 @@ def get_current_epoch(self) -> int: def get_pending_epoch(self) -> Optional[int]: try: app.logger.debug("[Epochs contract] Getting pending epoch") - return 5 return self.contract.functions.getPendingEpoch().call() except exceptions.ContractLogicError: app.logger.warning("[Epochs contract] No pending epoch") diff --git a/backend/startup.py b/backend/startup.py index 622eee6b9a..c4ccecc60c 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -1,11 +1,7 @@ -import asyncio -from concurrent.futures import ThreadPoolExecutor -import io import os -from fastapi import FastAPI, Request +from fastapi import Request from fastapi.middleware.wsgi import WSGIMiddleware -from fastapi.responses import JSONResponse, RedirectResponse, StreamingResponse -from starlette.responses import Response + from starlette.middleware.base import BaseHTTPMiddleware @@ -58,17 +54,9 @@ def teardown_session(*args, **kwargs): from v2.main import fastapi_app -# Create FastAPI app -# fastapi_app = FastAPI() - -# @fastapi_app.get("/fastapi-endpoint") -# async def fastapi_endpoint(): -# return {"message": "This is a FastAPI endpoint."} - # Mount Flask app under a sub-path fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) - # Middleware to check if the path exists in FastAPI class PathCheckMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): @@ -89,11 +77,7 @@ async def dispatch(self, request: Request, call_next): fastapi_app.add_middleware(PathCheckMiddleware) -# from app.extensions import socketio as our_socketio -# import socketio - -# sio_asgi_app = socketio.ASGIApp(socketio_server=our_socketio, other_asgi_app=fastapi_app) +if __name__ == "__main__": + import uvicorn -# # app.mount("/static", StaticFiles(directory="static"), name="static") -# fastapi_app.add_route("/socket.io/", route=sio_asgi_app) -# fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) + 
uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) \ No newline at end of file From 3cbf5158276a2f592e12589ab7416e193ab72aba Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 3 Sep 2024 15:02:22 +0200 Subject: [PATCH 06/31] Updates based on pr comments and extracting dependencies --- backend/app/__init__.py | 1 - backend/startup.py | 5 +- backend/v2/allocations/dependencies.py | 55 ++++ backend/v2/allocations/repositories.py | 8 +- .../v2/allocations/{models.py => schemas.py} | 0 backend/v2/allocations/services.py | 276 ++++------------- backend/v2/allocations/socket.py | 277 +++++++++++++----- backend/v2/allocations/validators.py | 185 ++++++++++++ backend/v2/core/dependencies.py | 49 +++- backend/v2/epochs/dependencies.py | 28 +- backend/v2/gitcoin_passport/__init__.py | 0 backend/v2/gitcoin_passport/repositories.py | 19 -- backend/v2/gitcoin_passport/services.py | 25 -- .../v2/project_rewards/capped_quadriatic.py | 47 ++- backend/v2/projects/depdendencies.py | 85 +++++- backend/v2/projects/services.py | 123 +++++--- .../v2/uniqueness_quotients/dependencies.py | 36 +++ .../v2/uniqueness_quotients/repositories.py | 17 +- backend/v2/uniqueness_quotients/services.py | 60 +++- backend/v2/user_patron_mode/repositories.py | 2 +- 20 files changed, 857 insertions(+), 441 deletions(-) create mode 100644 backend/v2/allocations/dependencies.py rename backend/v2/allocations/{models.py => schemas.py} (100%) create mode 100644 backend/v2/allocations/validators.py delete mode 100644 backend/v2/gitcoin_passport/__init__.py delete mode 100644 backend/v2/gitcoin_passport/repositories.py delete mode 100644 backend/v2/gitcoin_passport/services.py create mode 100644 backend/v2/uniqueness_quotients/dependencies.py diff --git a/backend/app/__init__.py b/backend/app/__init__.py index 7d01ad42e7..e57ad3d07b 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -7,7 +7,6 @@ db, migrate, cors, - socketio, cache, init_web3, api, diff --git a/backend/startup.py b/backend/startup.py index c4ccecc60c..dc3596d3e8 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -52,11 +52,12 @@ def teardown_session(*args, **kwargs): db.session.remove() -from v2.main import fastapi_app +from v2.main import fastapi_app # noqa # Mount Flask app under a sub-path fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) + # Middleware to check if the path exists in FastAPI class PathCheckMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): @@ -80,4 +81,4 @@ async def dispatch(self, request: Request, call_next): if __name__ == "__main__": import uvicorn - uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) \ No newline at end of file + uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py new file mode 100644 index 0000000000..9a58f3a92d --- /dev/null +++ b/backend/v2/allocations/dependencies.py @@ -0,0 +1,55 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import Field +from pydantic_settings import BaseSettings +from v2.projects.services import EstimatedProjectMatchedRewards +from v2.epochs.dependencies import get_epochs_subgraph +from v2.epochs.subgraphs import EpochsSubgraph +from v2.projects.contracts import ProjectsContracts +from v2.projects.depdendencies import ( + get_estimated_project_matched_rewards, + get_projects_contracts, +) +from v2.uniqueness_quotients.dependencies import get_uq_score_getter +from v2.uniqueness_quotients.services import 
UQScoreGetter +from v2.core.dependencies import AsyncDbSession + +from .services import Allocations +from .validators import SignatureVerifier + + +class SignatureVerifierSettings(BaseSettings): + chain_id: int = Field( + default=11155111, + description="The chain id to use for the signature verification.", + ) + + +def get_signature_verifier( + session: AsyncDbSession, + epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], + projects_contracts: Annotated[ProjectsContracts, Depends(get_projects_contracts)], + settings: Annotated[SignatureVerifierSettings, Depends(SignatureVerifierSettings)], +) -> SignatureVerifier: + return SignatureVerifier( + session, epochs_subgraph, projects_contracts, settings.chain_id + ) + + +def get_allocations( + session: AsyncDbSession, + signature_verifier: SignatureVerifier, + uq_score_getter: Annotated[UQScoreGetter, Depends(get_uq_score_getter)], + projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], + estimated_project_matched_rewards: Annotated[ + EstimatedProjectMatchedRewards, Depends(get_estimated_project_matched_rewards) + ], +) -> Allocations: + return Allocations( + session, + signature_verifier, + uq_score_getter, + projects, + estimated_project_matched_rewards, + ) diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 3dafd1311f..76e8374245 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -11,7 +11,7 @@ from sqlalchemy.sql.functions import coalesce from v2.users.repositories import get_user_by_address -from .models import AllocationWithUserUQScore, ProjectDonation, UserAllocationRequest +from .schemas import AllocationWithUserUQScore, ProjectDonation, UserAllocationRequest async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> int: @@ -150,15 +150,17 @@ async def get_donations_by_project( project_address: str, epoch_number: int, ) -> list[ProjectDonation]: + """Get all donations for a project in a given epoch.""" + result = await session.execute( select(Allocation) + .options(joinedload(Allocation.user)) .filter(Allocation.project_address == project_address) .filter(Allocation.epoch == epoch_number) .filter(Allocation.deleted_at.is_(None)) - .options(joinedload(Allocation.user)) ) - allocations = result.all() + allocations = result.scalars().all() return [ ProjectDonation( diff --git a/backend/v2/allocations/models.py b/backend/v2/allocations/schemas.py similarity index 100% rename from backend/v2/allocations/models.py rename to backend/v2/allocations/schemas.py diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index 02cd2b1d1c..686819c667 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -1,74 +1,74 @@ -from decimal import Decimal +from dataclasses import dataclass from app import exceptions -from app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing from sqlalchemy.ext.asyncio import AsyncSession -from v2.crypto.signatures import verify_signed_message -from v2.epochs.subgraphs import EpochsSubgraph +from v2.uniqueness_quotients.dependencies import UQScoreGetter from v2.project_rewards.capped_quadriatic import ( - capped_quadriatic_funding, - cqf_calculate_individual_leverage, + cqf_simulate_leverage, ) from v2.projects.contracts import ProjectsContracts -from v2.projects.services import get_estimated_project_matched_rewards_pending -from v2.uniqueness_quotients.services import 
get_or_calculate_uq_score -from v2.user_patron_mode.repositories import ( - get_budget_by_user_address_and_epoch, - user_is_patron_with_budget, +from v2.projects.services import ( + EstimatedProjectMatchedRewards, ) from v2.users.repositories import get_user_by_address -from web3 import AsyncWeb3 -from .models import AllocationWithUserUQScore, UserAllocationRequest +from .validators import SignatureVerifier +from .schemas import AllocationWithUserUQScore, UserAllocationRequest from .repositories import ( get_allocations_with_user_uqs, - get_last_allocation_request_nonce, soft_delete_user_allocations_by_epoch, store_allocation_request, ) +@dataclass +class Allocations: + session: AsyncSession + signature_verifier: SignatureVerifier + uq_score_getter: UQScoreGetter + projects: ProjectsContracts + estimated_project_matched_rewards: EstimatedProjectMatchedRewards + + async def make( + self, + epoch_number: int, + request: UserAllocationRequest, + ) -> str: + """ + Make an allocation for the user. + """ + return await allocate( + session=self.session, + signature_verifier=self.signature_verifier, + uq_score_getter=self.uq_score_getter, + projects=self.projects, + estimated_project_matched_rewards=self.estimated_project_matched_rewards, + epoch_number=epoch_number, + request=request, + ) + + async def allocate( # Component dependencies session: AsyncSession, - projects_contracts: ProjectsContracts, - epochs_subgraph: EpochsSubgraph, + signature_verifier: SignatureVerifier, + uq_score_getter: UQScoreGetter, + projects: ProjectsContracts, + estimated_project_matched_rewards: EstimatedProjectMatchedRewards, # Arguments epoch_number: int, request: UserAllocationRequest, - # Settings - uq_score_threshold: float = 21.0, - low_uq_score: Decimal = Decimal("0.2"), - max_uq_score: Decimal = Decimal("1.0"), - chain_id: int = 11155111, ) -> str: - await verify_logic( - session=session, - epoch_subgraph=epochs_subgraph, - projects_contracts=projects_contracts, + # Verify the signature + await signature_verifier.verify( epoch_number=epoch_number, - payload=request, - ) - await verify_signature( - w3=projects_contracts.w3, - chain_id=chain_id, - user_address=request.user_address, - payload=request, + request=request, ) - # Get user - # ? Do we need to get the user here ? 
- # user = await get_user_by_address(session, request.user_address) - # Get or calculate UQ score of the user - # TODO: k=v arguments - user_uq_score = await get_or_calculate_uq_score( - session=session, - user_address=request.user_address, + user_uq_score = await uq_score_getter.get_or_calculate( epoch_number=epoch_number, - uq_score_threshold=uq_score_threshold, - max_uq_score=max_uq_score, - low_uq_score=low_uq_score, + user_address=request.user_address, ) # Calculate leverage by simulating the allocation @@ -81,12 +81,12 @@ async def allocate( ) for a in request.allocations ] - leverage = await calculate_leverage( + + leverage = await simulate_leverage( session=session, - projects=projects_contracts, - epochs_subgraph=epochs_subgraph, + projects=projects, + estimated_project_matched_rewards=estimated_project_matched_rewards, epoch_number=epoch_number, - user_address=request.user_address, new_allocations=new_allocations, ) @@ -117,14 +117,13 @@ async def allocate( return request.user_address -async def calculate_leverage( +async def simulate_leverage( # Component dependencies session: AsyncSession, projects: ProjectsContracts, - epochs_subgraph: EpochsSubgraph, + estimated_project_matched_rewards: EstimatedProjectMatchedRewards, # Arguments epoch_number: int, - user_address: str, new_allocations: list[AllocationWithUserUQScore], ) -> float: """ @@ -133,183 +132,14 @@ async def calculate_leverage( all_projects = await projects.get_project_addresses(epoch_number) - matched_rewards = await get_estimated_project_matched_rewards_pending( - session=session, - epochs_subgraph=epochs_subgraph, - epoch_number=epoch_number, - ) + matched_rewards = await estimated_project_matched_rewards.get(epoch_number) # Get all allocations before user's allocation existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) - # Remove allocations made by this user (as they will be removed in a second) - allocations_without_user = [ - a for a in existing_allocations if a.user_address != user_address - ] - - # Calculate funding without user's allocations - before = capped_quadriatic_funding( - allocations=allocations_without_user, - matched_rewards=matched_rewards, - project_addresses=all_projects, - ) - # Calculate funding with user's allocations - after = capped_quadriatic_funding( - allocations=allocations_without_user + new_allocations, + return cqf_simulate_leverage( + existing_allocations=existing_allocations, + new_allocations=new_allocations, matched_rewards=matched_rewards, project_addresses=all_projects, ) - - # Calculate leverage based on the difference in funding - return cqf_calculate_individual_leverage( - new_allocations_amount=sum(a.amount for a in new_allocations), - project_addresses=[a.project_address for a in new_allocations], - before_allocation_matched=before.matched_by_project, - after_allocation_matched=after.matched_by_project, - ) - - -async def verify_logic( - # Component dependencies - session: AsyncSession, - epoch_subgraph: EpochsSubgraph, - projects_contracts: ProjectsContracts, - # Arguments - epoch_number: int, - payload: UserAllocationRequest, -): - # Check if the epoch is in the decision window - # epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) - # if epoch_details.state != "PENDING": - # raise exceptions.NotInDecision - - # Check if the allocations are not empty - if not payload.allocations: - raise exceptions.EmptyAllocations() - - # Check if the nonce is as expected - expected_nonce = await get_next_user_nonce(session, 
payload.user_address) - if payload.nonce != expected_nonce: - raise exceptions.WrongAllocationsNonce(payload.nonce, expected_nonce) - - # Check if the user is not a patron - epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) - is_patron = await user_is_patron_with_budget( - session, - payload.user_address, - epoch_number, - epoch_details.finalized_timestamp.datetime(), - ) - if is_patron: - raise exceptions.NotAllowedInPatronMode(payload.user_address) - - # Check if the user is not a project - all_projects = await projects_contracts.get_project_addresses(epoch_number) - if payload.user_address in all_projects: - raise exceptions.ProjectAllocationToSelf() - - project_addresses = [a.project_address for a in payload.allocations] - - # Check if the projects are valid - invalid_projects = set(project_addresses) - set(all_projects) - if invalid_projects: - raise exceptions.InvalidProjects(invalid_projects) - - # Check if there are no duplicates - duplicates = [p for p in project_addresses if project_addresses.count(p) > 1] - if duplicates: - raise exceptions.DuplicatedProjects(duplicates) - - # Get the user's budget - user_budget = await get_budget_by_user_address_and_epoch( - session, payload.user_address, epoch_number - ) - - if user_budget is None: - raise exceptions.BudgetNotFound(payload.user_address, epoch_number) - - # Check if the allocations are within the budget - if sum(a.amount for a in payload.allocations) > user_budget: - raise exceptions.RewardsBudgetExceeded() - - -async def get_next_user_nonce( - # Component dependencies - session: AsyncSession, - # Arguments - user_address: str, -) -> int: - """ - Get the next expected nonce for the user. - It's a simple increment of the last nonce, or 0 if there is no previous nonce. 
- """ - # Get the last allocation request of the user - last_allocation_request = await get_last_allocation_request_nonce( - session, user_address - ) - - # Calculate the next nonce - if last_allocation_request is None: - return 0 - - # Increment the last nonce - return last_allocation_request + 1 - - -async def verify_signature( - w3: AsyncWeb3, chain_id: int, user_address: str, payload: UserAllocationRequest -) -> None: - eip712_encoded = build_allocations_eip712_structure(chain_id, payload) - encoded_msg = encode_for_signing(EncodingStandardFor.DATA, eip712_encoded) - - # Verify the signature - is_valid = await verify_signed_message( - w3, user_address, encoded_msg, payload.signature - ) - if not is_valid: - raise exceptions.InvalidSignature(user_address, payload.signature) - - -def build_allocations_eip712_structure(chain_id: int, payload: UserAllocationRequest): - message = {} - message["allocations"] = [ - {"proposalAddress": a.project_address, "amount": a.amount} - for a in payload.allocations - ] - message["nonce"] = payload.nonce # type: ignore - return build_allocations_eip712_data(chain_id, message) - - -def build_allocations_eip712_data(chain_id: int, message: dict) -> dict: - # Convert amount value to int - message["allocations"] = [ - {**allocation, "amount": int(allocation["amount"])} - for allocation in message["allocations"] - ] - - allocation_types = { - "EIP712Domain": [ - {"name": "name", "type": "string"}, - {"name": "version", "type": "string"}, - {"name": "chainId", "type": "uint256"}, - ], - "Allocation": [ - {"name": "proposalAddress", "type": "address"}, - {"name": "amount", "type": "uint256"}, - ], - "AllocationPayload": [ - {"name": "allocations", "type": "Allocation[]"}, - {"name": "nonce", "type": "uint256"}, - ], - } - - return { - "types": allocation_types, - "domain": { - "name": "Octant", - "version": "1.0.0", - "chainId": chain_id, - }, - "primaryType": "AllocationPayload", - "message": message, - } diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 7ea9718ee9..c95ea2899a 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -1,60 +1,103 @@ import logging +from typing import Tuple import socketio -# from app.extensions import socketio, epochs from eth_utils import to_checksum_address -from v2.allocations.repositories import get_donations_by_project -from v2.allocations.services import allocate -from v2.core.dependencies import db_getter -from v2.epochs.dependencies import epochs_getter, epochs_subgraph_getter -from v2.projects.depdendencies import projects_getter +from v2.allocations.dependencies import ( + SignatureVerifierSettings, + get_allocations, + get_signature_verifier, +) +from v2.epochs.contracts import EpochsContracts from v2.projects.services import ( + EstimatedProjectRewards, + ProjectsAllocationThresholdGetter, +) +from v2.uniqueness_quotients.dependencies import UQScoreSettings, get_uq_score_getter +from v2.allocations.repositories import get_donations_by_project +from v2.allocations.services import Allocations +from v2.core.dependencies import ( + DatabaseSettings, + Web3ProviderSettings, + get_db_session, + get_w3, +) +from v2.epochs.dependencies import ( + EpochsSettings, + EpochsSubgraphSettings, + get_epochs_contracts, + get_epochs_subgraph, +) +from v2.projects.depdendencies import ( + EstimatedProjectMatchedRewardsSettings, + ProjectsAllocationThresholdSettings, + ProjectsSettings, + get_estimated_project_matched_rewards, get_estimated_project_rewards, - 
get_projects_allocation_threshold, + get_projects_contracts, ) +from v2.projects.depdendencies import get_projects_allocation_threshold_getter + +from .schemas import AllocationRequest, UserAllocationRequest -from .models import AllocationRequest, UserAllocationRequest + +from sqlalchemy.ext.asyncio import AsyncSession class AllocateNamespace(socketio.AsyncNamespace): - def __init__(self, namespace: str): - super().__init__(namespace=namespace) + def create_dependencies_on_connect( + self, + session: AsyncSession, + ) -> Tuple[ + ProjectsAllocationThresholdGetter, EstimatedProjectRewards, EpochsContracts + ]: + """ + Create and return all service dependencies. + TODO: how could we cache this one ? + """ + w3 = get_w3(Web3ProviderSettings()) # type: ignore + projects_contracts = get_projects_contracts(w3, ProjectsSettings()) + threshold_getter = get_projects_allocation_threshold_getter( + session, projects_contracts, ProjectsAllocationThresholdSettings() + ) + epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) + epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) + estimated_matched_rewards = get_estimated_project_matched_rewards( + session, epochs_subgraph, EstimatedProjectMatchedRewardsSettings() + ) + estimated_project_rewards = get_estimated_project_rewards( + session, + projects_contracts, + estimated_matched_rewards, + ) - # self.w3 = w3_getter() - self.epochs_contracts = epochs_getter() - self.epochs_subgraph = epochs_subgraph_getter() - self.projects_contracts = projects_getter() - self.db_session = db_getter() + return (threshold_getter, estimated_project_rewards, epochs_contracts) - async def on_connect(self, sid: str, environ: dict): + async def handle_on_connect( + self, + epochs_contracts: EpochsContracts, + projects_allocation_threshold_getter: ProjectsAllocationThresholdGetter, + estimated_project_rewards: EstimatedProjectRewards, + ): """ Handle client connection """ logging.debug("Client connected") - # We send the data only in PENDING state - pending_epoch_number = await self.epochs_contracts.get_pending_epoch() + pending_epoch_number = await epochs_contracts.get_pending_epoch() if pending_epoch_number is None: return - async with self.db_session() as session: - threshold = await get_projects_allocation_threshold( - session=session, - projects=self.projects_contracts, - epoch_number=pending_epoch_number, - ) - - await self.emit("threshold", {"threshold": str(threshold)}) - - project_rewards = await get_estimated_project_rewards( - session=session, - projects=self.projects_contracts, - epochs_subgraph=self.epochs_subgraph, - epoch_number=pending_epoch_number, - ) + # Get the allocation threshold and send it to the client + allocation_threshold = await projects_allocation_threshold_getter.get( + epoch_number=pending_epoch_number + ) + await self.emit("threshold", {"threshold": str(allocation_threshold)}) + # Get the estimated project rewards and send them to the client + project_rewards = await estimated_project_rewards.get(pending_epoch_number) rewards = [ { "address": project_address, @@ -66,68 +109,142 @@ async def on_connect(self, sid: str, environ: dict): await self.emit("project_rewards", rewards) + async def on_connect(self, sid: str, environ: dict): + async with get_db_session(DatabaseSettings()) as session: + ( + projects_allocation_threshold_getter, + estimated_project_rewards, + epochs_contracts, + ) = self.create_dependencies_on_connect(session) + + await self.handle_on_connect( + epochs_contracts, + projects_allocation_threshold_getter, + 
estimated_project_rewards, + ) + async def on_disconnect(self, sid): logging.debug("Client disconnected") - async def on_allocate(self, sid: str, data: dict): + def create_dependencies_on_allocate( + self, + session: AsyncSession, + ) -> Tuple[ + Allocations, + EpochsContracts, + ProjectsAllocationThresholdGetter, + EstimatedProjectRewards, + ]: + """ + Create and return all service dependencies. + """ + + w3 = get_w3(Web3ProviderSettings()) + epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) + projects_contracts = get_projects_contracts(w3, ProjectsSettings()) + epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) + threshold_getter = get_projects_allocation_threshold_getter( + session, projects_contracts, ProjectsAllocationThresholdSettings() + ) + estimated_matched_rewards = get_estimated_project_matched_rewards( + session, epochs_subgraph, EstimatedProjectMatchedRewardsSettings() + ) + estimated_project_rewards = get_estimated_project_rewards( + session, + projects_contracts, + estimated_matched_rewards, + ) + + signature_verifier = get_signature_verifier( + session, epochs_subgraph, projects_contracts, SignatureVerifierSettings() + ) + + uq_score_getter = get_uq_score_getter(session, UQScoreSettings()) + + allocations = get_allocations( + session, + signature_verifier, + uq_score_getter, + projects_contracts, + estimated_matched_rewards, + ) + + return ( + allocations, + epochs_contracts, + threshold_getter, + estimated_project_rewards, + ) + + async def handle_on_allocate( + self, + session: AsyncSession, + epochs_contracts: EpochsContracts, + allocations: Allocations, + threshold_getter: ProjectsAllocationThresholdGetter, + estimated_project_rewards: EstimatedProjectRewards, + data: dict, + ): """ Handle allocation request """ # We do not handle requests outside of pending epoch state (Allocation Window) - pending_epoch_number = await self.epochs_contracts.get_pending_epoch() + pending_epoch_number = await epochs_contracts.get_pending_epoch() if pending_epoch_number is None: return + pending_epoch_number = 1 request = from_dict(data) - async with self.db_session() as session: - await allocate( - session=session, - projects_contracts=self.projects_contracts, - epochs_subgraph=self.epochs_subgraph, - epoch_number=pending_epoch_number, - request=request, - ) + await allocations.make(pending_epoch_number, request) - threshold = await get_projects_allocation_threshold( - session=session, - projects=self.projects_contracts, - epoch_number=pending_epoch_number, - ) + logging.debug("Allocation request handled") - await self.emit("threshold", {"threshold": str(threshold)}) - project_rewards = await get_estimated_project_rewards( + threshold = await threshold_getter.get(pending_epoch_number) + await self.emit("threshold", {"threshold": str(threshold)}) + + project_rewards = await estimated_project_rewards.get(pending_epoch_number) + rewards = [ + { + "address": project_address, + "allocated": str(project_rewards.amounts_by_project[project_address]), + "matched": str(project_rewards.matched_by_project[project_address]), + } + for project_address in project_rewards.amounts_by_project.keys() + ] + + await self.emit("project_rewards", rewards) + + for project_address in project_rewards.amounts_by_project.keys(): + donations = await get_donations_by_project( session=session, - projects=self.projects_contracts, - epochs_subgraph=self.epochs_subgraph, + project_address=project_address, epoch_number=pending_epoch_number, ) - rewards = [ - { - "address": project_address, - 
"allocated": str( - project_rewards.amounts_by_project[project_address] - ), - "matched": str(project_rewards.matched_by_project[project_address]), - } - for project_address in project_rewards.amounts_by_project.keys() - ] - - await self.emit("project_rewards", rewards) + await self.emit( + "project_donors", + {"project": project_address, "donors": donations}, + ) - for project_address in project_rewards.amounts_by_project.keys(): - donations = await get_donations_by_project( - session=session, - project_address=project_address, - epoch_number=pending_epoch_number, - ) + async def on_allocate(self, sid: str, data: dict): + async with get_db_session(DatabaseSettings()) as session: + ( + allocations, + epochs_contracts, + threshold_getter, + estimated_project_rewards, + ) = self.create_dependencies_on_allocate(session) - await self.emit( - "project_donors", - {"project": project_address, "donors": donations}, - ) + await self.handle_on_allocate( + session, + epochs_contracts, + allocations, + threshold_getter, + estimated_project_rewards, + data, + ) def from_dict(data: dict) -> UserAllocationRequest: @@ -165,12 +282,10 @@ def from_dict(data: dict) -> UserAllocationRequest: signature = payload.get("signature") is_manually_edited = data.get("isManuallyEdited", False) - # fmt: off return UserAllocationRequest( - user_address = user_address, - allocations = allocations, - nonce = nonce, - signature = signature, - is_manually_edited = is_manually_edited, + user_address=user_address, + allocations=allocations, + nonce=nonce, + signature=signature, + is_manually_edited=is_manually_edited, ) - # fmt: on diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py new file mode 100644 index 0000000000..be17df9d5c --- /dev/null +++ b/backend/v2/allocations/validators.py @@ -0,0 +1,185 @@ +from dataclasses import dataclass +from web3 import AsyncWeb3 +from app import exceptions +from app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing +from .schemas import UserAllocationRequest +from .repositories import get_last_allocation_request_nonce +from v2.crypto.signatures import verify_signed_message +from v2.epochs.subgraphs import EpochsSubgraph +from v2.projects.contracts import ProjectsContracts + +from sqlalchemy.ext.asyncio import AsyncSession + +from v2.user_patron_mode.repositories import ( + get_budget_by_user_address_and_epoch, + user_is_patron_with_budget, +) + + +@dataclass +class SignatureVerifier: + session: AsyncSession + epochs_subgraph: EpochsSubgraph + projects_contracts: ProjectsContracts + chain_id: int + + async def verify(self, epoch_number: int, request: UserAllocationRequest) -> None: + await verify_logic( + session=self.session, + epoch_subgraph=self.epochs_subgraph, + projects_contracts=self.projects_contracts, + epoch_number=epoch_number, + payload=request, + ) + await verify_signature( + w3=self.projects_contracts.w3, + chain_id=self.chain_id, + user_address=request.user_address, + payload=request, + ) + + +async def verify_logic( + # Component dependencies + session: AsyncSession, + epoch_subgraph: EpochsSubgraph, + projects_contracts: ProjectsContracts, + # Arguments + epoch_number: int, + payload: UserAllocationRequest, +): + # Check if the epoch is in the decision window + # epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) + # if epoch_details.state != "PENDING": + # raise exceptions.NotInDecision + + # Check if the allocations are not empty + if not payload.allocations: + raise 
exceptions.EmptyAllocations() + + # Check if the nonce is as expected + expected_nonce = await get_next_user_nonce(session, payload.user_address) + if payload.nonce != expected_nonce: + raise exceptions.WrongAllocationsNonce(payload.nonce, expected_nonce) + + # Check if the user is not a patron + epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) + is_patron = await user_is_patron_with_budget( + session, + payload.user_address, + epoch_number, + epoch_details.finalized_timestamp.datetime(), + ) + if is_patron: + raise exceptions.NotAllowedInPatronMode(payload.user_address) + + # Check if the user is not a project + all_projects = await projects_contracts.get_project_addresses(epoch_number) + if payload.user_address in all_projects: + raise exceptions.ProjectAllocationToSelf() + + project_addresses = [a.project_address for a in payload.allocations] + + # Check if the projects are valid + invalid_projects = set(project_addresses) - set(all_projects) + if invalid_projects: + raise exceptions.InvalidProjects(invalid_projects) + + # Check if there are no duplicates + duplicates = [p for p in project_addresses if project_addresses.count(p) > 1] + if duplicates: + raise exceptions.DuplicatedProjects(duplicates) + + # Get the user's budget + user_budget = await get_budget_by_user_address_and_epoch( + session, payload.user_address, epoch_number + ) + + if user_budget is None: + raise exceptions.BudgetNotFound(payload.user_address, epoch_number) + + # Check if the allocations are within the budget + if sum(a.amount for a in payload.allocations) > user_budget: + raise exceptions.RewardsBudgetExceeded() + + +async def get_next_user_nonce( + # Component dependencies + session: AsyncSession, + # Arguments + user_address: str, +) -> int: + """ + Get the next expected nonce for the user. + It's a simple increment of the last nonce, or 0 if there is no previous nonce. 
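+
+    For example, a user whose latest stored allocation request used nonce 2
+    gets 3 as the next expected nonce, while a user with no prior requests
+    gets 0.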
+ """ + # Get the last allocation request of the user + last_allocation_request = await get_last_allocation_request_nonce( + session, user_address + ) + + # Calculate the next nonce + if last_allocation_request is None: + return 0 + + # Increment the last nonce + return last_allocation_request + 1 + + +async def verify_signature( + w3: AsyncWeb3, chain_id: int, user_address: str, payload: UserAllocationRequest +) -> None: + eip712_encoded = build_allocations_eip712_structure(chain_id, payload) + encoded_msg = encode_for_signing(EncodingStandardFor.DATA, eip712_encoded) + + # Verify the signature + is_valid = await verify_signed_message( + w3, user_address, encoded_msg, payload.signature + ) + if not is_valid: + raise exceptions.InvalidSignature(user_address, payload.signature) + + +def build_allocations_eip712_structure(chain_id: int, payload: UserAllocationRequest): + message = {} + message["allocations"] = [ + {"proposalAddress": a.project_address, "amount": a.amount} + for a in payload.allocations + ] + message["nonce"] = payload.nonce # type: ignore + return build_allocations_eip712_data(chain_id, message) + + +def build_allocations_eip712_data(chain_id: int, message: dict) -> dict: + # Convert amount value to int + message["allocations"] = [ + {**allocation, "amount": int(allocation["amount"])} + for allocation in message["allocations"] + ] + + allocation_types = { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + ], + "Allocation": [ + {"name": "proposalAddress", "type": "address"}, + {"name": "amount", "type": "uint256"}, + ], + "AllocationPayload": [ + {"name": "allocations", "type": "Allocation[]"}, + {"name": "nonce", "type": "uint256"}, + ], + } + + return { + "types": allocation_types, + "domain": { + "name": "Octant", + "version": "1.0.0", + "chainId": chain_id, + }, + "primaryType": "AllocationPayload", + "message": message, + } diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index b19f65cdad..84325af330 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -1,9 +1,12 @@ +from contextlib import asynccontextmanager +from typing import Annotated, AsyncGenerator + +from fastapi import Depends from app.infrastructure.database.models import BaseModel from pydantic import Field from pydantic_settings import BaseSettings from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from sqlalchemy.orm import sessionmaker from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware @@ -12,18 +15,17 @@ class Web3ProviderSettings(BaseSettings): eth_rpc_provider_url: str -# TODO: Cache? 
-def get_w3(eth_rpc_provider_url: str) -> AsyncWeb3: - w3 = AsyncWeb3(provider=AsyncHTTPProvider(eth_rpc_provider_url)) +def get_w3( + settings: Annotated[Web3ProviderSettings, Depends(Web3ProviderSettings)] +) -> AsyncWeb3: + w3 = AsyncWeb3(provider=AsyncHTTPProvider(settings.eth_rpc_provider_url)) if async_geth_poa_middleware not in w3.middleware_onion: w3.middleware_onion.inject(async_geth_poa_middleware, layer=0) return w3 -def w3_getter() -> AsyncWeb3: - settings = Web3ProviderSettings() - return get_w3(settings.eth_rpc_provider_url) +Web3 = Annotated[AsyncWeb3, Depends(get_w3)] class DatabaseSettings(BaseSettings): @@ -38,12 +40,33 @@ async def create_tables(): await conn.run_sync(BaseModel.metadata.create_all) -def get_db_engine(database_uri: str) -> async_sessionmaker[AsyncSession]: - engine = create_async_engine(database_uri) +@asynccontextmanager +async def get_db_session( + settings: Annotated[DatabaseSettings, Depends(DatabaseSettings)] +) -> AsyncGenerator[AsyncSession, None]: + # Create an async SQLAlchemy engine + + # logging.error("Creating database engine") - return sessionmaker(bind=engine, class_=AsyncSession) + engine = create_async_engine(settings.sqlalchemy_database_uri) + # Create a sessionmaker with AsyncSession class + async_session = async_sessionmaker( + autocommit=False, autoflush=False, bind=engine, class_=AsyncSession + ) -def db_getter() -> async_sessionmaker[AsyncSession]: - settings = DatabaseSettings() - return get_db_engine(settings.sqlalchemy_database_uri) + # logging.error("Opening session", async_session) + + # Create a new session + async with async_session() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +AsyncDbSession = Annotated[AsyncSession, Depends(get_db_session)] diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py index 712f2d9da6..4cc8acfc50 100644 --- a/backend/v2/epochs/dependencies.py +++ b/backend/v2/epochs/dependencies.py @@ -1,8 +1,8 @@ -from typing import Callable +from typing import Annotated +from fastapi import Depends from pydantic_settings import BaseSettings -from v2.core.dependencies import w3_getter -from web3 import AsyncWeb3 +from v2.core.dependencies import Web3 from .contracts import EPOCHS_ABI, EpochsContracts from .subgraphs import EpochsSubgraph @@ -12,25 +12,17 @@ class EpochsSettings(BaseSettings): epochs_contract_address: str -# TODO: cache -def get_epochs(w3: AsyncWeb3, epochs_contract_address: str) -> EpochsContracts: - return EpochsContracts(w3, EPOCHS_ABI, epochs_contract_address) # type: ignore - - -def epochs_getter() -> EpochsContracts: - settings = EpochsSettings() # type: ignore - return get_epochs(w3_getter(), settings.epochs_contract_address) - - -getter = Callable[[], EpochsContracts] +def get_epochs_contracts( + w3: Web3, settings: Annotated[EpochsSettings, Depends(EpochsSettings)] +) -> EpochsContracts: + return EpochsContracts(w3, EPOCHS_ABI, settings.epochs_contract_address) class EpochsSubgraphSettings(BaseSettings): subgraph_endpoint: str - # url = config["SUBGRAPH_ENDPOINT"] - -def epochs_subgraph_getter() -> EpochsSubgraph: - settings = EpochsSubgraphSettings() # type: ignore +def get_epochs_subgraph( + settings: Annotated[EpochsSubgraphSettings, Depends(EpochsSubgraphSettings)] +) -> EpochsSubgraph: return EpochsSubgraph(settings.subgraph_endpoint) diff --git a/backend/v2/gitcoin_passport/__init__.py b/backend/v2/gitcoin_passport/__init__.py deleted file mode 
100644 index e69de29bb2..0000000000 diff --git a/backend/v2/gitcoin_passport/repositories.py b/backend/v2/gitcoin_passport/repositories.py deleted file mode 100644 index 272a129908..0000000000 --- a/backend/v2/gitcoin_passport/repositories.py +++ /dev/null @@ -1,19 +0,0 @@ -from app.infrastructure.database.models import GPStamps, User -from eth_utils import to_checksum_address -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - - -async def get_gp_stamps_by_address( - session: AsyncSession, user_address: str -) -> GPStamps | None: - """Gets the latest GitcoinPassport Stamps record for a user.""" - - result = await session.execute( - select(GPStamps) - .join(User) - .filter(User.address == to_checksum_address(user_address)) - .order_by(GPStamps.created_at.desc()) - ) - - return result.scalar_one_or_none() diff --git a/backend/v2/gitcoin_passport/services.py b/backend/v2/gitcoin_passport/services.py deleted file mode 100644 index 786c3cbe9f..0000000000 --- a/backend/v2/gitcoin_passport/services.py +++ /dev/null @@ -1,25 +0,0 @@ -from app.constants import GUEST_LIST -from app.modules.user.antisybil.service.initial import _has_guest_stamp_applied_by_gp -from eth_utils import to_checksum_address -from sqlalchemy.ext.asyncio import AsyncSession - -from .repositories import get_gp_stamps_by_address - - -async def get_gitcoin_passport_score(session: AsyncSession, user_address: str) -> float: - """Gets saved Gitcoin Passport score for a user. - Returns None if the score is not saved. - If the user is in the GUEST_LIST, the score will be adjusted to include the guest stamp. - """ - - user_address = to_checksum_address(user_address) - - stamps = await get_gp_stamps_by_address(session, user_address) - - if stamps is None: - return 0.0 - - if user_address in GUEST_LIST and not _has_guest_stamp_applied_by_gp(stamps): - return stamps.score + 21.0 - - return stamps.score diff --git a/backend/v2/project_rewards/capped_quadriatic.py b/backend/v2/project_rewards/capped_quadriatic.py index 4e5d1db12b..eed699bf41 100644 --- a/backend/v2/project_rewards/capped_quadriatic.py +++ b/backend/v2/project_rewards/capped_quadriatic.py @@ -3,7 +3,7 @@ from math import sqrt from typing import Dict, NamedTuple -from v2.allocations.models import AllocationWithUserUQScore +from v2.allocations.schemas import AllocationWithUserUQScore class CappedQuadriaticFunding(NamedTuple): @@ -136,3 +136,48 @@ def cqf_calculate_individual_leverage( leverage = total_difference / new_allocations_amount return float(leverage) + + +def cqf_simulate_leverage( + existing_allocations: list[AllocationWithUserUQScore], + new_allocations: list[AllocationWithUserUQScore], + matched_rewards: int, + project_addresses: list[str], + MR_FUNDING_CAP_PERCENT: Decimal = MR_FUNDING_CAP_PERCENT, +) -> float: + """Simulate the leverage of a user's new allocations in capped quadratic funding.""" + + if not new_allocations: + raise ValueError("No new allocations provided") + + # Get the user address associated with the allocations + user_address = new_allocations[0].user_address + + # Remove allocations made by this user (as they will be removed in a second) + allocations_without_user = [ + a for a in existing_allocations if a.user_address != user_address + ] + + # Calculate capped quadratic funding before and after the user's allocation + before_allocation = capped_quadriatic_funding( + allocations_without_user, + matched_rewards, + project_addresses, + MR_FUNDING_CAP_PERCENT, + ) + after_allocation = capped_quadriatic_funding( + 
allocations_without_user + new_allocations, + matched_rewards, + project_addresses, + MR_FUNDING_CAP_PERCENT, + ) + + # Calculate leverage + leverage = cqf_calculate_individual_leverage( + new_allocations_amount=sum(a.amount for a in new_allocations), + project_addresses=[a.project_address for a in new_allocations], + before_allocation_matched=before_allocation.matched_by_project, + after_allocation_matched=after_allocation.matched_by_project, + ) + + return leverage diff --git a/backend/v2/projects/depdendencies.py b/backend/v2/projects/depdendencies.py index 49869386d8..6b30e7c7a2 100644 --- a/backend/v2/projects/depdendencies.py +++ b/backend/v2/projects/depdendencies.py @@ -1,9 +1,19 @@ +from decimal import Decimal +from typing import Annotated +from fastapi import Depends from pydantic import Field from pydantic_settings import BaseSettings -from v2.core.dependencies import w3_getter -from web3 import AsyncWeb3 +from v2.epochs.dependencies import get_epochs_subgraph +from v2.epochs.subgraphs import EpochsSubgraph +from v2.core.dependencies import AsyncDbSession, Web3 + from .contracts import PROJECTS_ABI, ProjectsContracts +from .services import ( + EstimatedProjectMatchedRewards, + EstimatedProjectRewards, + ProjectsAllocationThresholdGetter, +) class ProjectsSettings(BaseSettings): @@ -12,11 +22,70 @@ class ProjectsSettings(BaseSettings): ) -# TODO: cache -def get_projects(w3: AsyncWeb3, projects_contract_address: str) -> ProjectsContracts: - return ProjectsContracts(w3, PROJECTS_ABI, projects_contract_address) # type: ignore +def get_projects_contracts( + w3: Web3, settings: Annotated[ProjectsSettings, Depends(ProjectsSettings)] +) -> ProjectsContracts: + return ProjectsContracts(w3, PROJECTS_ABI, settings.projects_contract_address) + + +class ProjectsAllocationThresholdSettings(BaseSettings): + project_count_multiplier: int = Field( + default=1, + description="The multiplier to the number of projects to calculate the allocation threshold.", + ) + + +def get_projects_allocation_threshold_getter( + session: AsyncDbSession, + projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], + settings: Annotated[ + ProjectsAllocationThresholdSettings, + Depends(ProjectsAllocationThresholdSettings), + ], +) -> ProjectsAllocationThresholdGetter: + return ProjectsAllocationThresholdGetter( + session, projects, settings.project_count_multiplier + ) + + +class EstimatedProjectMatchedRewardsSettings(BaseSettings): + TR_PERCENT: Decimal = Field( + default=Decimal("0.7"), description="The percentage of the TR rewards." + ) + IRE_PERCENT: Decimal = Field( + default=Decimal("0.35"), description="The percentage of the IRE rewards." + ) + MATCHED_REWARDS_PERCENT: Decimal = Field( + default=Decimal("0.35"), description="The percentage of the matched rewards." 
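To make the leverage number concrete: cqf_simulate_leverage above reruns capped quadratic funding with and without the user's new allocations and, via cqf_calculate_individual_leverage, relates the resulting shift in matched funding for the touched projects to the amount newly allocated. A sketch with invented numbers, assuming the shift is summed as absolute per-project differences:

    new_allocations_amount = 100
    matched_before = {"0xProjA": 1_000, "0xProjB": 500}  # CQF without the user's allocations
    matched_after = {"0xProjA": 1_150, "0xProjB": 600}   # CQF re-run with them included
    total_difference = sum(
        abs(matched_after[p] - matched_before[p]) for p in matched_before
    )
    leverage = total_difference / new_allocations_amount  # (150 + 100) / 100 == 2.5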
+ ) + + +def get_estimated_project_matched_rewards( + session: AsyncDbSession, + epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], + settings: Annotated[ + EstimatedProjectMatchedRewardsSettings, + Depends(EstimatedProjectMatchedRewardsSettings), + ], +) -> EstimatedProjectMatchedRewards: + return EstimatedProjectMatchedRewards( + session=session, + epochs_subgraph=epochs_subgraph, + tr_percent=settings.TR_PERCENT, + ire_percent=settings.IRE_PERCENT, + matched_rewards_percent=settings.MATCHED_REWARDS_PERCENT, + ) -def projects_getter() -> ProjectsContracts: - settings = ProjectsSettings() # type: ignore - return get_projects(w3_getter(), settings.projects_contract_address) +def get_estimated_project_rewards( + session: AsyncDbSession, + projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], + estimated_project_matched_rewards: Annotated[ + EstimatedProjectMatchedRewards, Depends(get_estimated_project_matched_rewards) + ], +) -> EstimatedProjectRewards: + return EstimatedProjectRewards( + session=session, + projects=projects, + estimated_matched_rewards=estimated_project_matched_rewards, + ) diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py index c9f2d04633..94e787b9fb 100644 --- a/backend/v2/projects/services.py +++ b/backend/v2/projects/services.py @@ -1,3 +1,4 @@ +from dataclasses import dataclass from decimal import Decimal from sqlalchemy.ext.asyncio import AsyncSession @@ -15,6 +16,24 @@ from v2.user_patron_mode.repositories import get_patrons_rewards +@dataclass +class ProjectsAllocationThresholdGetter: + session: AsyncSession + projects: ProjectsContracts + project_count_multiplier: int = 1 + + async def get( + self, + epoch_number: int, + ) -> int: + return await get_projects_allocation_threshold( + session=self.session, + projects=self.projects, + epoch_number=epoch_number, + project_count_multiplier=self.project_count_multiplier, + ) + + async def get_projects_allocation_threshold( # Dependencies session: AsyncSession, @@ -48,41 +67,35 @@ def _calculate_threshold( ) -async def get_estimated_project_rewards( +@dataclass +class EstimatedProjectMatchedRewards: # Dependencies - session: AsyncSession, - projects: ProjectsContracts, - epochs_subgraph: EpochsSubgraph, - # Arguments - epoch_number: int, -) -> CappedQuadriaticFunding: - # project_settings project is ProjectSettings - all_projects = await projects.get_project_addresses(epoch_number) - - matched_rewards = await get_estimated_project_matched_rewards_pending( - session, - epochs_subgraph=epochs_subgraph, - epoch_number=epoch_number, - ) - allocations = await get_allocations_with_user_uqs(session, epoch_number) - - return capped_quadriatic_funding( - project_addresses=all_projects, - allocations=allocations, - matched_rewards=matched_rewards, - ) - - -TR_PERCENT = Decimal("0.7") -IRE_PERCENT = Decimal("0.35") -MATCHED_REWARDS_PERCENT = Decimal("0.35") + session: AsyncSession + epochs_subgraph: EpochsSubgraph + # Settings + tr_percent: Decimal + ire_percent: Decimal + matched_rewards_percent: Decimal + + async def get(self, epoch_number: int) -> int: + return await get_estimated_project_matched_rewards_pending( + session=self.session, + epochs_subgraph=self.epochs_subgraph, + tr_percent=self.tr_percent, + ire_percent=self.ire_percent, + matched_rewards_percent=self.matched_rewards_percent, + epoch_number=epoch_number, + ) async def get_estimated_project_matched_rewards_pending( # Dependencies session: AsyncSession, epochs_subgraph: EpochsSubgraph, - # 
projects: Projects, + # Settings + tr_percent: Decimal, + ire_percent: Decimal, + matched_rewards_percent: Decimal, # Arguments epoch_number: int, ) -> int: @@ -99,16 +112,14 @@ async def get_estimated_project_matched_rewards_pending( session, epoch_details.finalized_timestamp.datetime(), epoch_number ) - # fmt: off return _calculate_percentage_matched_rewards( - locked_ratio = Decimal(pending_snapshot.locked_ratio), - tr_percent = TR_PERCENT, - ire_percent = IRE_PERCENT, - staking_proceeds = int(pending_snapshot.eth_proceeds), - patrons_rewards = patrons_rewards, - matched_rewards_percent = MATCHED_REWARDS_PERCENT, + locked_ratio=Decimal(pending_snapshot.locked_ratio), + tr_percent=tr_percent, + ire_percent=ire_percent, + staking_proceeds=int(pending_snapshot.eth_proceeds), + patrons_rewards=patrons_rewards, + matched_rewards_percent=matched_rewards_percent, ) - # fmt: on def _calculate_percentage_matched_rewards( @@ -124,6 +135,44 @@ def _calculate_percentage_matched_rewards( if locked_ratio < ire_percent: return int(matched_rewards_percent * staking_proceeds + patrons_rewards) - elif ire_percent <= locked_ratio < tr_percent: + + if ire_percent <= locked_ratio < tr_percent: return int((tr_percent - locked_ratio) * staking_proceeds + patrons_rewards) + return patrons_rewards + + +@dataclass +class EstimatedProjectRewards: + # Dependencies + session: AsyncSession + projects: ProjectsContracts + estimated_matched_rewards: EstimatedProjectMatchedRewards + + async def get(self, epoch_number: int) -> CappedQuadriaticFunding: + return await estimate_project_rewards( + session=self.session, + projects=self.projects, + estimated_matched_rewards=self.estimated_matched_rewards, + epoch_number=epoch_number, + ) + + +async def estimate_project_rewards( + # Dependencies + session: AsyncSession, + projects: ProjectsContracts, + estimated_matched_rewards: EstimatedProjectMatchedRewards, + # Arguments + epoch_number: int, +) -> CappedQuadriaticFunding: + # project_settings project is ProjectSettings + all_projects = await projects.get_project_addresses(epoch_number) + matched_rewards = await estimated_matched_rewards.get(epoch_number) + allocations = await get_allocations_with_user_uqs(session, epoch_number) + + return capped_quadriatic_funding( + project_addresses=all_projects, + allocations=allocations, + matched_rewards=matched_rewards, + ) diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py new file mode 100644 index 0000000000..46739bbb26 --- /dev/null +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -0,0 +1,36 @@ +from decimal import Decimal +from typing import Annotated +from fastapi import Depends + +from pydantic import Field +from pydantic_settings import BaseSettings + +from v2.core.dependencies import AsyncDbSession +from .services import UQScoreGetter + + +class UQScoreSettings(BaseSettings): + uq_score_threshold: float = Field( + default=21.0, + description="The Gitcoin Passport score threshold above which the UQ score is set to the maximum UQ score.", + ) + low_uq_score: Decimal = Field( + default=Decimal("0.2"), + description="The UQ score to be returned if the Gitcoin Passport score is below the threshold.", + ) + max_uq_score: Decimal = Field( + default=Decimal("1.0"), + description="The UQ score to be returned if the Gitcoin Passport score is above the threshold.", + ) + + +def get_uq_score_getter( + session: AsyncDbSession, + settings: Annotated[UQScoreSettings, Depends(UQScoreSettings)], +) -> UQScoreGetter: + return 
UQScoreGetter( + session=session, + uq_score_threshold=settings.uq_score_threshold, + max_uq_score=settings.max_uq_score, + low_uq_score=settings.low_uq_score, + ) diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py index 14e485fc28..c9d3376ed9 100644 --- a/backend/v2/uniqueness_quotients/repositories.py +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -1,7 +1,7 @@ from decimal import Decimal from typing import Optional -from app.infrastructure.database.models import UniquenessQuotient, User +from app.infrastructure.database.models import GPStamps, UniquenessQuotient, User from eth_utils import to_checksum_address from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession @@ -43,3 +43,18 @@ async def save_uq_score_for_user_address( ) session.add(uq_score) + + +async def get_gp_stamps_by_address( + session: AsyncSession, user_address: str +) -> GPStamps | None: + """Gets the latest GitcoinPassport Stamps record for a user.""" + + result = await session.execute( + select(GPStamps) + .join(User) + .filter(User.address == to_checksum_address(user_address)) + .order_by(GPStamps.created_at.desc()) + ) + + return result.scalar_one_or_none() diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py index aaafd05228..56308d71fa 100644 --- a/backend/v2/uniqueness_quotients/services.py +++ b/backend/v2/uniqueness_quotients/services.py @@ -1,19 +1,42 @@ +from dataclasses import dataclass from decimal import Decimal from sqlalchemy.ext.asyncio import AsyncSession -from v2.gitcoin_passport.services import get_gitcoin_passport_score -from .repositories import get_uq_score_by_user_address, save_uq_score_for_user_address +from app.constants import GUEST_LIST +from app.modules.user.antisybil.service.initial import _has_guest_stamp_applied_by_gp +from eth_utils import to_checksum_address -LOW_UQ_SCORE = Decimal("0.2") -MAX_UQ_SCORE = Decimal("1.0") +from .repositories import ( + get_uq_score_by_user_address, + save_uq_score_for_user_address, + get_gp_stamps_by_address, +) + + +@dataclass +class UQScoreGetter: + session: AsyncSession + uq_score_threshold: float + max_uq_score: Decimal + low_uq_score: Decimal + + async def get_or_calculate(self, epoch_number: int, user_address: str) -> Decimal: + return await get_or_calculate_uq_score( + session=self.session, + user_address=user_address, + epoch_number=epoch_number, + uq_score_threshold=self.uq_score_threshold, + max_uq_score=self.max_uq_score, + low_uq_score=self.low_uq_score, + ) def calculate_uq_score( gp_score: float, uq_score_threshold: float, - max_uq_score: Decimal = MAX_UQ_SCORE, - low_uq_score: Decimal = LOW_UQ_SCORE, + max_uq_score: Decimal, + low_uq_score: Decimal, ) -> Decimal: """Calculate UQ score (multiplier) based on the GP score and the UQ score threshold. If the GP score is greater than or equal to the UQ score threshold, the UQ score is set to the maximum UQ score. @@ -22,6 +45,8 @@ def calculate_uq_score( Args: gp_score (float): The GitcoinPassport antisybil score. uq_score_threshold (int): Anything below this threshold will be considered low UQ score, and anything above will be considered maximum UQ score. + max_uq_score (Decimal): Score to be returned if the GP score is greater than or equal to the UQ score threshold. + low_uq_score (Decimal): Score to be returned if the GP score is less than the UQ score threshold. 
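Plugging in the defaults configured above (uq_score_threshold 21.0, low_uq_score 0.2, max_uq_score 1.0), the mapping this docstring describes works out as follows for illustrative Gitcoin Passport scores:

    # calculate_uq_score(gp_score=15.0, uq_score_threshold=21.0,
    #                    max_uq_score=Decimal("1.0"), low_uq_score=Decimal("0.2"))
    # -> Decimal("0.2")   below the threshold: reduced multiplier
    # calculate_uq_score(gp_score=22.5, uq_score_threshold=21.0,
    #                    max_uq_score=Decimal("1.0"), low_uq_score=Decimal("0.2"))
    # -> Decimal("1.0")   at or above the threshold: full multiplier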
""" if gp_score >= uq_score_threshold: @@ -35,8 +60,8 @@ async def get_or_calculate_uq_score( user_address: str, epoch_number: int, uq_score_threshold: float, - max_uq_score: Decimal = MAX_UQ_SCORE, - low_uq_score: Decimal = LOW_UQ_SCORE, + max_uq_score: Decimal, + low_uq_score: Decimal, ) -> Decimal: """Get or calculate the UQ score for a user in a given epoch. If the UQ score is already calculated, it will be returned. @@ -58,3 +83,22 @@ async def get_or_calculate_uq_score( await save_uq_score_for_user_address(session, user_address, epoch_number, uq_score) return uq_score + + +async def get_gitcoin_passport_score(session: AsyncSession, user_address: str) -> float: + """Gets saved Gitcoin Passport score for a user. + Returns None if the score is not saved. + If the user is in the GUEST_LIST, the score will be adjusted to include the guest stamp. + """ + + user_address = to_checksum_address(user_address) + + stamps = await get_gp_stamps_by_address(session, user_address) + + if stamps is None: + return 0.0 + + if user_address in GUEST_LIST and not _has_guest_stamp_applied_by_gp(stamps): + return stamps.score + 21.0 + + return stamps.score diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py index 38c16c174f..7ac9672b22 100644 --- a/backend/v2/user_patron_mode/repositories.py +++ b/backend/v2/user_patron_mode/repositories.py @@ -31,7 +31,7 @@ async def get_all_patrons_at_timestamp( result = await session.execute( select(alias.user_address) - .filter(alias.patron_mode_enabled == True) + .filter(alias.patron_mode_enabled) .group_by(alias.user_address) ) From b44f1aac0fa1564c6373ed46319b88a9e6edda2f Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 1 Oct 2024 11:32:05 +0200 Subject: [PATCH 07/31] wip - investigating why fastapi is so slow --- .../infrastructure/database/allocations.py | 9 +- .../database/pending_epoch_snapshot.py | 6 +- backend/app/settings.py | 3 +- backend/poetry.lock | 59 +++++++++++- backend/pyproject.toml | 1 + backend/startup.py | 3 +- backend/v2/allocations/dependencies.py | 27 ++++-- backend/v2/allocations/repositories.py | 9 ++ backend/v2/allocations/router.py | 39 ++++++++ backend/v2/allocations/schemas.py | 27 +++++- backend/v2/allocations/services.py | 30 ++++++ backend/v2/allocations/socket.py | 15 +++ backend/v2/allocations/validators.py | 30 ++++++ backend/v2/core/dependencies.py | 43 +++++++-- backend/v2/deposits/__init__.py | 0 backend/v2/deposits/contracts.py | 43 +++++++++ backend/v2/deposits/dependencies.py | 25 +++++ backend/v2/epochs/dependencies.py | 32 +++++-- backend/v2/epochs/subgraphs.py | 5 + backend/v2/glms/__init__.py | 0 backend/v2/glms/contracts.py | 93 +++++++++++++++++++ backend/v2/glms/dependencies.py | 25 +++++ backend/v2/main.py | 24 ++++- backend/v2/projects/depdendencies.py | 43 +++++++-- .../v2/uniqueness_quotients/dependencies.py | 17 +++- .../v2/uniqueness_quotients/repositories.py | 1 + backend/v2/user_patron_mode/repositories.py | 7 ++ 27 files changed, 570 insertions(+), 46 deletions(-) create mode 100644 backend/v2/allocations/router.py create mode 100644 backend/v2/deposits/__init__.py create mode 100644 backend/v2/deposits/contracts.py create mode 100644 backend/v2/deposits/dependencies.py create mode 100644 backend/v2/glms/__init__.py create mode 100644 backend/v2/glms/contracts.py create mode 100644 backend/v2/glms/dependencies.py diff --git a/backend/app/infrastructure/database/allocations.py b/backend/app/infrastructure/database/allocations.py index 9dec498d88..6ce55c621e 
100644 --- a/backend/app/infrastructure/database/allocations.py +++ b/backend/app/infrastructure/database/allocations.py @@ -259,13 +259,20 @@ def get_allocation_request_by_user_and_epoch( def get_user_last_allocation_request(user_address: str) -> AllocationRequest | None: - return ( + import time + + start = time.time() + + result = ( AllocationRequest.query.join(User, User.id == AllocationRequest.user_id) .filter(User.address == user_address) .order_by(AllocationRequest.nonce.desc()) .first() ) + print("?????????get_user_last_allocation_request", time.time() - start) + return result + def get_user_allocation_epoch_count(user_address: str) -> int: epoch_count = ( diff --git a/backend/app/infrastructure/database/pending_epoch_snapshot.py b/backend/app/infrastructure/database/pending_epoch_snapshot.py index 60e8c18791..8733bb5f92 100644 --- a/backend/app/infrastructure/database/pending_epoch_snapshot.py +++ b/backend/app/infrastructure/database/pending_epoch_snapshot.py @@ -21,8 +21,10 @@ def get_by_epoch_num(epoch) -> PendingEpochSnapshot: def get_by_epoch(epoch: int) -> Optional[PendingEpochSnapshot]: - return PendingEpochSnapshot.query.filter_by(epoch=epoch).first() - + sp = PendingEpochSnapshot.query.filter_by(epoch=epoch).first() + print("Engine url", db.engine.url) + print(">sp", sp) + return sp def get_last_snapshot() -> PendingEpochSnapshot: snapshot = ( diff --git a/backend/app/settings.py b/backend/app/settings.py index d60eacf8df..5c9e73d87e 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -109,7 +109,8 @@ class DevConfig(Config): CHAIN_ID = int(os.getenv("CHAIN_ID", 1337)) # Put the db file in project root DB_PATH = os.path.join(Config.PROJECT_ROOT, DB_NAME) - SQLALCHEMY_DATABASE_URI = f"sqlite:///{DB_PATH}" + # SQLALCHEMY_DATABASE_URI = f"sqlite:///{DB_PATH}" + SQLALCHEMY_DATABASE_URI = os.getenv("DB_URI") SUBGRAPH_RETRY_TIMEOUT_SEC = 2 X_REAL_IP_REQUIRED = parse_bool(os.getenv("X_REAL_IP_REQUIRED", "false")) CACHE_TYPE = "SimpleCache" diff --git a/backend/poetry.lock b/backend/poetry.lock index 3d379525a8..83bf36a062 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -230,6 +230,63 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "asyncpg" +version = "0.29.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = 
"asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, + {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, + {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, + {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = 
"sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, + {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""} + +[package.extras] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] + [[package]] name = "attrs" version = "23.2.0" @@ -3702,4 +3759,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "16f114d4cb7ff5c5e93c64c3feb4a4a27ca7e7998c556380dbec82b6d2668d77" +content-hash = "dbf09ab41544d32f718ee8267f411059f064357d9cd3cf0a263d1ecababb8f24" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index b57281164e..f1aed379ed 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -34,6 +34,7 @@ mypy = "^1.11.2" isort = "^5.13.2" pydantic-settings = "^2.4.0" uvicorn = "^0.30.6" +asyncpg = "^0.29.0" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" diff --git a/backend/startup.py b/backend/startup.py index dc3596d3e8..26b6c7185a 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -81,4 +81,5 @@ async def dispatch(self, request: Request, call_next): if __name__ == "__main__": import uvicorn - uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) + # uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) + uvicorn.run(fastapi_app, port=5000) diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py index 9a58f3a92d..f1533c366c 100644 --- a/backend/v2/allocations/dependencies.py +++ b/backend/v2/allocations/dependencies.py @@ -13,33 +13,42 @@ ) from v2.uniqueness_quotients.dependencies import get_uq_score_getter from v2.uniqueness_quotients.services import UQScoreGetter -from v2.core.dependencies import AsyncDbSession +from v2.core.dependencies import GetSession, OctantSettings from .services import Allocations from .validators import SignatureVerifier -class SignatureVerifierSettings(BaseSettings): +class SignatureVerifierSettings(OctantSettings): + chain_id: int = Field( default=11155111, 
description="The chain id to use for the signature verification.", ) +def get_signature_verifier_settings() -> SignatureVerifierSettings: + return SignatureVerifierSettings() + def get_signature_verifier( - session: AsyncDbSession, + session: GetSession, epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], projects_contracts: Annotated[ProjectsContracts, Depends(get_projects_contracts)], - settings: Annotated[SignatureVerifierSettings, Depends(SignatureVerifierSettings)], + settings: Annotated[SignatureVerifierSettings, Depends(get_signature_verifier_settings)], ) -> SignatureVerifier: return SignatureVerifier( session, epochs_subgraph, projects_contracts, settings.chain_id ) +GetSignatureVerifier = Annotated[ + SignatureVerifier, + Depends(get_signature_verifier) +] + def get_allocations( - session: AsyncDbSession, - signature_verifier: SignatureVerifier, + session: GetSession, + signature_verifier: GetSignatureVerifier, uq_score_getter: Annotated[UQScoreGetter, Depends(get_uq_score_getter)], projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], estimated_project_matched_rewards: Annotated[ @@ -53,3 +62,9 @@ def get_allocations( projects, estimated_project_matched_rewards, ) + + +GetAllocations = Annotated[ + Allocations, + Depends(get_allocations) +] \ No newline at end of file diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 76e8374245..133b023c9e 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -132,6 +132,15 @@ async def get_last_allocation_request_nonce( ) -> int | None: """Get the last nonce of the allocation requests for a user.""" + result = await session.execute( + select(AllocationRequestDB.nonce). + join(User, AllocationRequestDB.user_id == User.id). + filter(User.address == user_address). + order_by(AllocationRequestDB.nonce.desc()). + limit(1) + ) + return result.scalar() + user = await get_user_by_address(session, user_address) if user is None: return None diff --git a/backend/v2/allocations/router.py b/backend/v2/allocations/router.py new file mode 100644 index 0000000000..323b53fe2c --- /dev/null +++ b/backend/v2/allocations/router.py @@ -0,0 +1,39 @@ +from fastapi import APIRouter + +from v2.epochs.dependencies import GetEpochsContracts + +from .dependencies import GetAllocations +from .schemas import UserAllocationRequest, UserAllocationRequestV1 + +api = APIRouter(prefix="/allocations", tags=["allocations"]) + + +@api.post("/allocate", status_code=201) +async def allocate( + # Component dependencies + epochs_contracts: GetEpochsContracts, + allocations: GetAllocations, + # Arguments + allocation_request: UserAllocationRequestV1, +) -> None: + """ + Make an allocation for the user. 
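Given the camelCase alias generator on the request models defined later in this patch, a call to the new endpoint (POST /allocations/allocate) carries a body shaped roughly like this; the address, amounts and signature are placeholders:

    example_request_body = {
        "userAddress": "0x1111111111111111111111111111111111111111",
        "payload": {
            "allocations": [
                {"proposalAddress": "0x2222222222222222222222222222222222222222", "amount": 10**15},
            ],
            "nonce": 0,
        },
        "signature": "0x...",        # EIP-712 signature over the payload
        "isManuallyEdited": False,
    }

The handler then repacks this nested body into the flat UserAllocationRequest used by the service layer.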
+ """ + + request = UserAllocationRequest( + user_address=allocation_request.user_address, + allocations=allocation_request.payload.allocations, + nonce=allocation_request.payload.nonce, + signature=allocation_request.signature, + is_manually_edited=allocation_request.is_manually_edited, + ) + + print("allocation_request", allocation_request) + current_epoch = await epochs_contracts.get_current_epoch() + print("current_epoch", current_epoch) + # get pending epoch + pending_epoch = await epochs_contracts.get_pending_epoch() + print("pending_epoch", pending_epoch) + + await allocations.make(pending_epoch, request) + diff --git a/backend/v2/allocations/schemas.py b/backend/v2/allocations/schemas.py index 8f1a4a9fe6..9ed60b366f 100644 --- a/backend/v2/allocations/schemas.py +++ b/backend/v2/allocations/schemas.py @@ -1,7 +1,7 @@ from decimal import Decimal -from pydantic import BaseModel, ConfigDict - +from pydantic import BaseModel, ConfigDict, Field +from pydantic.alias_generators import to_camel class AllocationWithUserUQScore(BaseModel): model_config = ConfigDict(frozen=True) @@ -13,12 +13,31 @@ class AllocationWithUserUQScore(BaseModel): class AllocationRequest(BaseModel): - model_config = ConfigDict(frozen=True) + model_config = ConfigDict(frozen=True, alias_generator=to_camel) - project_address: str + project_address: str = Field(..., alias='proposalAddress') amount: int + # first_name: str = Field(..., alias='firstName') + # last_name: str = Field(..., alias='lastName') + # age: int = Field(..., alias='age') + +class UserAllocationRequestPayloadV1(BaseModel): + model_config = ConfigDict(frozen=True, alias_generator=to_camel) + + allocations: list[AllocationRequest] + nonce: int + +class UserAllocationRequestV1(BaseModel): + model_config = ConfigDict(frozen=True, alias_generator=to_camel) + + user_address: str + payload: UserAllocationRequestPayloadV1 + signature: str + is_manually_edited: bool + + class UserAllocationRequest(BaseModel): model_config = ConfigDict(frozen=True) diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index 686819c667..da36f7c608 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +import time from app import exceptions from sqlalchemy.ext.asyncio import AsyncSession @@ -59,18 +60,31 @@ async def allocate( epoch_number: int, request: UserAllocationRequest, ) -> str: + + import time + + allocation_time = time.time() # Verify the signature await signature_verifier.verify( epoch_number=epoch_number, request=request, ) + print("signature verified in", time.time() - allocation_time) + + + uq_score_time = time.time() + # Get or calculate UQ score of the user user_uq_score = await uq_score_getter.get_or_calculate( epoch_number=epoch_number, user_address=request.user_address, ) + print("uq score retrieved in", time.time() - uq_score_time) + + + new_allocations_time = time.time() # Calculate leverage by simulating the allocation new_allocations = [ AllocationWithUserUQScore( @@ -90,6 +104,16 @@ async def allocate( new_allocations=new_allocations, ) + print("new allocations calculated in", time.time() - new_allocations_time) + + print("leverage", leverage) + print("request.user_address", request.user_address) + + # print("I'm here") + # return "I'm here" + + soft_delete_time = time.time() + await soft_delete_user_allocations_by_epoch( session, user_address=request.user_address, @@ -114,6 +138,8 @@ async def allocate( # Commit the transaction await 
session.commit() + print("soft delete and store allocation request in", time.time() - soft_delete_time) + return request.user_address @@ -130,6 +156,8 @@ async def simulate_leverage( Calculate leverage of the allocation made by the user. """ + start_time = time.time() + all_projects = await projects.get_project_addresses(epoch_number) matched_rewards = await estimated_project_matched_rewards.get(epoch_number) @@ -137,6 +165,8 @@ async def simulate_leverage( # Get all allocations before user's allocation existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) + print("existing allocations retrieved in", time.time() - start_time) + return cqf_simulate_leverage( existing_allocations=existing_allocations, new_allocations=new_allocations, diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index c95ea2899a..4a1804992e 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -76,6 +76,7 @@ def create_dependencies_on_connect( async def handle_on_connect( self, + session: AsyncSession, epochs_contracts: EpochsContracts, projects_allocation_threshold_getter: ProjectsAllocationThresholdGetter, estimated_project_rewards: EstimatedProjectRewards, @@ -109,6 +110,19 @@ async def handle_on_connect( await self.emit("project_rewards", rewards) + for project_address in project_rewards.amounts_by_project.keys(): + donations = await get_donations_by_project( + session=session, + project_address=project_address, + epoch_number=pending_epoch_number, + ) + + await self.emit( + "project_donors", + {"project": project_address, "donors": donations}, + ) + + async def on_connect(self, sid: str, environ: dict): async with get_db_session(DatabaseSettings()) as session: ( @@ -118,6 +132,7 @@ async def on_connect(self, sid: str, environ: dict): ) = self.create_dependencies_on_connect(session) await self.handle_on_connect( + session, epochs_contracts, projects_allocation_threshold_getter, estimated_project_rewards, diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py index be17df9d5c..131aace12f 100644 --- a/backend/v2/allocations/validators.py +++ b/backend/v2/allocations/validators.py @@ -24,6 +24,9 @@ class SignatureVerifier: chain_id: int async def verify(self, epoch_number: int, request: UserAllocationRequest) -> None: + import time + start = time.time() + await verify_logic( session=self.session, epoch_subgraph=self.epochs_subgraph, @@ -31,6 +34,9 @@ async def verify(self, epoch_number: int, request: UserAllocationRequest) -> Non epoch_number=epoch_number, payload=request, ) + + print("verify_logic", time.time() - start) + await verify_signature( w3=self.projects_contracts.w3, chain_id=self.chain_id, @@ -38,6 +44,8 @@ async def verify(self, epoch_number: int, request: UserAllocationRequest) -> Non payload=request, ) + print("verify_signature", time.time() - start) + async def verify_logic( # Component dependencies @@ -53,15 +61,21 @@ async def verify_logic( # if epoch_details.state != "PENDING": # raise exceptions.NotInDecision + import time + # Check if the allocations are not empty if not payload.allocations: raise exceptions.EmptyAllocations() + start = time.time() # Check if the nonce is as expected expected_nonce = await get_next_user_nonce(session, payload.user_address) if payload.nonce != expected_nonce: raise exceptions.WrongAllocationsNonce(payload.nonce, expected_nonce) + print("get_next_user_nonce", time.time() - start) + + start = time.time() # Check if the user is not a patron 
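The timing prints added throughout this commit all follow the same start = time.time(); ...; print(...) pattern; a small helper could express it once. A minimal sketch, not something this patch itself adds:

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed(label: str):
        start = time.perf_counter()
        try:
            yield
        finally:
            print(f"{label}: {time.perf_counter() - start:.3f}s")

    # e.g. inside SignatureVerifier.verify:
    #     with timed("verify_logic"):
    #         await verify_logic(...)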
epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) is_patron = await user_is_patron_with_budget( @@ -73,6 +87,11 @@ async def verify_logic( if is_patron: raise exceptions.NotAllowedInPatronMode(payload.user_address) + print("user_is_patron_with_budget", time.time() - start) + + + start = time.time() + # Check if the user is not a project all_projects = await projects_contracts.get_project_addresses(epoch_number) if payload.user_address in all_projects: @@ -80,6 +99,10 @@ async def verify_logic( project_addresses = [a.project_address for a in payload.allocations] + print("get_project_addresses", time.time() - start) + + start = time.time() + # Check if the projects are valid invalid_projects = set(project_addresses) - set(all_projects) if invalid_projects: @@ -90,6 +113,10 @@ async def verify_logic( if duplicates: raise exceptions.DuplicatedProjects(duplicates) + print("invalid_projects", time.time() - start) + + + start = time.time() # Get the user's budget user_budget = await get_budget_by_user_address_and_epoch( session, payload.user_address, epoch_number @@ -102,6 +129,7 @@ async def verify_logic( if sum(a.amount for a in payload.allocations) > user_budget: raise exceptions.RewardsBudgetExceeded() + print("get_budget_by_user_address_and_epoch", time.time() - start) async def get_next_user_nonce( # Component dependencies @@ -118,6 +146,8 @@ async def get_next_user_nonce( session, user_address ) + print("last_allocation_request", last_allocation_request) + # Calculate the next nonce if last_allocation_request is None: return 0 diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index 84325af330..e4f6e01b39 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -4,19 +4,26 @@ from fastapi import Depends from app.infrastructure.database.models import BaseModel from pydantic import Field -from pydantic_settings import BaseSettings +from pydantic_settings import BaseSettings, SettingsConfigDict from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware -class Web3ProviderSettings(BaseSettings): +class OctantSettings(BaseSettings): + model_config = SettingsConfigDict(env_file='.env', extra='ignore') + + +class Web3ProviderSettings(OctantSettings): eth_rpc_provider_url: str +def get_web3_provider_settings() -> Web3ProviderSettings: + return Web3ProviderSettings() + def get_w3( - settings: Annotated[Web3ProviderSettings, Depends(Web3ProviderSettings)] + settings: Annotated[Web3ProviderSettings, Depends(get_web3_provider_settings)] ) -> AsyncWeb3: w3 = AsyncWeb3(provider=AsyncHTTPProvider(settings.eth_rpc_provider_url)) if async_geth_poa_middleware not in w3.middleware_onion: @@ -28,10 +35,18 @@ def get_w3( Web3 = Annotated[AsyncWeb3, Depends(get_w3)] -class DatabaseSettings(BaseSettings): - sqlalchemy_database_uri: str = Field(validation_alias="db_uri") +class DatabaseSettings(OctantSettings): + db_uri: str = Field(..., alias="db_uri") # TODO other settings of the database + @property + def sqlalchemy_database_uri(self) -> str: + return self.db_uri.replace("postgresql://", "postgresql+asyncpg://") + + +def get_database_settings() -> DatabaseSettings: + return DatabaseSettings() + async def create_tables(): settings = DatabaseSettings() @@ -40,15 +55,25 @@ async def create_tables(): await conn.run_sync(BaseModel.metadata.create_all) -@asynccontextmanager +# @asynccontextmanager async def 
get_db_session( - settings: Annotated[DatabaseSettings, Depends(DatabaseSettings)] + settings: Annotated[DatabaseSettings, Depends(get_database_settings)] ) -> AsyncGenerator[AsyncSession, None]: # Create an async SQLAlchemy engine # logging.error("Creating database engine") - engine = create_async_engine(settings.sqlalchemy_database_uri) + engine = create_async_engine( + settings.sqlalchemy_database_uri, + echo=False, # Disable SQL query logging (for performance) + pool_size=20, # Initial pool size (default is 5) + max_overflow=10, # Extra connections if pool is exhausted + pool_timeout=30, # Timeout before giving up on a connection + pool_recycle=3600, # Recycle connections after 1 hour (for long-lived connections) + pool_pre_ping=True, # Check if the connection is alive before using it + future=True, # Use the future-facing SQLAlchemy 2.0 style + # connect_args={"options": "-c timezone=utc"} # Ensures timezone is UTC + ) # Create a sessionmaker with AsyncSession class async_session = async_sessionmaker( @@ -69,4 +94,4 @@ async def get_db_session( await session.close() -AsyncDbSession = Annotated[AsyncSession, Depends(get_db_session)] +GetSession = Annotated[AsyncSession, Depends(get_db_session)] diff --git a/backend/v2/deposits/__init__.py b/backend/v2/deposits/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/deposits/contracts.py b/backend/v2/deposits/contracts.py new file mode 100644 index 0000000000..df4b6025b4 --- /dev/null +++ b/backend/v2/deposits/contracts.py @@ -0,0 +1,43 @@ + + + +from v2.core.contracts import SmartContract + + +from typing import Protocol + +class AddressKey(Protocol): + address: str + key: str + + +class DepositsContracts(SmartContract): + + def lock(self, account: AddressKey, amount: int): + nonce = self.w3.eth.get_transaction_count(account.address) + transaction = self.contract.functions.lock(amount).build_transaction( + {"from": account.address, "nonce": nonce} + ) + signed_tx = self.w3.eth.account.sign_transaction(transaction, account.key) + return self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) + + def balance_of(self, owner_address: str) -> int: + return self.contract.functions.deposits(owner_address).call() + + +DEPOSITS_ABI = [ + { + "inputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}], + "name": "lock", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function", + }, + { + "inputs": [{"internalType": "address", "name": "", "type": "address"}], + "name": "deposits", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, +] diff --git a/backend/v2/deposits/dependencies.py b/backend/v2/deposits/dependencies.py new file mode 100644 index 0000000000..095561ede1 --- /dev/null +++ b/backend/v2/deposits/dependencies.py @@ -0,0 +1,25 @@ + + + +from typing import Annotated +from fastapi import Depends +from pydantic_settings import BaseSettings + +from v2.core.dependencies import OctantSettings, Web3 + + +from .contracts import DepositsContracts, DEPOSITS_ABI + + +class DepositsSettings(OctantSettings): + deposits_contract_address: str + + +def get_deposits_settings() -> DepositsSettings: + return DepositsSettings() + + +def get_deposits_contracts( + w3: Web3, settings: Annotated[DepositsSettings, Depends(get_deposits_settings)] +) -> DepositsContracts: + return DepositsContracts(w3, DEPOSITS_ABI, settings.deposits_contract_address) diff --git a/backend/v2/epochs/dependencies.py 
b/backend/v2/epochs/dependencies.py index 4cc8acfc50..47f9dda7ee 100644 --- a/backend/v2/epochs/dependencies.py +++ b/backend/v2/epochs/dependencies.py @@ -1,28 +1,48 @@ from typing import Annotated from fastapi import Depends -from pydantic_settings import BaseSettings -from v2.core.dependencies import Web3 +from v2.core.dependencies import OctantSettings, Web3 from .contracts import EPOCHS_ABI, EpochsContracts from .subgraphs import EpochsSubgraph -class EpochsSettings(BaseSettings): + +class EpochsSettings(OctantSettings): epochs_contract_address: str +def get_epochs_settings() -> EpochsSettings: + return EpochsSettings() + + def get_epochs_contracts( - w3: Web3, settings: Annotated[EpochsSettings, Depends(EpochsSettings)] + w3: Web3, settings: Annotated[EpochsSettings, Depends(get_epochs_settings)] ) -> EpochsContracts: return EpochsContracts(w3, EPOCHS_ABI, settings.epochs_contract_address) -class EpochsSubgraphSettings(BaseSettings): +GetEpochsContracts = Annotated[ + EpochsContracts, + Depends(get_epochs_contracts), +] + + +class EpochsSubgraphSettings(OctantSettings): subgraph_endpoint: str +def get_epochs_subgraph_settings() -> EpochsSubgraphSettings: + return EpochsSubgraphSettings() + + def get_epochs_subgraph( - settings: Annotated[EpochsSubgraphSettings, Depends(EpochsSubgraphSettings)] + settings: Annotated[EpochsSubgraphSettings, Depends(get_epochs_subgraph_settings)] ) -> EpochsSubgraph: return EpochsSubgraph(settings.subgraph_endpoint) + + +GetEpochsSubgraph = Annotated[ + EpochsSubgraph, + Depends(get_epochs_subgraph), +] \ No newline at end of file diff --git a/backend/v2/epochs/subgraphs.py b/backend/v2/epochs/subgraphs.py index d7f8e6cf72..66e78d2390 100644 --- a/backend/v2/epochs/subgraphs.py +++ b/backend/v2/epochs/subgraphs.py @@ -67,6 +67,9 @@ def __init__( async def get_epoch_by_number(self, epoch_number: int) -> EpochDetails: """Get EpochDetails from the subgraph for a given epoch number.""" + import time + + start = time.time() logging.debug( f"[Subgraph] Getting epoch properties for epoch number: {epoch_number}" @@ -104,6 +107,8 @@ async def get_epoch_by_number(self, epoch_number: int) -> EpochDetails: epoch_details = data[0] + print(f"Time taken to get epoch details: {time.time() - start}") + return EpochDetails( epoch_num=epoch_details["epoch"], start=epoch_details["fromTs"], diff --git a/backend/v2/glms/__init__.py b/backend/v2/glms/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/v2/glms/contracts.py b/backend/v2/glms/contracts.py new file mode 100644 index 0000000000..4e1dc32681 --- /dev/null +++ b/backend/v2/glms/contracts.py @@ -0,0 +1,93 @@ + + + +from v2.core.contracts import SmartContract + + +from typing import Protocol + +class AddressKey(Protocol): + address: str + key: str + + +class GLMContracts(SmartContract): + + + # def glm_fund(self, to_address, nonce): + # transaction = self.contract.functions.transfer( + # to_address, app.config["GLM_WITHDRAWAL_AMOUNT"] + # ).build_transaction({"from": app.config["GLM_SENDER_ADDRESS"], "nonce": nonce}) + # signed_tx = self.w3.eth.account.sign_transaction( + # transaction, app.config["GLM_SENDER_PRIVATE_KEY"] + # ) + # return self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) + + # def transfer(self, sender, receiver: str, amount: int): + # async def transfer(self, sender_address: str, receiver: str, amount: int): + async def transfer( + self, + sender: AddressKey, + receiver_address: str, + amount: int + ) -> None: + + nonce = await 
self.w3.eth.get_transaction_count(sender) + transaction = self.contract.functions.transfer( + receiver_address, amount + ).build_transaction({"from": sender.address, "nonce": nonce}) + signed_tx = self.w3.eth.account.sign_transaction(transaction, sender.key) + await self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) + + async def approve(self, owner: AddressKey, benefactor_address, wad: int): + print("owner of lock: ", owner) + print("owner address: ", owner.address) + print("owner key: ", owner.key) + print("benefactor of lock: ", benefactor_address) + nonce = self.w3.eth.get_transaction_count(owner.address) + transaction = self.contract.functions.approve( + benefactor_address, wad + ).build_transaction({"from": owner.address, "nonce": nonce}) + signed_tx = self.w3.eth.account.sign_transaction(transaction, owner.key) + return self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) + + # def balance_of(self, owner: str) -> int: + # return self.contract.functions.balanceOf(owner).call() + + +ERC20_ABI = [ + { + "inputs": [], + "name": "totalSupply", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [{"internalType": "address", "name": "account", "type": "address"}], + "name": "balanceOf", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + }, + { + "inputs": [ + {"internalType": "address", "name": "to", "type": "address"}, + {"internalType": "uint256", "name": "amount", "type": "uint256"}, + ], + "name": "transfer", + "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + "stateMutability": "nonpayable", + "type": "function", + }, + { + "inputs": [ + {"internalType": "address", "name": "usr", "type": "address"}, + {"internalType": "uint256", "name": "wad", "type": "uint256"}, + ], + "name": "approve", + "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + "stateMutability": "nonpayable", + "type": "function", + }, +] diff --git a/backend/v2/glms/dependencies.py b/backend/v2/glms/dependencies.py new file mode 100644 index 0000000000..4c7ca272d8 --- /dev/null +++ b/backend/v2/glms/dependencies.py @@ -0,0 +1,25 @@ + + + +from typing import Annotated +from fastapi import Depends +from pydantic_settings import BaseSettings + +from v2.core.dependencies import OctantSettings, Web3 + + +from .contracts import GLMContracts, ERC20_ABI + + +class GLMSettings(OctantSettings): + glm_contract_address: str + + +def get_glm_settings() -> GLMSettings: + return GLMSettings() + + +def get_glm_contracts( + w3: Web3, settings: Annotated[GLMSettings, Depends(get_glm_settings)] +) -> GLMContracts: + return GLMContracts(w3, ERC20_ABI, settings.glm_contract_address) diff --git a/backend/v2/main.py b/backend/v2/main.py index 162aeb8338..a816b84119 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -1,11 +1,33 @@ # Create FastAPI app +import logging +from fastapi.responses import JSONResponse import socketio from fastapi import FastAPI +from app.exceptions import OctantException from v2.allocations.socket import AllocateNamespace +from sqlalchemy.exc import SQLAlchemyError + +from v2.allocations.router import api as allocations_api fastapi_app = FastAPI() +@fastapi_app.exception_handler(OctantException) +async def handle_octant_exception(request, ex: OctantException): + return JSONResponse( + status_code=ex.status_code, + content={"message": ex.message}, + ) + 
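Read together with the DepositsContracts wrapper added earlier in this patch, the expected client-side locking flow appears to be approve-then-lock. A sketch under that assumption (the account, contract instances and deposits_address are hypothetical, and error handling is omitted):

    # user: any object with .address and .key (the AddressKey protocol above)
    # glm: a GLMContracts instance, deposits: a DepositsContracts instance
    amount = 100 * 10**18                               # 100 GLM, in wei
    await glm.approve(user, deposits_address, amount)   # allow Deposits to pull the GLM
    deposits.lock(user, amount)                         # lock the approved amount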
+@fastapi_app.exception_handler(SQLAlchemyError) +async def handle_sqlalchemy_exception(request, ex: SQLAlchemyError): + logging.error(f"SQLAlchemyError: {ex}") + return JSONResponse( + status_code=500, + content={"message": "Internal server error"}, + ) + + @fastapi_app.get("/fastapi-endpoint") async def fastapi_endpoint(): return {"message": "This is a FastAPI endpoint."} @@ -18,6 +40,6 @@ async def fastapi_endpoint(): fastapi_app.add_route("/socket.io/", route=sio_asgi_app) fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) - +fastapi_app.include_router(allocations_api) # from v2.core.dependencies import create_tables # fastapi_app.add_event_handler("startup", create_tables) diff --git a/backend/v2/projects/depdendencies.py b/backend/v2/projects/depdendencies.py index 6b30e7c7a2..c5f61772f5 100644 --- a/backend/v2/projects/depdendencies.py +++ b/backend/v2/projects/depdendencies.py @@ -5,7 +5,7 @@ from pydantic_settings import BaseSettings from v2.epochs.dependencies import get_epochs_subgraph from v2.epochs.subgraphs import EpochsSubgraph -from v2.core.dependencies import AsyncDbSession, Web3 +from v2.core.dependencies import GetSession, OctantSettings, Web3 from .contracts import PROJECTS_ABI, ProjectsContracts @@ -16,31 +16,44 @@ ) -class ProjectsSettings(BaseSettings): +class ProjectsSettings(OctantSettings): projects_contract_address: str = Field( validation_alias="proposals_contract_address" ) +def get_projects_settings() -> ProjectsSettings: + return ProjectsSettings() + + def get_projects_contracts( - w3: Web3, settings: Annotated[ProjectsSettings, Depends(ProjectsSettings)] + w3: Web3, settings: Annotated[ProjectsSettings, Depends(get_projects_settings)] ) -> ProjectsContracts: return ProjectsContracts(w3, PROJECTS_ABI, settings.projects_contract_address) -class ProjectsAllocationThresholdSettings(BaseSettings): +GetProjectsContracts = Annotated[ + ProjectsContracts, + Depends(get_projects_contracts), +] + + +class ProjectsAllocationThresholdSettings(OctantSettings): project_count_multiplier: int = Field( default=1, description="The multiplier to the number of projects to calculate the allocation threshold.", ) +def get_projects_allocation_threshold_settings() -> ProjectsAllocationThresholdSettings: + return ProjectsAllocationThresholdSettings() + def get_projects_allocation_threshold_getter( - session: AsyncDbSession, + session: GetSession, projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], settings: Annotated[ ProjectsAllocationThresholdSettings, - Depends(ProjectsAllocationThresholdSettings), + Depends(get_projects_allocation_threshold_settings), ], ) -> ProjectsAllocationThresholdGetter: return ProjectsAllocationThresholdGetter( @@ -48,7 +61,7 @@ def get_projects_allocation_threshold_getter( ) -class EstimatedProjectMatchedRewardsSettings(BaseSettings): +class EstimatedProjectMatchedRewardsSettings(OctantSettings): TR_PERCENT: Decimal = Field( default=Decimal("0.7"), description="The percentage of the TR rewards." 
) @@ -60,12 +73,16 @@ class EstimatedProjectMatchedRewardsSettings(BaseSettings): ) +def get_estimated_project_matched_rewards_settings() -> EstimatedProjectMatchedRewardsSettings: + return EstimatedProjectMatchedRewardsSettings() + + def get_estimated_project_matched_rewards( - session: AsyncDbSession, + session: GetSession, epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], settings: Annotated[ EstimatedProjectMatchedRewardsSettings, - Depends(EstimatedProjectMatchedRewardsSettings), + Depends(get_estimated_project_matched_rewards_settings), ], ) -> EstimatedProjectMatchedRewards: return EstimatedProjectMatchedRewards( @@ -78,7 +95,7 @@ def get_estimated_project_matched_rewards( def get_estimated_project_rewards( - session: AsyncDbSession, + session: GetSession, projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], estimated_project_matched_rewards: Annotated[ EstimatedProjectMatchedRewards, Depends(get_estimated_project_matched_rewards) @@ -89,3 +106,9 @@ def get_estimated_project_rewards( projects=projects, estimated_matched_rewards=estimated_project_matched_rewards, ) + + +GetEstimatedProjectMatchedRewards = Annotated[ + EstimatedProjectMatchedRewards, + Depends(get_estimated_project_matched_rewards), +] \ No newline at end of file diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index 46739bbb26..966fa9eccd 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -5,11 +5,11 @@ from pydantic import Field from pydantic_settings import BaseSettings -from v2.core.dependencies import AsyncDbSession +from v2.core.dependencies import GetSession, OctantSettings from .services import UQScoreGetter -class UQScoreSettings(BaseSettings): +class UQScoreSettings(OctantSettings): uq_score_threshold: float = Field( default=21.0, description="The Gitcoin Passport score threshold above which the UQ score is set to the maximum UQ score.", @@ -24,9 +24,13 @@ class UQScoreSettings(BaseSettings): ) +def get_uq_score_settings() -> UQScoreSettings: + return UQScoreSettings() + + def get_uq_score_getter( - session: AsyncDbSession, - settings: Annotated[UQScoreSettings, Depends(UQScoreSettings)], + session: GetSession, + settings: Annotated[UQScoreSettings, Depends(get_uq_score_settings)], ) -> UQScoreGetter: return UQScoreGetter( session=session, @@ -34,3 +38,8 @@ def get_uq_score_getter( max_uq_score=settings.max_uq_score, low_uq_score=settings.low_uq_score, ) + +GetUQScoreGetter = Annotated[ + UQScoreGetter, + Depends(get_uq_score_getter) +] diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py index c9d3376ed9..a134ef30b1 100644 --- a/backend/v2/uniqueness_quotients/repositories.py +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -55,6 +55,7 @@ async def get_gp_stamps_by_address( .join(User) .filter(User.address == to_checksum_address(user_address)) .order_by(GPStamps.created_at.desc()) + .limit(1) ) return result.scalar_one_or_none() diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py index 7ac9672b22..3295d5f5df 100644 --- a/backend/v2/user_patron_mode/repositories.py +++ b/backend/v2/user_patron_mode/repositories.py @@ -79,9 +79,14 @@ async def get_budget_by_user_address_and_epoch( """ user = await get_user_by_address(session, user_address) + + print("user", user) if user is None: return None + + print("epoch", epoch) + # 
epoch -= 1 result = await session.execute( select(Budget.budget) .filter(Budget.user_id == user.id) @@ -90,6 +95,8 @@ async def get_budget_by_user_address_and_epoch( budget = result.scalar() + print("budget", budget) + if budget is None: return None From ffdc06e223996ce09f905302ebeaca3447e84e1a Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 1 Oct 2024 11:58:39 +0200 Subject: [PATCH 08/31] adds metric files --- backend/socket_client.py | 180 ++++++++++++++++++++++++- backend/ws_allocation_tester.py | 229 ++++++++++++++++++++++++++++++++ backend/ws_metrics.py | 126 ++++++++++++++++++ backend/ws_req_metrics.py | 179 +++++++++++++++++++++++++ 4 files changed, 713 insertions(+), 1 deletion(-) create mode 100644 backend/ws_allocation_tester.py create mode 100644 backend/ws_metrics.py create mode 100644 backend/ws_req_metrics.py diff --git a/backend/socket_client.py b/backend/socket_client.py index b49fff7767..f34dcc09a7 100644 --- a/backend/socket_client.py +++ b/backend/socket_client.py @@ -1,6 +1,9 @@ import asyncio import socketio +from app.legacy.crypto.eip712 import sign +from v2.allocations.validators import build_allocations_eip712_data + # Create a Socket.IO client sio = socketio.AsyncClient(logger=True, engineio_logger=True) @@ -81,5 +84,180 @@ async def emit_event(event_name, data): # Run the client +# if __name__ == "__main__": +# asyncio.run(main()) + + +from tests.helpers.constants import ALICE, BOB +from v2.projects.depdendencies import ProjectsSettings, get_projects_contracts +from v2.core.dependencies import Web3ProviderSettings, get_w3 +from v2.epochs.dependencies import EpochsSettings, get_epochs_contracts +from v2.deposits.dependencies import DepositsSettings, get_deposits_contracts +from v2.glms.dependencies import GLMSettings, get_glm_contracts + +w3 = get_w3(Web3ProviderSettings()) +epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) +projects_contracts = get_projects_contracts(w3, ProjectsSettings()) +deposits_contracts = get_deposits_contracts(w3, DepositsSettings()) +glm_contracts = get_glm_contracts(w3, GLMSettings()) +# epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) + + +from eth_account.signers.local import LocalAccount +from eth_account import Account as EthAccount + + +async def move_to_next_epoch(target: int): + + assert await epochs_contracts.get_current_epoch() == target - 1 + now = (await w3.eth.get_block("latest")).timestamp + nextEpochAt = await epochs_contracts.get_current_epoch_end() + forward = nextEpochAt - now + 30 + # await w3.provider.make_request("evm_increaseTime", [forward]) + # await w3.provider.make_request("evm_mine", []) + # assert await epochs_contracts.get_current_epoch() == target + +chain_id = 11155111 # Sepolia + +mine = "a184bdfb5f83fcd76d7f6ac4ae530c69fa941845283b23aee8db411e31c8a367" +me: LocalAccount = EthAccount.from_key(mine) + +project_addresses = [ + "0x0B7246eF74Ca7b37Fdc3D15be4f0b49876622F95", + "0x0c9dc7622aE5f56491aB4cCe060d6002450B79D2", +] + +async def mine(): + current = await epochs_contracts.get_current_epoch() + ts = await epochs_contracts.get_current_epoch_end() + pending = await epochs_contracts.get_pending_epoch() + duration = await epochs_contracts.get_epoch_duration() + finalized = await epochs_contracts.get_finalized_epoch() + decision_window = await epochs_contracts.get_decision_window() + is_open = await epochs_contracts.is_decision_window_open() + + print("current: ", current) + print("ts: ", ts) + print("pending: ", pending) + print("duration: ", duration) + print("finalized: ", 
finalized) + print("decision_window: ", decision_window) + print("is_open: ", is_open) + + return ts + + +def allocate(): + + asyncio.run(mine()) + + # rv = self._flask_client.get(f"/allocations/users/{address}/allocation_nonce") + nonce = 0 + + + payload = { + "allocations": [ + { + "proposalAddress": address, + "amount": 10, + } + for address in project_addresses + ], + "nonce": nonce, + } + + data = build_allocations_eip712_data(chain_id, payload) + + signature = sign(me, data) + + + import requests + import time + + start = time.time() + resp = requests.post( + "https://uat-backend.octant.wildland.dev/allocations/allocate", + json={ + "payload": payload, + "userAddress": me.address, + "signature": signature, + }, + ) + print("time taken: ", time.time() - start) + + print("after request") + print(resp.status_code) + print(resp.json()) + + # rv = self._flask_client.post( + # "/allocations/allocate", + # json={ + # "payload": { + # "allocations": [ + # {"proposalAddress": address, "amount": amount} + # for address in addresses + # ], + # "nonce": nonce, + # }, + # "userAddress": account.address, + # "signature": signature, + # }, + # ) + # return rv.status_code + if __name__ == "__main__": - asyncio.run(main()) + allocate() + +# async def test_allocation(): + +# pending = await epochs_contracts.get_pending_epoch() + +# alice_proposals = await projects_contracts.get_project_addresses(1)[:3] + +# # alice_proposals = get_projects_addresses(1)[:3] +# alice: LocalAccount = EthAccount.from_key(ALICE) +# bob: LocalAccount = EthAccount.from_key(BOB) + +# await glm_contracts.approve(alice, deposits_contracts.contract.address, w3.to_wei(10000, "ether")) +# await deposits_contracts.lock(alice, w3.to_wei(10000, "ether")) + +# await glm_contracts.approve(bob, deposits_contracts.contract.address, w3.to_wei(15000, "ether")) +# await deposits_contracts.lock(bob, w3.to_wei(15000, "ether")) + +# # glm.approve(alice, deposits.contract.address, w3.to_wei(10000, "ether")) + +# # # lock GLM from two accounts +# # UserAccount(CryptoAccount.from_key(ALICE), client) +# # glm.approve(self._account, deposits.contract.address, w3.to_wei(value, "ether")) +# # deposits.lock(self._account, w3.to_wei(value, "ether")) + +# # ua_alice.lock(10000) +# # ua_bob.lock(15000) + +# # forward time to the beginning of the epoch 2 +# client.move_to_next_epoch(STARTING_EPOCH + 1) + +# # wait for indexer to catch up +# epoch_no = client.wait_for_sync(STARTING_EPOCH + 1) +# app.logger.debug(f"indexed epoch: {epoch_no}") + +# # make a snapshot +# res = client.pending_snapshot() +# assert res["epoch"] > 0 + +# ua_alice.allocate(1000, alice_proposals) +# ua_bob.allocate(1000, alice_proposals[:1]) + +# allocations, _ = client.get_epoch_allocations(STARTING_EPOCH) +# unique_donors = set() +# unique_proposals = set() +# app.logger.debug(f"Allocations: \n {allocations}") + +# assert len(allocations["allocations"]) == 4 +# for allocation in allocations["allocations"]: +# unique_donors.add(allocation["donor"]) +# unique_proposals.add(allocation["project"]) +# assert int(allocation["amount"]) > 0 +# app.logger.debug(f"Allocations: {allocations}") +# assert len(unique_donors) == 2 +# assert len(unique_proposals) == 3 diff --git a/backend/ws_allocation_tester.py b/backend/ws_allocation_tester.py new file mode 100644 index 0000000000..8e283af777 --- /dev/null +++ b/backend/ws_allocation_tester.py @@ -0,0 +1,229 @@ +import asyncio +import json +import multiprocessing +import os +import random +import sys +import socketio + +import time +import 
requests +from app.legacy.crypto.eip712 import sign +from v2.allocations.validators import build_allocations_eip712_data + +from eth_account.signers.local import LocalAccount +from eth_account import Account as EthAccount + + + +events = [] + +pre_allocate = True +donors_count = 0 + + +sio = socketio.AsyncClient(logger=False, engineio_logger=False) + + +# Define event handlers +@sio.on("connect") +async def connect(): + events.append({"event": "connect", "time": time.time()}) + +@sio.event +async def connect_error(data): + events.append({"event": "connect_error", "data": data, "time": time.time()}) + await sio.disconnect() + +@sio.on("connect_error") +async def connect_error(data): + events.append({"event": "connect_error", "data": data, "time": time.time()}) + await sio.disconnect() + +@sio.on("disconnect") +async def disconnect(): + events.append({"event": "disconnect", "time": time.time()}) + +@sio.on("project_rewards") +async def project_rewards(data): + events.append({"event": "project_rewards_received", "data": data, "time": time.time()}) + +@sio.on("project_donors") +async def project_donors(data): + global donors_count + if pre_allocate: + donors_count += 1 + else: + donors_count -= 1 + events.append({"event": "project_donors_received", "data": data, "time": time.time()}) + +@sio.on("threshold") +async def threshold(data): + events.append({"event": "threshold_received", "data": data, "time": time.time()}) + +chain_id = 11155111 # Sepolia + + + + +mine0 = os.getenv("MINE_KEY", None) +me: LocalAccount = EthAccount.from_key(mine0) + +# UAT projects +# project_addresses = [ +# "0xc6FD734790E83820e311211B6d9A682BCa4ac97b", +# "0x242ba6d68FfEb4a098B591B32d370F973FF882B7", +# ] +project_addresses = [ + "0x0B7246eF74Ca7b37Fdc3D15be4f0b49876622F95", + "0x0cbF31Ef6545EE30f47651D1A991Bf0aeB03DF29", + "0x1c01595f9534E33d411035AE99a4317faeC4f6Fe", +] + + +allocation_info = { + "request": None, + "signature_duration": None, + "send_time": None, +} + +async def allocate(): + # Emit a custom event + + events.append({"event": "nonce_request", "time": time.time()}) + + try: + # url = f"https://uat-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" + # url = f"https://master-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" + url = f"http://127.0.0.1:5000/allocations/users/{me.address}/allocation_nonce" + # url = f"http://127.0.0.1:5000/flask/allocations/users/{me.address}/allocation_nonce" + nonce = requests.get(url).json()['allocationNonce'] + + events.append({"event": "nonce_response", "data": nonce, "time": time.time()}) + + except Exception as e: + + events.append({"event": "error_nonce", "data": str(e), "time": time.time()}) + await sio.disconnect() + return + + sig_time = time.time() + + random_mult = random.random() + amout = int(12223333 * random_mult) + # print("Amount: ", amout) + + payload = { + "allocations": [ + { + "proposalAddress": address, + "amount": amout, + } + for address in project_addresses + ], + "nonce": nonce, + } + + events.append({"event": "payload_generated", "data": payload, "time": time.time()}) + + data = build_allocations_eip712_data(chain_id, payload) + + signature = sign(me, data) + + request_data = { + "userAddress": me.address, + "payload": payload, + "signature": signature, + "isManuallyEdited": False, + } + + # print("signature: ", signature) + # print("payload: ", payload) + + # print("time taken for signature: ", time.time() - sig_time) + + events.append({"event": "allocate_request", "data": request_data, 
"time": time.time()}) + + # await sio.emit("allocate", json.dumps(request_data)) + + resp = requests.post( + # "https://uat-backend.octant.wildland.dev/allocations/allocate", + # "https://master-backend.octant.wildland.dev/allocations/allocate", + "http://127.0.0.1:5000/allocations/allocate", + # "http://127.0.0.1:5000/flask/allocations/allocate", + json=request_data, + ) + + events.append({"event": "allocate_response", "data": resp.json(), "time": time.time(), "status_code": resp.status_code}) + + + # print("donors_count: ", donors_count) + + global pre_allocate + pre_allocate = False + + + +from uuid import uuid4 + +async def run_ws(): + + for i in range(10): + + global pre_allocate + global donors_count + global events + + donors_count = 0 + events = [] + pre_allocate = True + + print(f"Running test {i:02d} : {me.address[:6]}") + + try: + # await sio.connect('https://uat-backend.octant.wildland.dev/', wait_timeout=10) + await sio.connect('https://master-backend.octant.wildland.dev/', wait_timeout=10) + await allocate() + # Wait till donors count is 0 again + while donors_count > 0: + await asyncio.sleep(0.3) + await sio.disconnect() + except Exception as e: + events.append({"event": "error", "data": str(e), "time": time.time()}) + await sio.disconnect() + + with open(f"ws_logs/events_{uuid4()}.json", "w") as f: + json.dump(events, f, indent=4) + + +async def run_request(): + + for i in range(1): + + global pre_allocate + global donors_count + global events + + donors_count = 0 + events = [] + pre_allocate = True + + print(f"Running test {i:02d} : {me.address[:6]}") + + try: + await allocate() + except Exception as e: + events.append({"event": "error", "data": str(e), "time": time.time()}) + + with open(f"wr_logs/events_{uuid4()}.json", "w") as f: + json.dump(events, f, indent=4) + + + +# Main function to start the test +if __name__ == "__main__": + # num_connections = 2 # Number of concurrent connections + # metrics = run_concurrent_connections(num_connections) + # print_metrics(metrics) + + asyncio.run(run_request()) \ No newline at end of file diff --git a/backend/ws_metrics.py b/backend/ws_metrics.py new file mode 100644 index 0000000000..24cd6ddbab --- /dev/null +++ b/backend/ws_metrics.py @@ -0,0 +1,126 @@ +import json +import os +import logging +from datetime import datetime + +# Configure logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', filename='ws_metrics.log', filemode='w') + +def parse_timestamp(timestamp): + return datetime.fromtimestamp(timestamp) + +def calculate_durations(events): + first_event_time = parse_timestamp(events[0]['time']) + last_event_time = parse_timestamp(events[-1]['time']) + + allocation_start_time = None + allocate_sent_time = None + threshold_received_time = None + disconnect_time = None + non_201_allocate_responses = 0 + + for event in events: + event_time = parse_timestamp(event['time']) + if event['event'].startswith('error'): + logging.error(f"Error event found: {event}") + + if event['event'] == 'allocate_request': + allocate_sent_time = event_time + elif event['event'] == 'allocate_response': + threshold_received_time = event_time + if event.get('status_code') != 201: + non_201_allocate_responses += 1 + logging.error(f"Allocate response with status code {event.get('status_code')} found: {event}") + elif event['event'] == 'threshold_received': + threshold_received_time = event_time + elif event['event'] == 'disconnect': + disconnect_time = event_time + elif event['event'] == 'nonce_request': + 
allocation_start_time = event_time + + metrics = { + 'duration_from_first_to_last_event': (last_event_time - first_event_time).total_seconds(), + 'duration_before_allocation': (allocation_start_time - first_event_time).total_seconds() if allocation_start_time else None, + 'duration_allocate_to_threshold': (threshold_received_time - allocate_sent_time).total_seconds() if allocate_sent_time and threshold_received_time else None, + 'duration_threshold_to_disconnect': (disconnect_time - threshold_received_time).total_seconds() if threshold_received_time and disconnect_time else None, + 'non_201_allocate_responses': non_201_allocate_responses + } + + return metrics + +def aggregate_metrics(all_metrics): + aggregated = { + 'duration_from_first_to_last_event': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'duration_before_allocation': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'duration_allocate_to_threshold': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'duration_threshold_to_disconnect': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'non_201_allocate_responses': {'total': 0, 'count': 0} + } + + for metrics in all_metrics: + for key in aggregated.keys(): + if key == 'non_201_allocate_responses': + aggregated[key]['total'] += metrics[key] + aggregated[key]['count'] += 1 + elif metrics[key] is not None: + aggregated[key]['min'] = min(aggregated[key]['min'], metrics[key]) + aggregated[key]['max'] = max(aggregated[key]['max'], metrics[key]) + aggregated[key]['total'] += metrics[key] + aggregated[key]['count'] += 1 + + # Calculate averages + for key in aggregated.keys(): + if key != 'non_201_allocate_responses' and aggregated[key]['count'] > 0: + aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] + elif key == 'non_201_allocate_responses': + aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] if aggregated[key]['count'] > 0 else None + else: + aggregated[key]['avg'] = None + + + return aggregated + +def process_file(file_path): + try: + with open(file_path, 'r') as file: + events = json.load(file) + + metrics = calculate_durations(events) + return metrics + except Exception as e: + logging.error(f"Error processing file {file_path}: {e}") + return None + +def main(): + ws_logs_dir = 'wr_logs' + if not os.path.exists(ws_logs_dir): + print(f"Directory {ws_logs_dir} does not exist.") + return + + all_metrics = [] + for file_name in os.listdir(ws_logs_dir): + file_path = os.path.join(ws_logs_dir, file_name) + if os.path.isfile(file_path): + metrics = process_file(file_path) + if metrics: + all_metrics.append(metrics) + + aggregated_metrics = aggregate_metrics(all_metrics) + + if aggregated_metrics: + print("Aggregated Metrics:") + for key, value in aggregated_metrics.items(): + if key == 'non_201_allocate_responses': + print(f" {key.replace('_', ' ').title()}: {value['total']} occurrences") + else: + print("key", key) + print("value", value) + print(f" {key.replace('_', ' ').title()}:") + print(f" Min: {value['min']} seconds") + print(f" Max: {value['max']} seconds") + print(f" Avg: {value['avg']} seconds") + else: + print("Failed to aggregate metrics.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/backend/ws_req_metrics.py b/backend/ws_req_metrics.py new file mode 100644 index 0000000000..b1323cfef2 --- /dev/null +++ b/backend/ws_req_metrics.py @@ -0,0 +1,179 @@ +import json +import os +from datetime import datetime 
+import numpy as np + +def parse_timestamp(timestamp): + return datetime.fromtimestamp(timestamp) + +def calculate_metrics(events): + first_event_time = parse_timestamp(events[0]['time']) + last_event_time = parse_timestamp(events[-1]['time']) + + allocate_request_time = None + payload_generated_time = None + nonce_request_time = None + response_times = {} + response_counts = {} + + for event in events: + event_time = parse_timestamp(event['time']) + if event['event'] == 'nonce_response': + nonce_request_time = event_time + elif event['event'] == 'payload_generated': + payload_generated_time = event_time + elif event['event'] == 'allocate_request': + allocate_request_time = event_time + elif event['event'] == 'allocate_response': + allocate_response_time = event_time + status_code = event.get('status_code') + if status_code not in response_times: + response_times[status_code] = [] + response_counts[status_code] = 0 + response_times[status_code].append((allocate_response_time - allocate_request_time).total_seconds()) + response_counts[status_code] += 1 + + # If theres no "allocate_response" we should assume it's a 600 status code + if 'allocate_response' not in [e['event'] for e in events]: + print("No allocate response found", events) + status_code = 600 + if status_code not in response_times: + response_times[status_code] = [] + response_counts[status_code] = 0 + response_times[status_code].append((last_event_time - allocate_request_time).total_seconds()) + response_counts[status_code] + + metrics = { + 'response_times': response_times, + 'response_counts': response_counts, + 'payload_generation_duration': (payload_generated_time - nonce_request_time).total_seconds() if payload_generated_time and nonce_request_time else None, + 'nonce_request_duration': (nonce_request_time - first_event_time).total_seconds() if nonce_request_time else None, + 'first_event_time': first_event_time.timestamp(), + 'last_event_time': last_event_time.timestamp(), + } + + return metrics + +def aggregate_metrics(all_metrics): + aggregated = { + 'response_times': {}, + 'response_counts': {}, + 'payload_generation_duration': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'nonce_request_duration': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, + 'first_event_time': float('inf'), + 'last_event_time': float('-inf'), + 'total_requests': 0, + } + + for metrics in all_metrics: + for status_code, times in metrics['response_times'].items(): + if status_code not in aggregated['response_times']: + aggregated['response_times'][status_code] = {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0, 'times': []} + aggregated['response_counts'][status_code] = 0 + for time in times: + aggregated['response_times'][status_code]['min'] = min(aggregated['response_times'][status_code]['min'], time) + aggregated['response_times'][status_code]['max'] = max(aggregated['response_times'][status_code]['max'], time) + aggregated['response_times'][status_code]['total'] += time + aggregated['response_times'][status_code]['count'] += 1 + aggregated['response_times'][status_code]['times'].append(time) + aggregated['response_counts'][status_code] += metrics['response_counts'][status_code] + + for key in ['payload_generation_duration', 'nonce_request_duration']: + if metrics[key] is not None: + aggregated[key]['min'] = min(aggregated[key]['min'], metrics[key]) + aggregated[key]['max'] = max(aggregated[key]['max'], metrics[key]) + aggregated[key]['total'] += metrics[key] + 
aggregated[key]['count'] += 1 + + aggregated['first_event_time'] = min(aggregated['first_event_time'], metrics['first_event_time']) + aggregated['last_event_time'] = max(aggregated['last_event_time'], metrics['last_event_time']) + aggregated['total_requests'] += 1 + + # Calculate averages and additional statistics + for key in ['payload_generation_duration', 'nonce_request_duration']: + if aggregated[key]['count'] > 0: + aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] + else: + aggregated[key]['avg'] = None + + for status_code, data in aggregated['response_times'].items(): + if data['count'] > 0: + data['avg'] = data['total'] / data['count'] + data['median'] = np.median(data['times']) + data['90th_percentile'] = np.percentile(data['times'], 90) + data['80th_percentile'] = np.percentile(data['times'], 80) + data['std_dev'] = np.std(data['times']) + else: + data['avg'] = data['median'] = data['90th_percentile'] = data['std_dev'] = None + + # Calculate requests per second + total_duration = aggregated['last_event_time'] - aggregated['first_event_time'] + if total_duration > 0: + aggregated['requests_per_second'] = aggregated['total_requests'] / total_duration + else: + aggregated['requests_per_second'] = None + + return aggregated + +def process_file(file_path): + try: + with open(file_path, 'r') as file: + events = json.load(file) + + metrics = calculate_metrics(events) + return metrics + except Exception as e: + print(f"Error processing file {file_path}: {e}") + return None + +def main(): + ws_logs_dir = 'wr_logs_flask_1' + if not os.path.exists(ws_logs_dir): + print(f"Directory {ws_logs_dir} does not exist.") + return + + all_metrics = [] + for file_name in os.listdir(ws_logs_dir): + file_path = os.path.join(ws_logs_dir, file_name) + if os.path.isfile(file_path): + metrics = process_file(file_path) + if metrics: + all_metrics.append(metrics) + + aggregated_metrics = aggregate_metrics(all_metrics) + + if aggregated_metrics: + print("Aggregated Metrics:") + for key, value in aggregated_metrics.items(): + if key in ['first_event_time', 'last_event_time']: + print(f" {key.replace('_', ' ').title()}: {datetime.fromtimestamp(value)}") + elif key == 'requests_per_second': + print(f" {key.replace('_', ' ').title()}: {value} requests/second") + elif key == 'response_counts': + print(" Response Counts:") + for status_code, count in value.items(): + print(f" {status_code}: {count} requests") + elif key == 'response_times': + print(" Response Times:") + for status_code, data in value.items(): + print(f" Status Code {status_code}:") + print(f" Min: {data['min']} seconds") + print(f" Max: {data['max']} seconds") + print(f" Avg: {data['avg']} seconds") + print(f" Median: {data['median']} seconds") + print(f" 90th Percentile: {data['90th_percentile']} seconds") + print(f" 80th Percentile: {data['80th_percentile']} seconds") + print(f" Std Dev: {data['std_dev']} seconds") + print(f" Num: {data['count']} requests") + elif isinstance(value, dict): + print(f" {key.replace('_', ' ').title()}:") + print(f" Min: {value.get('min')} seconds") + print(f" Max: {value.get('max')} seconds") + print(f" Avg: {value.get('avg')} seconds") + else: + print(f" {key.replace('_', ' ').title()}: {value}") + else: + print("Failed to aggregate metrics.") + +if __name__ == "__main__": + main() \ No newline at end of file From dc7cef793c4fe662d443a5bfc6048e54ebdb8c65 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Fri, 4 Oct 2024 19:56:25 +0200 Subject: [PATCH 09/31] Updates --- backend/poetry.lock | 264 
+++++++++++++++++- backend/pyproject.toml | 3 +- backend/startup.py | 2 +- backend/v2/allocations/repositories.py | 50 +++- backend/v2/allocations/router.py | 6 + backend/v2/allocations/services.py | 15 +- backend/v2/allocations/validators.py | 193 +++++++++---- backend/v2/core/dependencies.py | 56 +++- .../v2/uniqueness_quotients/repositories.py | 20 +- backend/v2/users/repositories.py | 14 +- backend/ws_allocation_tester.py | 11 +- backend/ws_req_metrics.py | 2 +- 12 files changed, 529 insertions(+), 107 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 83bf36a062..dab66e5228 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1689,6 +1689,54 @@ doc = ["sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] lint = ["black (>=22)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=5.0.0)"] test = ["eth-utils (>=1.0.1,<3)", "hypothesis (>=3.44.24,<=6.31.6)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + [[package]] name = "idna" version = "3.7" @@ -2906,6 +2954,68 @@ files = [ {file = 
"pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "rapidfuzz" version = "3.9.3" @@ -3490,22 +3600,168 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.30.6" +version = "0.31.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, + {file = "uvicorn-0.31.0-py3-none-any.whl", hash = "sha256:cac7be4dd4d891c363cd942160a7b02e69150dcbc7a36be04d5f4af4b17c8ced"}, + {file = "uvicorn-0.31.0.tar.gz", hash = "sha256:13bc21373d103859f68fe739608e2eb054a816dea79189bc3ca08ea89a275906"}, ] [package.dependencies] click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +[[package]] +name = "uvloop" +version = "0.20.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "watchfiles" +version = "0.24.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = 
"sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = 
"watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + [[package]] name = "web3" version = "6.20.3" @@ -3759,4 +4015,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "dbf09ab41544d32f718ee8267f411059f064357d9cd3cf0a263d1ecababb8f24" +content-hash = "542389a8ae25f12759a7c1e677983f8e9f0f1044641875d8c34a7182eefed4c8" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index f1aed379ed..af9f1d6a91 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -33,8 +33,9 @@ fastapi = "^0.112.0" mypy = "^1.11.2" isort = "^5.13.2" pydantic-settings = "^2.4.0" -uvicorn = "^0.30.6" +uvicorn = {extras = ["standard"], version = "^0.31.0"} asyncpg = "^0.29.0" +uvloop = "^0.20.0" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" diff --git a/backend/startup.py b/backend/startup.py index 26b6c7185a..d26b2c2142 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -82,4 +82,4 @@ async def dispatch(self, request: Request, call_next): import uvicorn # uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) - uvicorn.run(fastapi_app, port=5000) + uvicorn.run(fastapi_app, port=5000) \ No newline at end of file diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 133b023c9e..660ef6fbf3 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -132,26 +132,56 @@ async def get_last_allocation_request_nonce( ) -> int | None: """Get the last nonce of the allocation requests for a user.""" - result = await session.execute( - select(AllocationRequestDB.nonce). - join(User, AllocationRequestDB.user_id == User.id). - filter(User.address == user_address). - order_by(AllocationRequestDB.nonce.desc()). - limit(1) - ) - return result.scalar() + import time + + + + # return result.scalar() + + start = time.time() user = await get_user_by_address(session, user_address) if user is None: return None - result = await session.execute( + # result = await session.execute( + # select(func.max(AllocationRequestDB.nonce)).filter( + # AllocationRequestDB.user_id == user.id + # ) + # ) + + print("get_last_allocation_request_nonce2", time.time() - start) + + start = time.time() + + result = await session.scalar( select(func.max(AllocationRequestDB.nonce)).filter( AllocationRequestDB.user_id == user.id ) ) - return result.scalar() + # result = await session.execute( + # select(AllocationRequestDB.nonce). + # join(User, AllocationRequestDB.user_id == User.id). + # filter(User.address == user_address). + # order_by(AllocationRequestDB.nonce.desc()). 
+ # limit(1) + # ) + + print("get_last_allocation_request_nonce", time.time() - start) + + # start = time.time() + + # result = ( + # AllocationRequestDB.query.join(User, User.id == AllocationRequestDB.user_id) + # .filter(User.address == user_address) + # .order_by(AllocationRequestDB.nonce.desc()) + # .first() + # ) + + # print("?????????get_user_last_allocation_request", time.time() - start) + + return result async def get_donations_by_project( diff --git a/backend/v2/allocations/router.py b/backend/v2/allocations/router.py index 323b53fe2c..8bde27b335 100644 --- a/backend/v2/allocations/router.py +++ b/backend/v2/allocations/router.py @@ -20,6 +20,10 @@ async def allocate( Make an allocation for the user. """ + import time + + + start = time.time() request = UserAllocationRequest( user_address=allocation_request.user_address, allocations=allocation_request.payload.allocations, @@ -36,4 +40,6 @@ async def allocate( print("pending_epoch", pending_epoch) await allocations.make(pending_epoch, request) + + print("allocate took: ", time.time() - start) diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index da36f7c608..06cb27d23f 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -1,3 +1,4 @@ +import asyncio from dataclasses import dataclass import time @@ -158,12 +159,18 @@ async def simulate_leverage( start_time = time.time() - all_projects = await projects.get_project_addresses(epoch_number) + all_projects, matched_rewards, existing_allocations = await asyncio.gather( + projects.get_project_addresses(epoch_number), + estimated_project_matched_rewards.get(epoch_number), + get_allocations_with_user_uqs(session, epoch_number), + ) + + # all_projects = await projects.get_project_addresses(epoch_number) - matched_rewards = await estimated_project_matched_rewards.get(epoch_number) + # matched_rewards = await estimated_project_matched_rewards.get(epoch_number) - # Get all allocations before user's allocation - existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) + # # Get all allocations before user's allocation + # existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) print("existing allocations retrieved in", time.time() - start_time) diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py index 131aace12f..28d7c2f253 100644 --- a/backend/v2/allocations/validators.py +++ b/backend/v2/allocations/validators.py @@ -1,3 +1,4 @@ +import asyncio from dataclasses import dataclass from web3 import AsyncWeb3 from app import exceptions @@ -27,21 +28,20 @@ async def verify(self, epoch_number: int, request: UserAllocationRequest) -> Non import time start = time.time() - await verify_logic( - session=self.session, - epoch_subgraph=self.epochs_subgraph, - projects_contracts=self.projects_contracts, - epoch_number=epoch_number, - payload=request, - ) - - print("verify_logic", time.time() - start) - - await verify_signature( - w3=self.projects_contracts.w3, - chain_id=self.chain_id, - user_address=request.user_address, - payload=request, + await asyncio.gather( + verify_logic( + session=self.session, + epoch_subgraph=self.epochs_subgraph, + projects_contracts=self.projects_contracts, + epoch_number=epoch_number, + payload=request, + ), + verify_signature( + w3=self.projects_contracts.w3, + chain_id=self.chain_id, + user_address=request.user_address, + payload=request, + ), ) print("verify_signature", time.time() - start) @@ -61,47 +61,141 @@ 
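# A minimal, self-contained sketch of the asyncio.gather pattern applied in the
# services.py and validators.py hunks above: awaits that do not depend on each
# other's results are started together instead of sequentially. The coroutine
# names below are illustrative only. With the default return_exceptions=False,
# the first exception propagates to the awaiting caller while the remaining
# awaitables are not cancelled and keep running.
import asyncio


async def load_projects() -> list[str]:
    await asyncio.sleep(0.1)  # stand-in for a contract / subgraph call
    return ["0xProjectA", "0xProjectB"]


async def load_matched_rewards() -> int:
    await asyncio.sleep(0.1)  # stand-in for a database query
    return 1_000


async def main() -> None:
    projects, matched = await asyncio.gather(load_projects(), load_matched_rewards())
    print(projects, matched)


asyncio.run(main())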
async def verify_logic( # if epoch_details.state != "PENDING": # raise exceptions.NotInDecision - import time - # Check if the allocations are not empty if not payload.allocations: raise exceptions.EmptyAllocations() - start = time.time() + print("already here") + + async def _check_database(): + await _provided_nonce_matches_expected(session, payload.user_address, payload.nonce) + await _user_is_not_patron(session, epoch_subgraph, payload.user_address, epoch_number) + await _user_has_budget(session, payload, epoch_number) + + await asyncio.gather( + _check_database(), + _provided_projects_are_correct(projects_contracts, epoch_number, payload) + ) + + + # try: + # async with asyncio.TaskGroup() as tg: + + # tg.create_task(_provided_nonce_matches_expected(session, payload.user_address, payload.nonce)) + # tg.create_task(_user_is_not_patron(session, epoch_subgraph, payload.user_address, epoch_number)) + # tg.create_task(_provided_projects_are_correct(projects_contracts, epoch_number, payload)) + # tg.create_task(_user_has_budget(session, payload, epoch_number)) + # except Exception as e: + # print("e", e) + # raise e + + + # summary = asyncio.gather( + # _provided_nonce_matches_expected(session, payload.user_address, payload.nonce), + # _user_is_not_patron( + # session, epoch_subgraph, payload.user_address, epoch_number + # ), + # _provided_projects_are_correct( + # projects_contracts, epoch_number, payload + # ), + # _user_has_budget(session, payload, epoch_number), + # return_exceptions=True, + # ) + + # print("maybe here?") + + # for i in await summary: + # if isinstance(i, Exception): + # raise i + + print("hehehehehe") + +async def _provided_nonce_matches_expected( + # Component dependencies + session: AsyncSession, + # Arguments + user_address: str, + nonce: int, +) -> None: + """ + Check if the nonce is as expected. + """ + # Get the next nonce + next_nonce = await get_next_user_nonce(session, user_address) + # Check if the nonce is as expected - expected_nonce = await get_next_user_nonce(session, payload.user_address) - if payload.nonce != expected_nonce: - raise exceptions.WrongAllocationsNonce(payload.nonce, expected_nonce) + if nonce != next_nonce: + raise exceptions.WrongAllocationsNonce(nonce, next_nonce) - print("get_next_user_nonce", time.time() - start) - start = time.time() +async def _user_is_not_patron( + # Component dependencies + session: AsyncSession, + epoch_subgraph: EpochsSubgraph, + # Arguments + user_address: str, + epoch_number: int, +) -> None: + """ + Check if the user is not a patron. + """ # Check if the user is not a patron epoch_details = await epoch_subgraph.get_epoch_by_number(epoch_number) is_patron = await user_is_patron_with_budget( session, - payload.user_address, + user_address, epoch_number, epoch_details.finalized_timestamp.datetime(), ) if is_patron: - raise exceptions.NotAllowedInPatronMode(payload.user_address) + raise exceptions.NotAllowedInPatronMode(user_address) - print("user_is_patron_with_budget", time.time() - start) +async def get_next_user_nonce( + # Component dependencies + session: AsyncSession, + # Arguments + user_address: str, +) -> int: + """ + Get the next expected nonce for the user. + It's a simple increment of the last nonce, or 0 if there is no previous nonce. 
+ """ + # Get the last allocation request of the user + last_allocation_request = await get_last_allocation_request_nonce( + session, user_address + ) + print("last_allocation_request", last_allocation_request) - start = time.time() + # Calculate the next nonce + if last_allocation_request is None: + return 0 + # Increment the last nonce + return last_allocation_request + 1 + + +async def _provided_projects_are_correct( + # Component dependencies + projects_contracts: ProjectsContracts, + # Arguments + epoch_number: int, + payload: UserAllocationRequest, +) -> None: + """ + Check if the projects in the allocation request are correct. + """ + + import time + start = time.time() # Check if the user is not a project all_projects = await projects_contracts.get_project_addresses(epoch_number) if payload.user_address in all_projects: raise exceptions.ProjectAllocationToSelf() - project_addresses = [a.project_address for a in payload.allocations] - print("get_project_addresses", time.time() - start) - start = time.time() + project_addresses = [a.project_address for a in payload.allocations] # Check if the projects are valid invalid_projects = set(project_addresses) - set(all_projects) @@ -113,10 +207,19 @@ async def verify_logic( if duplicates: raise exceptions.DuplicatedProjects(duplicates) - print("invalid_projects", time.time() - start) +async def _user_has_budget( + # Component dependencies + session: AsyncSession, + # Arguments + payload: UserAllocationRequest, + epoch_number: int, +) -> None: + """ + Check if the user has enough budget for the allocation. + Check if the sum of the allocations is within the user's budget. + """ - start = time.time() # Get the user's budget user_budget = await get_budget_by_user_address_and_epoch( session, payload.user_address, epoch_number @@ -129,32 +232,6 @@ async def verify_logic( if sum(a.amount for a in payload.allocations) > user_budget: raise exceptions.RewardsBudgetExceeded() - print("get_budget_by_user_address_and_epoch", time.time() - start) - -async def get_next_user_nonce( - # Component dependencies - session: AsyncSession, - # Arguments - user_address: str, -) -> int: - """ - Get the next expected nonce for the user. - It's a simple increment of the last nonce, or 0 if there is no previous nonce. 
- """ - # Get the last allocation request of the user - last_allocation_request = await get_last_allocation_request_nonce( - session, user_address - ) - - print("last_allocation_request", last_allocation_request) - - # Calculate the next nonce - if last_allocation_request is None: - return 0 - - # Increment the last nonce - return last_allocation_request + 1 - async def verify_signature( w3: AsyncWeb3, chain_id: int, user_address: str, payload: UserAllocationRequest diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index e4f6e01b39..e615b4129f 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -1,4 +1,6 @@ +from asyncio import current_task from contextlib import asynccontextmanager +from functools import lru_cache from typing import Annotated, AsyncGenerator from fastapi import Depends @@ -6,13 +8,13 @@ from pydantic import Field from pydantic_settings import BaseSettings, SettingsConfigDict -from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine, async_scoped_session from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware class OctantSettings(BaseSettings): - model_config = SettingsConfigDict(env_file='.env', extra='ignore') + model_config = SettingsConfigDict(env_file='.env', extra='ignore', frozen=True) class Web3ProviderSettings(OctantSettings): @@ -55,18 +57,14 @@ async def create_tables(): await conn.run_sync(BaseModel.metadata.create_all) -# @asynccontextmanager -async def get_db_session( +@lru_cache(1) +def get_sessionmaker( settings: Annotated[DatabaseSettings, Depends(get_database_settings)] -) -> AsyncGenerator[AsyncSession, None]: - # Create an async SQLAlchemy engine - - # logging.error("Creating database engine") - +) -> async_sessionmaker[AsyncSession]: engine = create_async_engine( settings.sqlalchemy_database_uri, echo=False, # Disable SQL query logging (for performance) - pool_size=20, # Initial pool size (default is 5) + pool_size=100, # Initial pool size (default is 5) max_overflow=10, # Extra connections if pool is exhausted pool_timeout=30, # Timeout before giving up on a connection pool_recycle=3600, # Recycle connections after 1 hour (for long-lived connections) @@ -75,22 +73,52 @@ async def get_db_session( # connect_args={"options": "-c timezone=utc"} # Ensures timezone is UTC ) - # Create a sessionmaker with AsyncSession class - async_session = async_sessionmaker( + sessionmaker = async_sessionmaker( autocommit=False, autoflush=False, bind=engine, class_=AsyncSession ) + scoped_session = async_scoped_session(sessionmaker, scopefunc=current_task) + + return scoped_session + +# @asynccontextmanager +async def get_db_session( + sessionmaker: Annotated[async_sessionmaker[AsyncSession], Depends(get_sessionmaker)] +) -> AsyncGenerator[AsyncSession, None]: + # Create an async SQLAlchemy engine + + # logging.error("Creating database engine") + + # engine = create_async_engine( + # settings.sqlalchemy_database_uri, + # echo=False, # Disable SQL query logging (for performance) + # pool_size=20, # Initial pool size (default is 5) + # max_overflow=10, # Extra connections if pool is exhausted + # pool_timeout=30, # Timeout before giving up on a connection + # pool_recycle=3600, # Recycle connections after 1 hour (for long-lived connections) + # pool_pre_ping=True, # Check if the connection is alive before using it + # future=True, # Use the 
future-facing SQLAlchemy 2.0 style + # # connect_args={"options": "-c timezone=utc"} # Ensures timezone is UTC + # ) + + # # Create a sessionmaker with AsyncSession class + # async_session = async_sessionmaker( + # autocommit=False, autoflush=False, bind=engine, class_=AsyncSession + # ) + # logging.error("Opening session", async_session) # Create a new session - async with async_session() as session: + async with sessionmaker() as session: try: yield session await session.commit() - except Exception: + except Exception as e: + print("----Rolling back session, error:", e) await session.rollback() raise finally: + print("----Closing session") await session.close() diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py index a134ef30b1..4eca57445f 100644 --- a/backend/v2/uniqueness_quotients/repositories.py +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -50,12 +50,18 @@ async def get_gp_stamps_by_address( ) -> GPStamps | None: """Gets the latest GitcoinPassport Stamps record for a user.""" - result = await session.execute( - select(GPStamps) - .join(User) - .filter(User.address == to_checksum_address(user_address)) - .order_by(GPStamps.created_at.desc()) - .limit(1) + user = await get_user_by_address(session, user_address) + + result = await session.scalar( + select(GPStamps).filter(GPStamps.user_id == user.id).order_by(GPStamps.created_at.desc()).limit(1) ) - return result.scalar_one_or_none() + # result = await session.execute( + # select(GPStamps) + # .join(User) + # .filter(User.address == to_checksum_address(user_address)) + # .order_by(GPStamps.created_at.desc()) + # .limit(1) + # ) + + return result diff --git a/backend/v2/users/repositories.py b/backend/v2/users/repositories.py index b28dc4777b..9f8d5cbfb1 100644 --- a/backend/v2/users/repositories.py +++ b/backend/v2/users/repositories.py @@ -7,5 +7,15 @@ async def get_user_by_address(session: AsyncSession, user_address: str) -> User | None: user_address = to_checksum_address(user_address) - result = await session.execute(select(User).filter(User.address == user_address)) - return result.scalar_one_or_none() + import time + start = time.time() + + result = await session.scalar( + select(User).filter(User.address == user_address).limit(1) + ) + + # result = await session.execute(select(User).filter(User.address == user_address)) + print("get_user_by_address", time.time() - start) + print("result", result) + + return result diff --git a/backend/ws_allocation_tester.py b/backend/ws_allocation_tester.py index 8e283af777..799c7a110c 100644 --- a/backend/ws_allocation_tester.py +++ b/backend/ws_allocation_tester.py @@ -95,7 +95,7 @@ async def allocate(): try: # url = f"https://uat-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" # url = f"https://master-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" - url = f"http://127.0.0.1:5000/allocations/users/{me.address}/allocation_nonce" + url = f"http://127.0.0.1:5000/allocations/users/{me.address}/allocation_nonce" # forward to flask # url = f"http://127.0.0.1:5000/flask/allocations/users/{me.address}/allocation_nonce" nonce = requests.get(url).json()['allocationNonce'] @@ -110,14 +110,15 @@ async def allocate(): sig_time = time.time() random_mult = random.random() - amout = int(12223333 * random_mult) + amout = int(1222333312223333 * random_mult) + # amout = 827243882781739 # print("Amount: ", amout) payload = { "allocations": [ { "proposalAddress": address, - "amount": 
amout, + "amount": str(amout), } for address in project_addresses ], @@ -149,7 +150,7 @@ async def allocate(): resp = requests.post( # "https://uat-backend.octant.wildland.dev/allocations/allocate", # "https://master-backend.octant.wildland.dev/allocations/allocate", - "http://127.0.0.1:5000/allocations/allocate", + "http://127.0.0.1:5000/allocations/allocate", # async fapi # "http://127.0.0.1:5000/flask/allocations/allocate", json=request_data, ) @@ -168,7 +169,7 @@ async def allocate(): async def run_ws(): - for i in range(10): + for i in range(5): global pre_allocate global donors_count diff --git a/backend/ws_req_metrics.py b/backend/ws_req_metrics.py index b1323cfef2..5969a1ad9d 100644 --- a/backend/ws_req_metrics.py +++ b/backend/ws_req_metrics.py @@ -127,7 +127,7 @@ def process_file(file_path): return None def main(): - ws_logs_dir = 'wr_logs_flask_1' + ws_logs_dir = 'wr_logs' if not os.path.exists(ws_logs_dir): print(f"Directory {ws_logs_dir} does not exist.") return From d768ba3612f322e4a0ca36532151ab251466ec5f Mon Sep 17 00:00:00 2001 From: adam-gf Date: Sun, 6 Oct 2024 21:36:48 +0200 Subject: [PATCH 10/31] Adds fixes and other stuff --- backend/poetry.lock | 8 +- backend/pyproject.toml | 1 + backend/v2/allocations/dependencies.py | 64 ++- backend/v2/allocations/repositories.py | 54 +-- backend/v2/allocations/router.py | 29 +- backend/v2/allocations/schemas.py | 20 +- backend/v2/allocations/services.py | 62 +-- backend/v2/allocations/socket.py | 439 ++++++++++-------- backend/v2/allocations/validators.py | 31 +- backend/v2/core/dependencies.py | 39 +- backend/v2/core/exceptions.py | 9 + backend/v2/core/types.py | 7 + backend/v2/crypto/signatures.py | 9 +- backend/v2/deposits/contracts.py | 15 +- backend/v2/deposits/dependencies.py | 8 +- backend/v2/epochs/dependencies.py | 34 +- backend/v2/glms/contracts.py | 16 +- backend/v2/glms/dependencies.py | 8 +- backend/v2/main.py | 3 + backend/v2/matched_rewards/__init__.py | 0 backend/v2/matched_rewards/dependencies.py | 58 +++ backend/v2/matched_rewards/services.py | 84 ++++ .../v2/project_rewards/capped_quadriatic.py | 42 +- backend/v2/project_rewards/dependencies.py | 32 ++ backend/v2/project_rewards/router.py | 28 ++ backend/v2/project_rewards/schemas.py | 46 ++ backend/v2/project_rewards/services.py | 39 ++ backend/v2/projects/depdendencies.py | 114 ----- backend/v2/projects/dependencies.py | 56 +++ backend/v2/projects/services.py | 131 +----- .../v2/uniqueness_quotients/dependencies.py | 11 +- .../v2/uniqueness_quotients/repositories.py | 14 +- backend/v2/uniqueness_quotients/services.py | 34 +- backend/v2/user_patron_mode/repositories.py | 6 +- backend/v2/users/repositories.py | 15 +- 35 files changed, 885 insertions(+), 681 deletions(-) create mode 100644 backend/v2/core/exceptions.py create mode 100644 backend/v2/core/types.py create mode 100644 backend/v2/matched_rewards/__init__.py create mode 100644 backend/v2/matched_rewards/dependencies.py create mode 100644 backend/v2/matched_rewards/services.py create mode 100644 backend/v2/project_rewards/dependencies.py create mode 100644 backend/v2/project_rewards/router.py create mode 100644 backend/v2/project_rewards/schemas.py create mode 100644 backend/v2/project_rewards/services.py delete mode 100644 backend/v2/projects/depdendencies.py create mode 100644 backend/v2/projects/dependencies.py diff --git a/backend/poetry.lock b/backend/poetry.lock index dab66e5228..64a06f342d 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -2892,13 +2892,13 @@ docs = ["sphinx"] 
[[package]] name = "python-socketio" -version = "5.11.3" +version = "5.11.4" description = "Socket.IO server and client for Python" optional = false python-versions = ">=3.8" files = [ - {file = "python_socketio-5.11.3-py3-none-any.whl", hash = "sha256:2a923a831ff70664b7c502df093c423eb6aa93c1ce68b8319e840227a26d8b69"}, - {file = "python_socketio-5.11.3.tar.gz", hash = "sha256:194af8cdbb7b0768c2e807ba76c7abc288eb5bb85559b7cddee51a6bc7a65737"}, + {file = "python_socketio-5.11.4-py3-none-any.whl", hash = "sha256:42efaa3e3e0b166fc72a527488a13caaac2cefc76174252486503bd496284945"}, + {file = "python_socketio-5.11.4.tar.gz", hash = "sha256:8b0b8ff2964b2957c865835e936310190639c00310a47d77321a594d1665355e"}, ] [package.dependencies] @@ -4015,4 +4015,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "542389a8ae25f12759a7c1e677983f8e9f0f1044641875d8c34a7182eefed4c8" +content-hash = "bc7f7d04b03d2aeaafe48b29faba1ac5cce81d9d6ab1869452c170efffd91b47" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index af9f1d6a91..a7d143fed7 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -36,6 +36,7 @@ pydantic-settings = "^2.4.0" uvicorn = {extras = ["standard"], version = "^0.31.0"} asyncpg = "^0.29.0" uvloop = "^0.20.0" +python-socketio = "^5.11.4" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py index f1533c366c..a665cb3604 100644 --- a/backend/v2/allocations/dependencies.py +++ b/backend/v2/allocations/dependencies.py @@ -2,69 +2,67 @@ from fastapi import Depends from pydantic import Field -from pydantic_settings import BaseSettings -from v2.projects.services import EstimatedProjectMatchedRewards -from v2.epochs.dependencies import get_epochs_subgraph -from v2.epochs.subgraphs import EpochsSubgraph -from v2.projects.contracts import ProjectsContracts -from v2.projects.depdendencies import ( - get_estimated_project_matched_rewards, - get_projects_contracts, +from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator +from v2.project_rewards.services import ProjectRewardsEstimator +from v2.core.exceptions import AllocationWindowClosed +from v2.epochs.dependencies import ( + AssertAllocationWindowOpen, + GetEpochsContracts, + GetEpochsSubgraph, ) -from v2.uniqueness_quotients.dependencies import get_uq_score_getter -from v2.uniqueness_quotients.services import UQScoreGetter +from v2.projects.dependencies import ( + GetProjectsContracts, +) +from v2.uniqueness_quotients.dependencies import GetUQScoreGetter from v2.core.dependencies import GetSession, OctantSettings -from .services import Allocations +from .services import Allocator from .validators import SignatureVerifier class SignatureVerifierSettings(OctantSettings): - chain_id: int = Field( default=11155111, description="The chain id to use for the signature verification.", ) + def get_signature_verifier_settings() -> SignatureVerifierSettings: return SignatureVerifierSettings() def get_signature_verifier( session: GetSession, - epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], - projects_contracts: Annotated[ProjectsContracts, Depends(get_projects_contracts)], - settings: Annotated[SignatureVerifierSettings, Depends(get_signature_verifier_settings)], + epochs_subgraph: GetEpochsSubgraph, + projects_contracts: GetProjectsContracts, + settings: Annotated[ + SignatureVerifierSettings, Depends(get_signature_verifier_settings) + ], ) -> 
SignatureVerifier: return SignatureVerifier( session, epochs_subgraph, projects_contracts, settings.chain_id ) -GetSignatureVerifier = Annotated[ - SignatureVerifier, - Depends(get_signature_verifier) -] +GetSignatureVerifier = Annotated[SignatureVerifier, Depends(get_signature_verifier)] -def get_allocations( + +async def get_allocator( + epoch_number: AssertAllocationWindowOpen, session: GetSession, signature_verifier: GetSignatureVerifier, - uq_score_getter: Annotated[UQScoreGetter, Depends(get_uq_score_getter)], - projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], - estimated_project_matched_rewards: Annotated[ - EstimatedProjectMatchedRewards, Depends(get_estimated_project_matched_rewards) - ], -) -> Allocations: - return Allocations( + uq_score_getter: GetUQScoreGetter, + projects_contracts: GetProjectsContracts, + matched_rewards_estimator: GetMatchedRewardsEstimator, +) -> Allocator: + return Allocator( session, signature_verifier, uq_score_getter, - projects, - estimated_project_matched_rewards, + projects_contracts, + matched_rewards_estimator, + epoch_number, ) -GetAllocations = Annotated[ - Allocations, - Depends(get_allocations) -] \ No newline at end of file +GetAllocator = Annotated[Allocator, Depends(get_allocator)] diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 660ef6fbf3..95f3b2449c 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -5,10 +5,11 @@ from app.infrastructure.database.models import AllocationRequest as AllocationRequestDB from app.infrastructure.database.models import UniquenessQuotient, User from eth_utils import to_checksum_address -from sqlalchemy import func, select, update +from sqlalchemy import INTEGER, cast, func, select, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import coalesce +from v2.core.types import Address from v2.users.repositories import get_user_by_address from .schemas import AllocationWithUserUQScore, ProjectDonation, UserAllocationRequest @@ -18,7 +19,7 @@ async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> """Get the sum of all allocations for a given epoch. 
We only consider the allocations that have not been deleted.""" result = await session.execute( - select(coalesce(func.sum(Allocation.amount), 0)) + select(coalesce(func.sum(cast(Allocation.amount, INTEGER)), 0)) .filter(Allocation.epoch == epoch_number) .filter(Allocation.deleted_at.is_(None)) ) @@ -64,7 +65,7 @@ async def get_allocations_with_user_uqs( async def soft_delete_user_allocations_by_epoch( session: AsyncSession, - user_address: str, + user_address: Address, epoch_number: int, ) -> None: """Soft delete all user allocations for a given epoch.""" @@ -91,7 +92,7 @@ async def soft_delete_user_allocations_by_epoch( async def store_allocation_request( session: AsyncSession, - user_address: str, + user_address: Address, epoch_number: int, request: UserAllocationRequest, leverage: float, @@ -128,61 +129,20 @@ async def store_allocation_request( async def get_last_allocation_request_nonce( session: AsyncSession, - user_address: str, + user_address: Address, ) -> int | None: """Get the last nonce of the allocation requests for a user.""" - import time - - - - # return result.scalar() - - start = time.time() - user = await get_user_by_address(session, user_address) if user is None: return None - # result = await session.execute( - # select(func.max(AllocationRequestDB.nonce)).filter( - # AllocationRequestDB.user_id == user.id - # ) - # ) - - print("get_last_allocation_request_nonce2", time.time() - start) - - start = time.time() - - result = await session.scalar( + return await session.scalar( select(func.max(AllocationRequestDB.nonce)).filter( AllocationRequestDB.user_id == user.id ) ) - # result = await session.execute( - # select(AllocationRequestDB.nonce). - # join(User, AllocationRequestDB.user_id == User.id). - # filter(User.address == user_address). - # order_by(AllocationRequestDB.nonce.desc()). - # limit(1) - # ) - - print("get_last_allocation_request_nonce", time.time() - start) - - # start = time.time() - - # result = ( - # AllocationRequestDB.query.join(User, User.id == AllocationRequestDB.user_id) - # .filter(User.address == user_address) - # .order_by(AllocationRequestDB.nonce.desc()) - # .first() - # ) - - # print("?????????get_user_last_allocation_request", time.time() - start) - - return result - async def get_donations_by_project( session: AsyncSession, diff --git a/backend/v2/allocations/router.py b/backend/v2/allocations/router.py index 8bde27b335..4ed213c38a 100644 --- a/backend/v2/allocations/router.py +++ b/backend/v2/allocations/router.py @@ -1,29 +1,28 @@ from fastapi import APIRouter -from v2.epochs.dependencies import GetEpochsContracts - -from .dependencies import GetAllocations +from .dependencies import GetAllocator from .schemas import UserAllocationRequest, UserAllocationRequestV1 -api = APIRouter(prefix="/allocations", tags=["allocations"]) +api = APIRouter(prefix="/allocations", tags=["Allocations"]) @api.post("/allocate", status_code=201) async def allocate( # Component dependencies - epochs_contracts: GetEpochsContracts, - allocations: GetAllocations, - # Arguments + allocator: GetAllocator, + # Request Parameters allocation_request: UserAllocationRequestV1, ) -> None: """ - Make an allocation for the user. + Request an allocation for the user. + Only available during the allocation window. 
""" import time - start = time.time() + + # TODO: We should ideally move to the newer version of the schema as it's simpler request = UserAllocationRequest( user_address=allocation_request.user_address, allocations=allocation_request.payload.allocations, @@ -32,14 +31,6 @@ async def allocate( is_manually_edited=allocation_request.is_manually_edited, ) - print("allocation_request", allocation_request) - current_epoch = await epochs_contracts.get_current_epoch() - print("current_epoch", current_epoch) - # get pending epoch - pending_epoch = await epochs_contracts.get_pending_epoch() - print("pending_epoch", pending_epoch) - - await allocations.make(pending_epoch, request) + await allocator.handle(request) - print("allocate took: ", time.time() - start) - + print("Allocation took: ", time.time() - start) diff --git a/backend/v2/allocations/schemas.py b/backend/v2/allocations/schemas.py index 9ed60b366f..8163a66b11 100644 --- a/backend/v2/allocations/schemas.py +++ b/backend/v2/allocations/schemas.py @@ -3,36 +3,40 @@ from pydantic import BaseModel, ConfigDict, Field from pydantic.alias_generators import to_camel +from v2.core.types import Address + + class AllocationWithUserUQScore(BaseModel): model_config = ConfigDict(frozen=True) - project_address: str + project_address: Address amount: int - user_address: str + user_address: Address user_uq_score: Decimal class AllocationRequest(BaseModel): model_config = ConfigDict(frozen=True, alias_generator=to_camel) - project_address: str = Field(..., alias='proposalAddress') + project_address: Address = Field(..., alias="proposalAddress") amount: int - # first_name: str = Field(..., alias='firstName') # last_name: str = Field(..., alias='lastName') # age: int = Field(..., alias='age') + class UserAllocationRequestPayloadV1(BaseModel): model_config = ConfigDict(frozen=True, alias_generator=to_camel) allocations: list[AllocationRequest] nonce: int + class UserAllocationRequestV1(BaseModel): model_config = ConfigDict(frozen=True, alias_generator=to_camel) - user_address: str + user_address: Address payload: UserAllocationRequestPayloadV1 signature: str is_manually_edited: bool @@ -41,7 +45,7 @@ class UserAllocationRequestV1(BaseModel): class UserAllocationRequest(BaseModel): model_config = ConfigDict(frozen=True) - user_address: str + user_address: Address allocations: list[AllocationRequest] nonce: int signature: str @@ -53,5 +57,5 @@ class ProjectDonation(BaseModel): model_config = ConfigDict(frozen=True) amount: int - donor_address: str # user address - project_address: str + donor_address: Address # user address + project_address: Address diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index 06cb27d23f..18bb798733 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -4,14 +4,12 @@ from app import exceptions from sqlalchemy.ext.asyncio import AsyncSession +from v2.matched_rewards.services import MatchedRewardsEstimator from v2.uniqueness_quotients.dependencies import UQScoreGetter from v2.project_rewards.capped_quadriatic import ( cqf_simulate_leverage, ) from v2.projects.contracts import ProjectsContracts -from v2.projects.services import ( - EstimatedProjectMatchedRewards, -) from v2.users.repositories import get_user_by_address from .validators import SignatureVerifier @@ -24,16 +22,18 @@ @dataclass -class Allocations: +class Allocator: session: AsyncSession signature_verifier: SignatureVerifier uq_score_getter: UQScoreGetter - projects: ProjectsContracts - 
estimated_project_matched_rewards: EstimatedProjectMatchedRewards + projects_contracts: ProjectsContracts + matched_rewards_estimator: MatchedRewardsEstimator + + epoch_number: int - async def make( + async def handle( self, - epoch_number: int, + # epoch_number: int, request: UserAllocationRequest, ) -> str: """ @@ -43,9 +43,9 @@ async def make( session=self.session, signature_verifier=self.signature_verifier, uq_score_getter=self.uq_score_getter, - projects=self.projects, - estimated_project_matched_rewards=self.estimated_project_matched_rewards, - epoch_number=epoch_number, + projects_contracts=self.projects_contracts, + matched_rewards_estimator=self.matched_rewards_estimator, + epoch_number=self.epoch_number, request=request, ) @@ -55,13 +55,12 @@ async def allocate( session: AsyncSession, signature_verifier: SignatureVerifier, uq_score_getter: UQScoreGetter, - projects: ProjectsContracts, - estimated_project_matched_rewards: EstimatedProjectMatchedRewards, - # Arguments + projects_contracts: ProjectsContracts, + matched_rewards_estimator: MatchedRewardsEstimator, epoch_number: int, + # Arguments request: UserAllocationRequest, ) -> str: - import time allocation_time = time.time() @@ -73,7 +72,6 @@ async def allocate( print("signature verified in", time.time() - allocation_time) - uq_score_time = time.time() # Get or calculate UQ score of the user @@ -84,7 +82,6 @@ async def allocate( print("uq score retrieved in", time.time() - uq_score_time) - new_allocations_time = time.time() # Calculate leverage by simulating the allocation new_allocations = [ @@ -98,11 +95,11 @@ async def allocate( ] leverage = await simulate_leverage( - session=session, - projects=projects, - estimated_project_matched_rewards=estimated_project_matched_rewards, - epoch_number=epoch_number, - new_allocations=new_allocations, + session, + projects_contracts, + matched_rewards_estimator, + epoch_number, + new_allocations, ) print("new allocations calculated in", time.time() - new_allocations_time) @@ -116,9 +113,7 @@ async def allocate( soft_delete_time = time.time() await soft_delete_user_allocations_by_epoch( - session, - user_address=request.user_address, - epoch_number=epoch_number, + session, request.user_address, epoch_number ) # Get user and update allocation nonce @@ -133,7 +128,7 @@ async def allocate( request.user_address, epoch_number, request, - leverage=leverage, + leverage, ) # Commit the transaction @@ -147,8 +142,8 @@ async def allocate( async def simulate_leverage( # Component dependencies session: AsyncSession, - projects: ProjectsContracts, - estimated_project_matched_rewards: EstimatedProjectMatchedRewards, + projects_contracts: ProjectsContracts, + matched_rewards_estimator: MatchedRewardsEstimator, # Arguments epoch_number: int, new_allocations: list[AllocationWithUserUQScore], @@ -160,18 +155,11 @@ async def simulate_leverage( start_time = time.time() all_projects, matched_rewards, existing_allocations = await asyncio.gather( - projects.get_project_addresses(epoch_number), - estimated_project_matched_rewards.get(epoch_number), + projects_contracts.get_project_addresses(epoch_number), + matched_rewards_estimator.get(), get_allocations_with_user_uqs(session, epoch_number), ) - # all_projects = await projects.get_project_addresses(epoch_number) - - # matched_rewards = await estimated_project_matched_rewards.get(epoch_number) - - # # Get all allocations before user's allocation - # existing_allocations = await get_allocations_with_user_uqs(session, epoch_number) - print("existing allocations 
retrieved in", time.time() - start_time) return cqf_simulate_leverage( diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 4a1804992e..60bafe98bf 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -1,266 +1,327 @@ +import asyncio +from contextlib import asynccontextmanager import logging -from typing import Tuple +from typing import AsyncGenerator, Tuple import socketio from eth_utils import to_checksum_address +from v2.core.exceptions import AllocationWindowClosed +from v2.matched_rewards.dependencies import ( + get_matched_rewards_estimator, + get_matched_rewards_estimator_settings, +) +from v2.project_rewards.dependencies import get_project_rewards_estimator +from v2.project_rewards.services import ProjectRewardsEstimator from v2.allocations.dependencies import ( SignatureVerifierSettings, - get_allocations, + get_allocator, get_signature_verifier, + get_signature_verifier_settings, ) from v2.epochs.contracts import EpochsContracts from v2.projects.services import ( - EstimatedProjectRewards, ProjectsAllocationThresholdGetter, ) -from v2.uniqueness_quotients.dependencies import UQScoreSettings, get_uq_score_getter +from v2.uniqueness_quotients.dependencies import UQScoreSettings, get_uq_score_getter, get_uq_score_settings from v2.allocations.repositories import get_donations_by_project -from v2.allocations.services import Allocations +from v2.allocations.services import Allocator from v2.core.dependencies import ( DatabaseSettings, Web3ProviderSettings, + get_database_settings, get_db_session, + get_sessionmaker, get_w3, + get_web3_provider_settings, ) from v2.epochs.dependencies import ( EpochsSettings, EpochsSubgraphSettings, + assert_allocation_window_open, get_epochs_contracts, + get_epochs_settings, get_epochs_subgraph, + get_epochs_subgraph_settings, ) -from v2.projects.depdendencies import ( - EstimatedProjectMatchedRewardsSettings, +from v2.projects.dependencies import ( ProjectsAllocationThresholdSettings, ProjectsSettings, - get_estimated_project_matched_rewards, - get_estimated_project_rewards, + get_projects_allocation_threshold_settings, get_projects_contracts, + get_projects_allocation_threshold_getter, + get_projects_settings, ) -from v2.projects.depdendencies import get_projects_allocation_threshold_getter -from .schemas import AllocationRequest, UserAllocationRequest +from .schemas import AllocationRequest, UserAllocationRequest, UserAllocationRequestV1 from sqlalchemy.ext.asyncio import AsyncSession -class AllocateNamespace(socketio.AsyncNamespace): - def create_dependencies_on_connect( - self, - session: AsyncSession, - ) -> Tuple[ - ProjectsAllocationThresholdGetter, EstimatedProjectRewards, EpochsContracts - ]: - """ - Create and return all service dependencies. - TODO: how could we cache this one ? 
- """ - w3 = get_w3(Web3ProviderSettings()) # type: ignore - projects_contracts = get_projects_contracts(w3, ProjectsSettings()) - threshold_getter = get_projects_allocation_threshold_getter( - session, projects_contracts, ProjectsAllocationThresholdSettings() - ) - epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) - epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) - estimated_matched_rewards = get_estimated_project_matched_rewards( - session, epochs_subgraph, EstimatedProjectMatchedRewardsSettings() - ) - estimated_project_rewards = get_estimated_project_rewards( - session, - projects_contracts, - estimated_matched_rewards, - ) +@asynccontextmanager +async def create_dependencies_on_connect() -> AsyncGenerator[ + Tuple[AsyncSession, ProjectsAllocationThresholdGetter, ProjectRewardsEstimator], + None, +]: + """ + Create and return all service dependencies. + """ + w3 = get_w3(get_web3_provider_settings()) + epochs_contracts = get_epochs_contracts(w3, get_epochs_settings()) - return (threshold_getter, estimated_project_rewards, epochs_contracts) + # We do not handle requests outside of pending epoch state (Allocation Window) + # This will raise an exception if the allocation window is closed and connection does not happen + # epoch_number = await assert_allocation_window_open(epochs_contracts) + epoch_number = 128 - async def handle_on_connect( - self, - session: AsyncSession, - epochs_contracts: EpochsContracts, - projects_allocation_threshold_getter: ProjectsAllocationThresholdGetter, - estimated_project_rewards: EstimatedProjectRewards, - ): - """ - Handle client connection - """ + projects_contracts = get_projects_contracts(w3, get_projects_settings()) + epochs_subgraph = get_epochs_subgraph(get_epochs_subgraph_settings()) - logging.debug("Client connected") + # For safety, we create separate sessions for each dependency + # (to avoid any potential issues with session sharing in async task context) - pending_epoch_number = await epochs_contracts.get_pending_epoch() - if pending_epoch_number is None: - return + sessionmaker = get_sessionmaker(get_database_settings()) - # Get the allocation threshold and send it to the client - allocation_threshold = await projects_allocation_threshold_getter.get( - epoch_number=pending_epoch_number - ) - await self.emit("threshold", {"threshold": str(allocation_threshold)}) - - # Get the estimated project rewards and send them to the client - project_rewards = await estimated_project_rewards.get(pending_epoch_number) - rewards = [ - { - "address": project_address, - "allocated": str(project_rewards.amounts_by_project[project_address]), - "matched": str(project_rewards.matched_by_project[project_address]), - } - for project_address in project_rewards.amounts_by_project.keys() - ] - - await self.emit("project_rewards", rewards) - - for project_address in project_rewards.amounts_by_project.keys(): - donations = await get_donations_by_project( - session=session, - project_address=project_address, - epoch_number=pending_epoch_number, + async with ( + sessionmaker() as s1, + sessionmaker() as s2, + sessionmaker() as s3, + sessionmaker() as s4, + ): + try: + threshold_getter = get_projects_allocation_threshold_getter( + epoch_number, + s1, + projects_contracts, + get_projects_allocation_threshold_settings(), ) - - await self.emit( - "project_donors", - {"project": project_address, "donors": donations}, + estimated_matched_rewards = await get_matched_rewards_estimator( + epoch_number, + s2, + epochs_subgraph, + 
get_matched_rewards_estimator_settings(), + ) + estimated_project_rewards = await get_project_rewards_estimator( + epoch_number, + s3, + projects_contracts, + estimated_matched_rewards, ) + # Yield the dependencies to the on_connect handler + yield (s4, threshold_getter, estimated_project_rewards) - async def on_connect(self, sid: str, environ: dict): - async with get_db_session(DatabaseSettings()) as session: - ( - projects_allocation_threshold_getter, - estimated_project_rewards, - epochs_contracts, - ) = self.create_dependencies_on_connect(session) - - await self.handle_on_connect( - session, - epochs_contracts, - projects_allocation_threshold_getter, - estimated_project_rewards, + except Exception as e: + await asyncio.gather( + s1.rollback(), + s2.rollback(), + s3.rollback(), + s4.rollback(), + ) + raise + finally: + await asyncio.gather( + s1.close(), + s2.close(), + s3.close(), + s4.close(), ) - async def on_disconnect(self, sid): - logging.debug("Client disconnected") - def create_dependencies_on_allocate( - self, - session: AsyncSession, - ) -> Tuple[ - Allocations, +@asynccontextmanager +async def create_dependencies_on_allocate() -> AsyncGenerator[ + Tuple[ + AsyncSession, + Allocator, EpochsContracts, ProjectsAllocationThresholdGetter, - EstimatedProjectRewards, - ]: - """ - Create and return all service dependencies. - """ - - w3 = get_w3(Web3ProviderSettings()) - epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) - projects_contracts = get_projects_contracts(w3, ProjectsSettings()) - epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) + ProjectRewardsEstimator, + ], + None, +]: + """ + Create and return all service dependencies. + """ + + w3 = get_w3(get_web3_provider_settings()) + epochs_contracts = get_epochs_contracts(w3, get_epochs_settings()) + + # We do not handle requests outside of pending epoch state (Allocation Window) + # This will raise an exception if the allocation window is closed and connection does not happen + epoch_number = await assert_allocation_window_open(epochs_contracts) + + projects_contracts = get_projects_contracts(w3, get_projects_settings()) + epochs_subgraph = get_epochs_subgraph(get_epochs_subgraph_settings()) + + # For safety, we create separate sessions for each dependency + # (to avoid any potential issues with session sharing in async task context) + sessionmaker = get_sessionmaker(get_database_settings()) + + async with ( + sessionmaker() as s1, + sessionmaker() as s2, + sessionmaker() as s3, + sessionmaker() as s4, + sessionmaker() as s5, + ): threshold_getter = get_projects_allocation_threshold_getter( - session, projects_contracts, ProjectsAllocationThresholdSettings() + epoch_number, + s1, + projects_contracts, + get_projects_allocation_threshold_settings(), ) - estimated_matched_rewards = get_estimated_project_matched_rewards( - session, epochs_subgraph, EstimatedProjectMatchedRewardsSettings() + estimated_matched_rewards = await get_matched_rewards_estimator( + epoch_number, s2, epochs_subgraph, get_matched_rewards_estimator_settings() ) - estimated_project_rewards = get_estimated_project_rewards( - session, + estimated_project_rewards = await get_project_rewards_estimator( + epoch_number, + s3, projects_contracts, estimated_matched_rewards, ) signature_verifier = get_signature_verifier( - session, epochs_subgraph, projects_contracts, SignatureVerifierSettings() + s4, epochs_subgraph, projects_contracts, get_signature_verifier_settings() ) - uq_score_getter = get_uq_score_getter(session, UQScoreSettings()) + 
uq_score_getter = get_uq_score_getter(s5, get_uq_score_settings()) - allocations = get_allocations( - session, + allocations = await get_allocator( + epoch_number, + s5, signature_verifier, uq_score_getter, projects_contracts, estimated_matched_rewards, ) - return ( + # Yield the dependencies to the on_allocate handler + yield ( + s5, allocations, epochs_contracts, threshold_getter, estimated_project_rewards, ) - async def handle_on_allocate( - self, - session: AsyncSession, - epochs_contracts: EpochsContracts, - allocations: Allocations, - threshold_getter: ProjectsAllocationThresholdGetter, - estimated_project_rewards: EstimatedProjectRewards, - data: dict, - ): - """ - Handle allocation request - """ - - # We do not handle requests outside of pending epoch state (Allocation Window) - pending_epoch_number = await epochs_contracts.get_pending_epoch() - if pending_epoch_number is None: - return - - pending_epoch_number = 1 - request = from_dict(data) - - await allocations.make(pending_epoch_number, request) - - logging.debug("Allocation request handled") - - threshold = await threshold_getter.get(pending_epoch_number) - await self.emit("threshold", {"threshold": str(threshold)}) - - project_rewards = await estimated_project_rewards.get(pending_epoch_number) - rewards = [ - { - "address": project_address, - "allocated": str(project_rewards.amounts_by_project[project_address]), - "matched": str(project_rewards.matched_by_project[project_address]), - } - for project_address in project_rewards.amounts_by_project.keys() - ] - - await self.emit("project_rewards", rewards) - - for project_address in project_rewards.amounts_by_project.keys(): - donations = await get_donations_by_project( - session=session, - project_address=project_address, - epoch_number=pending_epoch_number, + +class AllocateNamespace(socketio.AsyncNamespace): + async def handle_on_connect(self, sid: str, environ: dict): + async with create_dependencies_on_connect() as ( + session, + threshold_getter, + estimated_project_rewards, + ): + logging.debug("Client connected") + + # Get the allocation threshold and send it to the client + allocation_threshold = await threshold_getter.get() + + await self.emit( + "threshold", {"threshold": str(allocation_threshold)}, to=sid ) + # Get the estimated project rewards and send them to the client + project_rewards = await estimated_project_rewards.get() + # rewards = [ + # { + # "address": project_address, + # "allocated": str(project_rewards.amounts_by_project[project_address]), + # "matched": str(project_rewards.matched_by_project[project_address]), + # } + # for project_address in project_rewards.amounts_by_project.keys() + # ] + await self.emit( - "project_donors", - {"project": project_address, "donors": donations}, + "project_rewards", + [p.model_dump() for p in project_rewards.project_fundings.values()], + to=sid, ) - async def on_allocate(self, sid: str, data: dict): - async with get_db_session(DatabaseSettings()) as session: - ( - allocations, - epochs_contracts, - threshold_getter, - estimated_project_rewards, - ) = self.create_dependencies_on_allocate(session) - - await self.handle_on_allocate( - session, - epochs_contracts, - allocations, - threshold_getter, - estimated_project_rewards, - data, + for project_address in project_rewards.project_fundings: + donations = await get_donations_by_project( + session=session, + project_address=project_address, + epoch_number=estimated_project_rewards.epoch_number, + ) + + await self.emit( + "project_donors", + {"project": project_address, 
"donors": donations}, + ) + + async def on_connect(self, sid: str, environ: dict): + try: + await self.handle_on_connect(sid, environ) + except AllocationWindowClosed: + logging.info("Allocation window is closed, connection not established") + except Exception as e: + logging.error(f"Error handling on_connect: {e}") + + async def on_disconnect(self, sid): + logging.debug("Client disconnected") + + async def handle_on_allocate(self, sid: str, data: dict): + async with create_dependencies_on_allocate() as ( + session, + allocations, + epochs_contracts, + threshold_getter, + estimated_project_rewards, + ): + request = from_dict(data) + + await allocations.handle(request) + + logging.debug("Allocation request handled") + + # Get the allocation threshold and send it to the client + allocation_threshold = await threshold_getter.get() + + await self.emit( + "threshold", {"threshold": str(allocation_threshold)}, to=sid + ) + + # Get the estimated project rewards and send them to the client + project_rewards = await estimated_project_rewards.get() + # rewards = [ + # { + # "address": project_address, + # "allocated": str(project_rewards.amounts_by_project[project_address]), + # "matched": str(project_rewards.matched_by_project[project_address]), + # } + # for project_address in project_rewards.amounts_by_project.keys() + # ] + + await self.emit( + "project_rewards", + [p.model_dump() for p in project_rewards.project_fundings.values()], + to=sid, ) + for project_address in project_rewards.project_fundings: + donations = await get_donations_by_project( + session=session, + project_address=project_address, + epoch_number=estimated_project_rewards.epoch_number, + ) + + await self.emit( + "project_donors", + {"project": project_address, "donors": donations}, + ) + + async def on_allocate(self, sid: str, data: dict): + try: + await self.handle_on_allocate(sid, data) + + except AllocationWindowClosed: + logging.info("Allocation window is closed, allocation not processed") + + except Exception as e: + logging.error(f"Error handling on_allocate: {e}") + def from_dict(data: dict) -> UserAllocationRequest: """ @@ -284,6 +345,18 @@ def from_dict(data: dict) -> UserAllocationRequest: "isManuallyEdited": False } """ + + # parse the incoming data as UserAllocationRequestV1 + requestV1 = UserAllocationRequestV1.model_validate(data) + request = UserAllocationRequest( + user_address=requestV1.user_address, + allocations=requestV1.payload.allocations, + nonce=requestV1.payload.nonce, + signature=requestV1.signature, + is_manually_edited=requestV1.is_manually_edited, + ) + return request + user_address = to_checksum_address(data["userAddress"]) payload = data["payload"] allocations = [ diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py index 28d7c2f253..7a066cb222 100644 --- a/backend/v2/allocations/validators.py +++ b/backend/v2/allocations/validators.py @@ -3,6 +3,7 @@ from web3 import AsyncWeb3 from app import exceptions from app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing +from v2.core.types import Address from .schemas import UserAllocationRequest from .repositories import get_last_allocation_request_nonce from v2.crypto.signatures import verify_signed_message @@ -26,8 +27,9 @@ class SignatureVerifier: async def verify(self, epoch_number: int, request: UserAllocationRequest) -> None: import time + start = time.time() - + await asyncio.gather( verify_logic( session=self.session, @@ -68,19 +70,22 @@ async def verify_logic( print("already 
here") async def _check_database(): - await _provided_nonce_matches_expected(session, payload.user_address, payload.nonce) - await _user_is_not_patron(session, epoch_subgraph, payload.user_address, epoch_number) + await _provided_nonce_matches_expected( + session, payload.user_address, payload.nonce + ) + await _user_is_not_patron( + session, epoch_subgraph, payload.user_address, epoch_number + ) await _user_has_budget(session, payload, epoch_number) - + await asyncio.gather( _check_database(), - _provided_projects_are_correct(projects_contracts, epoch_number, payload) + _provided_projects_are_correct(projects_contracts, epoch_number, payload), ) - # try: # async with asyncio.TaskGroup() as tg: - + # tg.create_task(_provided_nonce_matches_expected(session, payload.user_address, payload.nonce)) # tg.create_task(_user_is_not_patron(session, epoch_subgraph, payload.user_address, epoch_number)) # tg.create_task(_provided_projects_are_correct(projects_contracts, epoch_number, payload)) @@ -89,7 +94,6 @@ async def _check_database(): # print("e", e) # raise e - # summary = asyncio.gather( # _provided_nonce_matches_expected(session, payload.user_address, payload.nonce), # _user_is_not_patron( @@ -110,11 +114,12 @@ async def _check_database(): print("hehehehehe") + async def _provided_nonce_matches_expected( # Component dependencies session: AsyncSession, # Arguments - user_address: str, + user_address: Address, nonce: int, ) -> None: """ @@ -133,7 +138,7 @@ async def _user_is_not_patron( session: AsyncSession, epoch_subgraph: EpochsSubgraph, # Arguments - user_address: str, + user_address: Address, epoch_number: int, ) -> None: """ @@ -150,11 +155,12 @@ async def _user_is_not_patron( if is_patron: raise exceptions.NotAllowedInPatronMode(user_address) + async def get_next_user_nonce( # Component dependencies session: AsyncSession, # Arguments - user_address: str, + user_address: Address, ) -> int: """ Get the next expected nonce for the user. 
@@ -187,6 +193,7 @@ async def _provided_projects_are_correct( """ import time + start = time.time() # Check if the user is not a project all_projects = await projects_contracts.get_project_addresses(epoch_number) @@ -234,7 +241,7 @@ async def _user_has_budget( async def verify_signature( - w3: AsyncWeb3, chain_id: int, user_address: str, payload: UserAllocationRequest + w3: AsyncWeb3, chain_id: int, user_address: Address, payload: UserAllocationRequest ) -> None: eip712_encoded = build_allocations_eip712_structure(chain_id, payload) encoded_msg = encode_for_signing(EncodingStandardFor.DATA, eip712_encoded) diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index e615b4129f..acf6a8ec80 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -1,5 +1,4 @@ from asyncio import current_task -from contextlib import asynccontextmanager from functools import lru_cache from typing import Annotated, AsyncGenerator @@ -8,13 +7,18 @@ from pydantic import Field from pydantic_settings import BaseSettings, SettingsConfigDict -from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine, async_scoped_session +from sqlalchemy.ext.asyncio import ( + AsyncSession, + async_sessionmaker, + create_async_engine, + async_scoped_session, +) from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware class OctantSettings(BaseSettings): - model_config = SettingsConfigDict(env_file='.env', extra='ignore', frozen=True) + model_config = SettingsConfigDict(env_file=".env", extra="ignore", frozen=True) class Web3ProviderSettings(OctantSettings): @@ -22,7 +26,8 @@ class Web3ProviderSettings(OctantSettings): def get_web3_provider_settings() -> Web3ProviderSettings: - return Web3ProviderSettings() + return Web3ProviderSettings() # type: ignore[call-arg] + def get_w3( settings: Annotated[Web3ProviderSettings, Depends(get_web3_provider_settings)] @@ -47,7 +52,7 @@ def sqlalchemy_database_uri(self) -> str: def get_database_settings() -> DatabaseSettings: - return DatabaseSettings() + return DatabaseSettings() # type: ignore[call-arg] async def create_tables(): @@ -63,13 +68,13 @@ def get_sessionmaker( ) -> async_sessionmaker[AsyncSession]: engine = create_async_engine( settings.sqlalchemy_database_uri, - echo=False, # Disable SQL query logging (for performance) - pool_size=100, # Initial pool size (default is 5) - max_overflow=10, # Extra connections if pool is exhausted - pool_timeout=30, # Timeout before giving up on a connection - pool_recycle=3600, # Recycle connections after 1 hour (for long-lived connections) - pool_pre_ping=True, # Check if the connection is alive before using it - future=True, # Use the future-facing SQLAlchemy 2.0 style + echo=False, # Disable SQL query logging (for performance) + pool_size=100, # Initial pool size (default is 5) + max_overflow=10, # Extra connections if pool is exhausted + pool_timeout=30, # Timeout before giving up on a connection + pool_recycle=3600, # Recycle connections after 1 hour (for long-lived connections) + pool_pre_ping=True, # Check if the connection is alive before using it + future=True, # Use the future-facing SQLAlchemy 2.0 style # connect_args={"options": "-c timezone=utc"} # Ensures timezone is UTC ) @@ -77,9 +82,8 @@ def get_sessionmaker( autocommit=False, autoflush=False, bind=engine, class_=AsyncSession ) - scoped_session = async_scoped_session(sessionmaker, scopefunc=current_task) + return sessionmaker - return scoped_session # 
@asynccontextmanager async def get_db_session( @@ -108,8 +112,13 @@ async def get_db_session( # logging.error("Opening session", async_session) + # scoped_session = async_scoped_session(sessionmaker, scopefunc=current_task) + # Create a new session async with sessionmaker() as session: + print("in gettersession id", id(session)) + print("in gettersession identity", session) + try: yield session await session.commit() @@ -122,4 +131,4 @@ async def get_db_session( await session.close() -GetSession = Annotated[AsyncSession, Depends(get_db_session)] +GetSession = Annotated[AsyncSession, Depends(get_db_session, use_cache=False)] diff --git a/backend/v2/core/exceptions.py b/backend/v2/core/exceptions.py new file mode 100644 index 0000000000..cf58d0c51a --- /dev/null +++ b/backend/v2/core/exceptions.py @@ -0,0 +1,9 @@ +from app.exceptions import OctantException + + +class AllocationWindowClosed(OctantException): + code = 403 # Forbidden + description = "This action is available only during the allocation window." + + def __init__(self): + super().__init__(self.description, self.code) diff --git a/backend/v2/core/types.py b/backend/v2/core/types.py new file mode 100644 index 0000000000..76061ea9a5 --- /dev/null +++ b/backend/v2/core/types.py @@ -0,0 +1,7 @@ +from typing import Annotated +from eth_utils import to_checksum_address +from pydantic.functional_validators import AfterValidator + + +# Address is a checksummed Ethereum address. +Address = Annotated[str, AfterValidator(to_checksum_address)] diff --git a/backend/v2/crypto/signatures.py b/backend/v2/crypto/signatures.py index 85409203d2..372cb22390 100644 --- a/backend/v2/crypto/signatures.py +++ b/backend/v2/crypto/signatures.py @@ -2,6 +2,7 @@ from eth_account.messages import SignableMessage, _hash_eip191_message from eth_keys.exceptions import BadSignature from eth_utils import to_checksum_address +from v2.core.types import Address from v2.crypto.contracts import GNOSIS_SAFE, GnosisSafeContracts from web3 import AsyncWeb3 from web3.exceptions import ContractLogicError @@ -9,7 +10,7 @@ async def verify_signed_message( w3: AsyncWeb3, - user_address: str, + user_address: Address, encoded_msg: SignableMessage, signature: str, ) -> bool: @@ -43,18 +44,18 @@ def hash_signable_message(encoded_msg: SignableMessage) -> str: async def _verify_multisig( - w3: AsyncWeb3, user_address: str, encoded_msg: SignableMessage, signature: str + w3: AsyncWeb3, user_address: Address, encoded_msg: SignableMessage, signature: str ) -> bool: msg_hash = hash_signable_message(encoded_msg) try: - gnosis_safe = GnosisSafeContracts(w3=w3, abi=GNOSIS_SAFE, address=user_address) + gnosis_safe = GnosisSafeContracts(w3=w3, abi=GNOSIS_SAFE, address=user_address) # type: ignore[arg-type] return await gnosis_safe.is_valid_signature(msg_hash, signature) except ContractLogicError: return False def _verify_eoa( - user_address: str, encoded_msg: SignableMessage, signature: str + user_address: Address, encoded_msg: SignableMessage, signature: str ) -> bool: try: recovered_address = Account.recover_message(encoded_msg, signature=signature) diff --git a/backend/v2/deposits/contracts.py b/backend/v2/deposits/contracts.py index df4b6025b4..559b9b4bb8 100644 --- a/backend/v2/deposits/contracts.py +++ b/backend/v2/deposits/contracts.py @@ -1,28 +1,25 @@ - - - from v2.core.contracts import SmartContract from typing import Protocol + class AddressKey(Protocol): address: str key: str class DepositsContracts(SmartContract): - - def lock(self, account: AddressKey, amount: int): - nonce = 
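# Illustration only, not part of the change: how the Address alias from
# backend/v2/core/types.py behaves when used as a Pydantic field type. The
# AfterValidator runs to_checksum_address after normal str validation, so models
# store EIP-55 checksummed addresses; invalid hex raises a ValidationError.
# The model name below is hypothetical.

from pydantic import BaseModel


class _AddressedModel(BaseModel):
    address: Address


checked = _AddressedModel(address="0x" + "ab" * 20)
# checked.address is now the checksummed form of the lower-case input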
self.w3.eth.get_transaction_count(account.address) - transaction = self.contract.functions.lock(amount).build_transaction( + async def lock(self, account: AddressKey, amount: int): + nonce = await self.w3.eth.get_transaction_count(account.address) + transaction = await self.contract.functions.lock(amount).build_transaction( {"from": account.address, "nonce": nonce} ) signed_tx = self.w3.eth.account.sign_transaction(transaction, account.key) return self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) - def balance_of(self, owner_address: str) -> int: - return self.contract.functions.deposits(owner_address).call() + async def balance_of(self, owner_address: str) -> int: + return await self.contract.functions.deposits(owner_address).call() DEPOSITS_ABI = [ diff --git a/backend/v2/deposits/dependencies.py b/backend/v2/deposits/dependencies.py index 095561ede1..927c22876a 100644 --- a/backend/v2/deposits/dependencies.py +++ b/backend/v2/deposits/dependencies.py @@ -1,9 +1,5 @@ - - - from typing import Annotated from fastapi import Depends -from pydantic_settings import BaseSettings from v2.core.dependencies import OctantSettings, Web3 @@ -16,10 +12,10 @@ class DepositsSettings(OctantSettings): def get_deposits_settings() -> DepositsSettings: - return DepositsSettings() + return DepositsSettings() # type: ignore[call-arg] def get_deposits_contracts( w3: Web3, settings: Annotated[DepositsSettings, Depends(get_deposits_settings)] ) -> DepositsContracts: - return DepositsContracts(w3, DEPOSITS_ABI, settings.deposits_contract_address) + return DepositsContracts(w3, DEPOSITS_ABI, settings.deposits_contract_address) # type: ignore[arg-type] diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py index 47f9dda7ee..6cb7bb3c2d 100644 --- a/backend/v2/epochs/dependencies.py +++ b/backend/v2/epochs/dependencies.py @@ -1,25 +1,25 @@ from typing import Annotated from fastapi import Depends +from v2.core.exceptions import AllocationWindowClosed from v2.core.dependencies import OctantSettings, Web3 from .contracts import EPOCHS_ABI, EpochsContracts from .subgraphs import EpochsSubgraph - class EpochsSettings(OctantSettings): epochs_contract_address: str def get_epochs_settings() -> EpochsSettings: - return EpochsSettings() + return EpochsSettings() # type: ignore[call-arg] def get_epochs_contracts( w3: Web3, settings: Annotated[EpochsSettings, Depends(get_epochs_settings)] ) -> EpochsContracts: - return EpochsContracts(w3, EPOCHS_ABI, settings.epochs_contract_address) + return EpochsContracts(w3, EPOCHS_ABI, settings.epochs_contract_address) # type: ignore[arg-type] GetEpochsContracts = Annotated[ @@ -28,12 +28,36 @@ def get_epochs_contracts( ] +async def assert_allocation_window_open( + epochs_contracts: GetEpochsContracts, +) -> int: + import time + + # print("assert_allocation_window_open called") + + start = time.time() + + epoch_number = await epochs_contracts.get_pending_epoch() + + print("assert_allocation_window_open took", time.time() - start, "seconds") + if epoch_number is None: + raise AllocationWindowClosed() + + return epoch_number + + +AssertAllocationWindowOpen = Annotated[ + int, + Depends(assert_allocation_window_open), +] + + class EpochsSubgraphSettings(OctantSettings): subgraph_endpoint: str def get_epochs_subgraph_settings() -> EpochsSubgraphSettings: - return EpochsSubgraphSettings() + return EpochsSubgraphSettings() # type: ignore[call-arg] def get_epochs_subgraph( @@ -45,4 +69,4 @@ def get_epochs_subgraph( GetEpochsSubgraph = Annotated[ EpochsSubgraph, 
Depends(get_epochs_subgraph), -] \ No newline at end of file +] diff --git a/backend/v2/glms/contracts.py b/backend/v2/glms/contracts.py index 4e1dc32681..fb0cfdaeb8 100644 --- a/backend/v2/glms/contracts.py +++ b/backend/v2/glms/contracts.py @@ -1,19 +1,15 @@ - - - from v2.core.contracts import SmartContract from typing import Protocol + class AddressKey(Protocol): address: str key: str class GLMContracts(SmartContract): - - # def glm_fund(self, to_address, nonce): # transaction = self.contract.functions.transfer( # to_address, app.config["GLM_WITHDRAWAL_AMOUNT"] @@ -26,12 +22,8 @@ class GLMContracts(SmartContract): # def transfer(self, sender, receiver: str, amount: int): # async def transfer(self, sender_address: str, receiver: str, amount: int): async def transfer( - self, - sender: AddressKey, - receiver_address: str, - amount: int + self, sender: AddressKey, receiver_address: str, amount: int ) -> None: - nonce = await self.w3.eth.get_transaction_count(sender) transaction = self.contract.functions.transfer( receiver_address, amount @@ -44,8 +36,8 @@ async def approve(self, owner: AddressKey, benefactor_address, wad: int): print("owner address: ", owner.address) print("owner key: ", owner.key) print("benefactor of lock: ", benefactor_address) - nonce = self.w3.eth.get_transaction_count(owner.address) - transaction = self.contract.functions.approve( + nonce = await self.w3.eth.get_transaction_count(owner.address) + transaction = await self.contract.functions.approve( benefactor_address, wad ).build_transaction({"from": owner.address, "nonce": nonce}) signed_tx = self.w3.eth.account.sign_transaction(transaction, owner.key) diff --git a/backend/v2/glms/dependencies.py b/backend/v2/glms/dependencies.py index 4c7ca272d8..3b7bab9482 100644 --- a/backend/v2/glms/dependencies.py +++ b/backend/v2/glms/dependencies.py @@ -1,9 +1,5 @@ - - - from typing import Annotated from fastapi import Depends -from pydantic_settings import BaseSettings from v2.core.dependencies import OctantSettings, Web3 @@ -16,10 +12,10 @@ class GLMSettings(OctantSettings): def get_glm_settings() -> GLMSettings: - return GLMSettings() + return GLMSettings() # type: ignore[call-arg] def get_glm_contracts( w3: Web3, settings: Annotated[GLMSettings, Depends(get_glm_settings)] ) -> GLMContracts: - return GLMContracts(w3, ERC20_ABI, settings.glm_contract_address) + return GLMContracts(w3, ERC20_ABI, settings.glm_contract_address) # type: ignore[arg-type] diff --git a/backend/v2/main.py b/backend/v2/main.py index a816b84119..20bb9d9b9e 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -8,6 +8,7 @@ from sqlalchemy.exc import SQLAlchemyError from v2.allocations.router import api as allocations_api +from v2.project_rewards.router import api as project_rewards_api fastapi_app = FastAPI() @@ -19,6 +20,7 @@ async def handle_octant_exception(request, ex: OctantException): content={"message": ex.message}, ) + @fastapi_app.exception_handler(SQLAlchemyError) async def handle_sqlalchemy_exception(request, ex: SQLAlchemyError): logging.error(f"SQLAlchemyError: {ex}") @@ -41,5 +43,6 @@ async def fastapi_endpoint(): fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app) fastapi_app.include_router(allocations_api) +fastapi_app.include_router(project_rewards_api) # from v2.core.dependencies import create_tables # fastapi_app.add_event_handler("startup", create_tables) diff --git a/backend/v2/matched_rewards/__init__.py b/backend/v2/matched_rewards/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/backend/v2/matched_rewards/dependencies.py b/backend/v2/matched_rewards/dependencies.py new file mode 100644 index 0000000000..992b4794e6 --- /dev/null +++ b/backend/v2/matched_rewards/dependencies.py @@ -0,0 +1,58 @@ +from decimal import Decimal +from typing import Annotated +from fastapi import Depends +from pydantic import Field +from v2.core.exceptions import AllocationWindowClosed +from v2.epochs.dependencies import ( + AssertAllocationWindowOpen, + GetEpochsContracts, + get_epochs_subgraph, +) +from v2.epochs.subgraphs import EpochsSubgraph +from v2.core.dependencies import GetSession, OctantSettings + +from .services import MatchedRewardsEstimator + + +class MatchedRewardsEstimatorSettings(OctantSettings): + TR_PERCENT: Decimal = Field( + default=Decimal("0.7"), description="The percentage of the TR rewards." + ) + IRE_PERCENT: Decimal = Field( + default=Decimal("0.35"), description="The percentage of the IRE rewards." + ) + MATCHED_REWARDS_PERCENT: Decimal = Field( + default=Decimal("0.35"), description="The percentage of the matched rewards." + ) + + +def get_matched_rewards_estimator_settings() -> MatchedRewardsEstimatorSettings: + return MatchedRewardsEstimatorSettings() + + +async def get_matched_rewards_estimator( + epoch_number: AssertAllocationWindowOpen, + session: GetSession, + epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], + settings: Annotated[ + MatchedRewardsEstimatorSettings, + Depends(get_matched_rewards_estimator_settings), + ], +) -> MatchedRewardsEstimator: + print("session id", id(session)) + print("session identity", session) + + return MatchedRewardsEstimator( + session=session, + epochs_subgraph=epochs_subgraph, + tr_percent=settings.TR_PERCENT, + ire_percent=settings.IRE_PERCENT, + matched_rewards_percent=settings.MATCHED_REWARDS_PERCENT, + epoch_number=epoch_number, + ) + + +GetMatchedRewardsEstimator = Annotated[ + MatchedRewardsEstimator, + Depends(get_matched_rewards_estimator), +] diff --git a/backend/v2/matched_rewards/services.py b/backend/v2/matched_rewards/services.py new file mode 100644 index 0000000000..b6b38c0662 --- /dev/null +++ b/backend/v2/matched_rewards/services.py @@ -0,0 +1,84 @@ +from dataclasses import dataclass +from decimal import Decimal + +from sqlalchemy.ext.asyncio import AsyncSession +from v2.epoch_snapshots.repositories import get_pending_epoch_snapshot +from v2.epochs.subgraphs import EpochsSubgraph + +from v2.user_patron_mode.repositories import get_patrons_rewards + + +@dataclass +class MatchedRewardsEstimator: + # Dependencies + session: AsyncSession + epochs_subgraph: EpochsSubgraph + # Parameters + tr_percent: Decimal + ire_percent: Decimal + matched_rewards_percent: Decimal + epoch_number: int + + async def get(self) -> int: + return await get_estimated_project_matched_rewards_pending( + session=self.session, + epochs_subgraph=self.epochs_subgraph, + tr_percent=self.tr_percent, + ire_percent=self.ire_percent, + matched_rewards_percent=self.matched_rewards_percent, + epoch_number=self.epoch_number, + ) + + +async def get_estimated_project_matched_rewards_pending( + # Dependencies + session: AsyncSession, + epochs_subgraph: EpochsSubgraph, + # Settings + tr_percent: Decimal, + ire_percent: Decimal, + matched_rewards_percent: Decimal, + # Arguments + epoch_number: int, +) -> int: + """ + Get the estimated matched rewards for the pending epoch. 
+ """ + + pending_snapshot = await get_pending_epoch_snapshot(session, epoch_number) + if pending_snapshot is None: + raise ValueError(f"No pending snapshot for epoch {epoch_number}") + + epoch_details = await epochs_subgraph.get_epoch_by_number(epoch_number) + patrons_rewards = await get_patrons_rewards( + session, epoch_details.finalized_timestamp.datetime(), epoch_number + ) + + return _calculate_percentage_matched_rewards( + locked_ratio=Decimal(pending_snapshot.locked_ratio), + tr_percent=tr_percent, + ire_percent=ire_percent, + staking_proceeds=int(pending_snapshot.eth_proceeds), + patrons_rewards=patrons_rewards, + matched_rewards_percent=matched_rewards_percent, + ) + + +def _calculate_percentage_matched_rewards( + locked_ratio: Decimal, + tr_percent: Decimal, + ire_percent: Decimal, + staking_proceeds: int, + patrons_rewards: int, + matched_rewards_percent: Decimal, # Config +) -> int: + if locked_ratio > tr_percent: + raise ValueError("Invalid Strategy - locked_ratio > tr_percent") + + if locked_ratio < ire_percent: + return int(matched_rewards_percent * staking_proceeds + patrons_rewards) + + if ire_percent <= locked_ratio < tr_percent: + return int((tr_percent - locked_ratio) * staking_proceeds + patrons_rewards) + + return patrons_rewards diff --git a/backend/v2/project_rewards/capped_quadriatic.py b/backend/v2/project_rewards/capped_quadriatic.py index eed699bf41..b19e907309 100644 --- a/backend/v2/project_rewards/capped_quadriatic.py +++ b/backend/v2/project_rewards/capped_quadriatic.py @@ -3,14 +3,14 @@ from math import sqrt from typing import Dict, NamedTuple +from v2.core.types import Address from v2.allocations.schemas import AllocationWithUserUQScore +from .schemas import ProjectFundingSummary + class CappedQuadriaticFunding(NamedTuple): - amounts_by_project: Dict[ - str, Decimal - ] # Sum of all allocation amounts for each project - matched_by_project: Dict[str, Decimal] # Sum of matched rewards for each project + project_fundings: dict[Address, ProjectFundingSummary] amounts_total: Decimal # Sum of all allocation amounts for all projects matched_total: Decimal # Sum of all matched rewards for all projects @@ -96,10 +96,18 @@ def capped_quadriatic_funding( matched_by_project[project_address] = matched_capped matched_total += matched_capped + project_fundings = { + project_address: ProjectFundingSummary( + address=project_address, + allocated=int(amount_by_project[project_address]), + matched=int(matched_by_project[project_address]), + ) + for project_address in project_addresses + } + return CappedQuadriaticFunding( - amounts_by_project=amount_by_project, - matched_by_project=matched_by_project, - amounts_total=total_qf, + project_fundings=project_fundings, + amounts_total=amounts_total, matched_total=matched_total, ) @@ -113,9 +121,9 @@ def cqf_calculate_total_leverage(matched_rewards: int, total_allocated: int) -> def cqf_calculate_individual_leverage( new_allocations_amount: int, - project_addresses: list[str], - before_allocation_matched: Dict[str, Decimal], - after_allocation_matched: Dict[str, Decimal], + project_addresses: list[Address], + before_allocation: CappedQuadriaticFunding, + after_allocation: CappedQuadriaticFunding, ) -> float: """Calculate the leverage of a user's new allocations in capped quadratic funding. 
@@ -127,8 +135,14 @@ def cqf_calculate_individual_leverage( total_difference = Decimal(0) for project_address in project_addresses: - before = before_allocation_matched.get(project_address, 0) - after = after_allocation_matched[project_address] + if project_address in before_allocation.project_fundings: + before = Decimal(before_allocation.project_fundings[project_address].matched) + else: + before = Decimal(0) + + # before = before_allocation_matched.get(project_address, 0) + after = after_allocation.project_fundings[project_address].matched + # after = after_allocation_matched[project_address] difference = abs(before - after) total_difference += difference @@ -176,8 +190,8 @@ def cqf_simulate_leverage( leverage = cqf_calculate_individual_leverage( new_allocations_amount=sum(a.amount for a in new_allocations), project_addresses=[a.project_address for a in new_allocations], - before_allocation_matched=before_allocation.matched_by_project, - after_allocation_matched=after_allocation.matched_by_project, + before_allocation=before_allocation, + after_allocation=after_allocation, ) return leverage diff --git a/backend/v2/project_rewards/dependencies.py b/backend/v2/project_rewards/dependencies.py new file mode 100644 index 0000000000..a729a2d5f5 --- /dev/null +++ b/backend/v2/project_rewards/dependencies.py @@ -0,0 +1,32 @@ +from typing import Annotated +from fastapi import Depends +from v2.core.exceptions import AllocationWindowClosed +from v2.epochs.dependencies import AssertAllocationWindowOpen, GetEpochsContracts +from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator +from v2.projects.dependencies import GetProjectsContracts +from v2.core.dependencies import GetSession + +from .services import ProjectRewardsEstimator + + +async def get_project_rewards_estimator( + epoch_number: AssertAllocationWindowOpen, + session: GetSession, + projects_contracts: GetProjectsContracts, + estimated_project_matched_rewards: GetMatchedRewardsEstimator, +) -> ProjectRewardsEstimator: + print("session id", id(session)) + print("session identity", session) + + return ProjectRewardsEstimator( + session=session, + projects_contracts=projects_contracts, + matched_rewards_estimator=estimated_project_matched_rewards, + epoch_number=epoch_number, + ) + + +GetProjectRewardsEstimator = Annotated[ + ProjectRewardsEstimator, + Depends(get_project_rewards_estimator), +] diff --git a/backend/v2/project_rewards/router.py b/backend/v2/project_rewards/router.py new file mode 100644 index 0000000000..bec75df95a --- /dev/null +++ b/backend/v2/project_rewards/router.py @@ -0,0 +1,28 @@ +from fastapi import APIRouter + +from .schemas import EstimatedProjectRewardsResponse +from .dependencies import GetProjectRewardsEstimator + +api = APIRouter(prefix="/rewards", tags=["Allocations"]) + + +@api.get("/projects/estimated") +async def get_estimated_project_rewards( + project_rewards_estimator: GetProjectRewardsEstimator, +) -> EstimatedProjectRewardsResponse: + """ + Returns foreach project current allocation sum and estimated matched rewards. + + This endpoint is available only for the pending epoch state. 
+ """ + + import time + + start = time.time() + estimated_funding = await project_rewards_estimator.get() + + print("get_estimated_project_rewards took", time.time() - start, "seconds") + + return EstimatedProjectRewardsResponse( + rewards=[f for f in estimated_funding.project_fundings.values()] + ) diff --git a/backend/v2/project_rewards/schemas.py b/backend/v2/project_rewards/schemas.py new file mode 100644 index 0000000000..25192816b2 --- /dev/null +++ b/backend/v2/project_rewards/schemas.py @@ -0,0 +1,46 @@ +from decimal import Decimal +from pydantic import BaseModel, Field + +from v2.core.types import Address + + +class ProjectFundingSummary(BaseModel): + address: Address = Field(..., description="The address of the project") + allocated: int = Field( + ..., description="Sum of all allocation amounts for the project" + ) + matched: int = Field(..., description="Sum of matched rewards for the project") + + +class EstimatedProjectRewardsResponse(BaseModel): + rewards: list[ProjectFundingSummary] = Field( + ..., description="List of project funding summaries" + ) + + +# project_rewards = await project_rewards_estimator.get(pending_epoch_number) +# rewards = [ +# { +# "address": project_address, +# "allocated": str(project_rewards.amounts_by_project[project_address]), +# "matched": str(project_rewards.matched_by_project[project_address]), +# } +# for project_address in project_rewards.amounts_by_project.keys() +# ] + +# @ns.doc( +# description="Returns project rewards with estimated matched rewards for the pending epoch" +# ) +# @ns.response( +# 200, +# "", +# ) +# @ns.route("/projects/estimated") +# class EstimatedProjectRewards(OctantResource): +# @ns.marshal_with(projects_rewards_model) +# def get(self): +# app.logger.debug("Getting project rewards for the pending epoch") +# project_rewards = get_estimated_project_rewards().rewards +# app.logger.debug(f"Project rewards in the pending epoch: {project_rewards}") + +# return {"rewards": project_rewards} diff --git a/backend/v2/project_rewards/services.py b/backend/v2/project_rewards/services.py new file mode 100644 index 0000000000..3a01ec9017 --- /dev/null +++ b/backend/v2/project_rewards/services.py @@ -0,0 +1,39 @@ +from dataclasses import dataclass + +from sqlalchemy.ext.asyncio import AsyncSession +from v2.matched_rewards.services import MatchedRewardsEstimator +from v2.allocations.repositories import ( + get_allocations_with_user_uqs, +) +from .capped_quadriatic import ( + CappedQuadriaticFunding, + capped_quadriatic_funding, +) +from v2.projects.contracts import ProjectsContracts +import asyncio + + +@dataclass +class ProjectRewardsEstimator: + # Dependencies + session: AsyncSession + projects_contracts: ProjectsContracts + matched_rewards_estimator: MatchedRewardsEstimator + + # Parameters + epoch_number: int + + async def get(self) -> CappedQuadriaticFunding: + # Gather all the necessary data for the calculation + all_projects, matched_rewards, allocations = await asyncio.gather( + self.projects_contracts.get_project_addresses(self.epoch_number), + self.matched_rewards_estimator.get(), + get_allocations_with_user_uqs(self.session, self.epoch_number), + ) + + # Calculate using the Capped Quadriatic Funding formula + return capped_quadriatic_funding( + project_addresses=all_projects, + allocations=allocations, + matched_rewards=matched_rewards, + ) diff --git a/backend/v2/projects/depdendencies.py b/backend/v2/projects/depdendencies.py deleted file mode 100644 index c5f61772f5..0000000000 --- a/backend/v2/projects/depdendencies.py 
+++ /dev/null @@ -1,114 +0,0 @@ -from decimal import Decimal -from typing import Annotated -from fastapi import Depends -from pydantic import Field -from pydantic_settings import BaseSettings -from v2.epochs.dependencies import get_epochs_subgraph -from v2.epochs.subgraphs import EpochsSubgraph -from v2.core.dependencies import GetSession, OctantSettings, Web3 - - -from .contracts import PROJECTS_ABI, ProjectsContracts -from .services import ( - EstimatedProjectMatchedRewards, - EstimatedProjectRewards, - ProjectsAllocationThresholdGetter, -) - - -class ProjectsSettings(OctantSettings): - projects_contract_address: str = Field( - validation_alias="proposals_contract_address" - ) - - -def get_projects_settings() -> ProjectsSettings: - return ProjectsSettings() - - -def get_projects_contracts( - w3: Web3, settings: Annotated[ProjectsSettings, Depends(get_projects_settings)] -) -> ProjectsContracts: - return ProjectsContracts(w3, PROJECTS_ABI, settings.projects_contract_address) - - -GetProjectsContracts = Annotated[ - ProjectsContracts, - Depends(get_projects_contracts), -] - - -class ProjectsAllocationThresholdSettings(OctantSettings): - project_count_multiplier: int = Field( - default=1, - description="The multiplier to the number of projects to calculate the allocation threshold.", - ) - - -def get_projects_allocation_threshold_settings() -> ProjectsAllocationThresholdSettings: - return ProjectsAllocationThresholdSettings() - -def get_projects_allocation_threshold_getter( - session: GetSession, - projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], - settings: Annotated[ - ProjectsAllocationThresholdSettings, - Depends(get_projects_allocation_threshold_settings), - ], -) -> ProjectsAllocationThresholdGetter: - return ProjectsAllocationThresholdGetter( - session, projects, settings.project_count_multiplier - ) - - -class EstimatedProjectMatchedRewardsSettings(OctantSettings): - TR_PERCENT: Decimal = Field( - default=Decimal("0.7"), description="The percentage of the TR rewards." - ) - IRE_PERCENT: Decimal = Field( - default=Decimal("0.35"), description="The percentage of the IRE rewards." - ) - MATCHED_REWARDS_PERCENT: Decimal = Field( - default=Decimal("0.35"), description="The percentage of the matched rewards." 
- ) - - -def get_estimated_project_matched_rewards_settings() -> EstimatedProjectMatchedRewardsSettings: - return EstimatedProjectMatchedRewardsSettings() - - -def get_estimated_project_matched_rewards( - session: GetSession, - epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)], - settings: Annotated[ - EstimatedProjectMatchedRewardsSettings, - Depends(get_estimated_project_matched_rewards_settings), - ], -) -> EstimatedProjectMatchedRewards: - return EstimatedProjectMatchedRewards( - session=session, - epochs_subgraph=epochs_subgraph, - tr_percent=settings.TR_PERCENT, - ire_percent=settings.IRE_PERCENT, - matched_rewards_percent=settings.MATCHED_REWARDS_PERCENT, - ) - - -def get_estimated_project_rewards( - session: GetSession, - projects: Annotated[ProjectsContracts, Depends(get_projects_contracts)], - estimated_project_matched_rewards: Annotated[ - EstimatedProjectMatchedRewards, Depends(get_estimated_project_matched_rewards) - ], -) -> EstimatedProjectRewards: - return EstimatedProjectRewards( - session=session, - projects=projects, - estimated_matched_rewards=estimated_project_matched_rewards, - ) - - -GetEstimatedProjectMatchedRewards = Annotated[ - EstimatedProjectMatchedRewards, - Depends(get_estimated_project_matched_rewards), -] \ No newline at end of file diff --git a/backend/v2/projects/dependencies.py b/backend/v2/projects/dependencies.py new file mode 100644 index 0000000000..cddd007699 --- /dev/null +++ b/backend/v2/projects/dependencies.py @@ -0,0 +1,56 @@ +from typing import Annotated +from fastapi import Depends +from pydantic import Field +from v2.epochs.dependencies import AssertAllocationWindowOpen +from v2.core.dependencies import GetSession, OctantSettings, Web3 + + +from .contracts import PROJECTS_ABI, ProjectsContracts +from .services import ProjectsAllocationThresholdGetter + + +class ProjectsSettings(OctantSettings): + projects_contract_address: str = Field( + validation_alias="proposals_contract_address" + ) + + +def get_projects_settings() -> ProjectsSettings: + return ProjectsSettings() # type: ignore[call-arg] + + +def get_projects_contracts( + w3: Web3, settings: Annotated[ProjectsSettings, Depends(get_projects_settings)] +) -> ProjectsContracts: + return ProjectsContracts(w3, PROJECTS_ABI, settings.projects_contract_address) # type: ignore[arg-type] + + +GetProjectsContracts = Annotated[ + ProjectsContracts, + Depends(get_projects_contracts), +] + + +class ProjectsAllocationThresholdSettings(OctantSettings): + project_count_multiplier: int = Field( + default=1, + description="The multiplier to the number of projects to calculate the allocation threshold.", + ) + + +def get_projects_allocation_threshold_settings() -> ProjectsAllocationThresholdSettings: + return ProjectsAllocationThresholdSettings() + + +def get_projects_allocation_threshold_getter( + epoch_number: AssertAllocationWindowOpen, + session: GetSession, + projects: GetProjectsContracts, + settings: Annotated[ + ProjectsAllocationThresholdSettings, + Depends(get_projects_allocation_threshold_settings), + ], +) -> ProjectsAllocationThresholdGetter: + return ProjectsAllocationThresholdGetter( + epoch_number, session, projects, settings.project_count_multiplier + ) diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py index 94e787b9fb..23e59b5d12 100644 --- a/backend/v2/projects/services.py +++ b/backend/v2/projects/services.py @@ -1,35 +1,27 @@ from dataclasses import dataclass -from decimal import Decimal from sqlalchemy.ext.asyncio import AsyncSession 
from v2.allocations.repositories import ( - get_allocations_with_user_uqs, sum_allocations_by_epoch, ) -from v2.epoch_snapshots.repositories import get_pending_epoch_snapshot -from v2.epochs.subgraphs import EpochsSubgraph -from v2.project_rewards.capped_quadriatic import ( - CappedQuadriaticFunding, - capped_quadriatic_funding, -) from v2.projects.contracts import ProjectsContracts -from v2.user_patron_mode.repositories import get_patrons_rewards @dataclass class ProjectsAllocationThresholdGetter: + # Parameters + epoch_number: int + + # Dependencies session: AsyncSession projects: ProjectsContracts project_count_multiplier: int = 1 - async def get( - self, - epoch_number: int, - ) -> int: + async def get(self) -> int: return await get_projects_allocation_threshold( session=self.session, projects=self.projects, - epoch_number=epoch_number, + epoch_number=self.epoch_number, project_count_multiplier=self.project_count_multiplier, ) @@ -65,114 +57,3 @@ def _calculate_threshold( if projects_count else 0 ) - - -@dataclass -class EstimatedProjectMatchedRewards: - # Dependencies - session: AsyncSession - epochs_subgraph: EpochsSubgraph - # Settings - tr_percent: Decimal - ire_percent: Decimal - matched_rewards_percent: Decimal - - async def get(self, epoch_number: int) -> int: - return await get_estimated_project_matched_rewards_pending( - session=self.session, - epochs_subgraph=self.epochs_subgraph, - tr_percent=self.tr_percent, - ire_percent=self.ire_percent, - matched_rewards_percent=self.matched_rewards_percent, - epoch_number=epoch_number, - ) - - -async def get_estimated_project_matched_rewards_pending( - # Dependencies - session: AsyncSession, - epochs_subgraph: EpochsSubgraph, - # Settings - tr_percent: Decimal, - ire_percent: Decimal, - matched_rewards_percent: Decimal, - # Arguments - epoch_number: int, -) -> int: - """ - Get the estimated matched rewards for the pending epoch. 
- """ - - pending_snapshot = await get_pending_epoch_snapshot(session, epoch_number) - if pending_snapshot is None: - raise ValueError(f"No pending snapshot for epoch {epoch_number}") - - epoch_details = await epochs_subgraph.get_epoch_by_number(epoch_number) - patrons_rewards = await get_patrons_rewards( - session, epoch_details.finalized_timestamp.datetime(), epoch_number - ) - - return _calculate_percentage_matched_rewards( - locked_ratio=Decimal(pending_snapshot.locked_ratio), - tr_percent=tr_percent, - ire_percent=ire_percent, - staking_proceeds=int(pending_snapshot.eth_proceeds), - patrons_rewards=patrons_rewards, - matched_rewards_percent=matched_rewards_percent, - ) - - -def _calculate_percentage_matched_rewards( - locked_ratio: Decimal, - tr_percent: Decimal, - ire_percent: Decimal, - staking_proceeds: int, - patrons_rewards: int, - matched_rewards_percent: Decimal, # Config -) -> int: - if locked_ratio > tr_percent: - raise ValueError("Invalid Strategy - locked_ratio > tr_percent") - - if locked_ratio < ire_percent: - return int(matched_rewards_percent * staking_proceeds + patrons_rewards) - - if ire_percent <= locked_ratio < tr_percent: - return int((tr_percent - locked_ratio) * staking_proceeds + patrons_rewards) - - return patrons_rewards - - -@dataclass -class EstimatedProjectRewards: - # Dependencies - session: AsyncSession - projects: ProjectsContracts - estimated_matched_rewards: EstimatedProjectMatchedRewards - - async def get(self, epoch_number: int) -> CappedQuadriaticFunding: - return await estimate_project_rewards( - session=self.session, - projects=self.projects, - estimated_matched_rewards=self.estimated_matched_rewards, - epoch_number=epoch_number, - ) - - -async def estimate_project_rewards( - # Dependencies - session: AsyncSession, - projects: ProjectsContracts, - estimated_matched_rewards: EstimatedProjectMatchedRewards, - # Arguments - epoch_number: int, -) -> CappedQuadriaticFunding: - # project_settings project is ProjectSettings - all_projects = await projects.get_project_addresses(epoch_number) - matched_rewards = await estimated_matched_rewards.get(epoch_number) - allocations = await get_allocations_with_user_uqs(session, epoch_number) - - return capped_quadriatic_funding( - project_addresses=all_projects, - allocations=allocations, - matched_rewards=matched_rewards, - ) diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index 966fa9eccd..57886c0f42 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -3,7 +3,6 @@ from fastapi import Depends from pydantic import Field -from pydantic_settings import BaseSettings from v2.core.dependencies import GetSession, OctantSettings from .services import UQScoreGetter @@ -11,11 +10,11 @@ class UQScoreSettings(OctantSettings): uq_score_threshold: float = Field( - default=21.0, + default=15.0, description="The Gitcoin Passport score threshold above which the UQ score is set to the maximum UQ score.", ) low_uq_score: Decimal = Field( - default=Decimal("0.2"), + default=Decimal("0.01"), description="The UQ score to be returned if the Gitcoin Passport score is below the threshold.", ) max_uq_score: Decimal = Field( @@ -39,7 +38,5 @@ def get_uq_score_getter( low_uq_score=settings.low_uq_score, ) -GetUQScoreGetter = Annotated[ - UQScoreGetter, - Depends(get_uq_score_getter) -] + +GetUQScoreGetter = Annotated[UQScoreGetter, Depends(get_uq_score_getter)] diff --git 
a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py index 4eca57445f..4fb27c9f3e 100644 --- a/backend/v2/uniqueness_quotients/repositories.py +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -5,11 +5,12 @@ from eth_utils import to_checksum_address from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from v2.core.types import Address from v2.users.repositories import get_user_by_address async def get_uq_score_by_user_address( - session: AsyncSession, user_address: str, epoch_number: int + session: AsyncSession, user_address: Address, epoch_number: int ) -> Optional[Decimal]: """Returns saved UQ score for a user in a given epoch. None if the UQ score is not saved (allocation not made yet). @@ -27,7 +28,7 @@ async def get_uq_score_by_user_address( async def save_uq_score_for_user_address( - session: AsyncSession, user_address: str, epoch_number: int, score: Decimal + session: AsyncSession, user_address: Address, epoch_number: int, score: Decimal ): """Saves UQ score for a user in a given epoch.""" @@ -46,14 +47,19 @@ async def save_uq_score_for_user_address( async def get_gp_stamps_by_address( - session: AsyncSession, user_address: str + session: AsyncSession, user_address: Address ) -> GPStamps | None: """Gets the latest GitcoinPassport Stamps record for a user.""" user = await get_user_by_address(session, user_address) + if user is None: + return None result = await session.scalar( - select(GPStamps).filter(GPStamps.user_id == user.id).order_by(GPStamps.created_at.desc()).limit(1) + select(GPStamps) + .filter(GPStamps.user_id == user.id) + .order_by(GPStamps.created_at.desc()) + .limit(1) ) # result = await session.execute( diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py index 56308d71fa..f2f0f5d74b 100644 --- a/backend/v2/uniqueness_quotients/services.py +++ b/backend/v2/uniqueness_quotients/services.py @@ -3,10 +3,17 @@ from sqlalchemy.ext.asyncio import AsyncSession -from app.constants import GUEST_LIST -from app.modules.user.antisybil.service.initial import _has_guest_stamp_applied_by_gp +from app.constants import GUEST_LIST, TIMEOUT_LIST +from app.modules.user.antisybil.core import ( + _apply_gtc_staking_stamp_nullification, + _has_guest_stamp_applied_by_gp, +) + from eth_utils import to_checksum_address +from v2.core.types import Address + + from .repositories import ( get_uq_score_by_user_address, save_uq_score_for_user_address, @@ -21,7 +28,9 @@ class UQScoreGetter: max_uq_score: Decimal low_uq_score: Decimal - async def get_or_calculate(self, epoch_number: int, user_address: str) -> Decimal: + async def get_or_calculate( + self, epoch_number: int, user_address: Address + ) -> Decimal: return await get_or_calculate_uq_score( session=self.session, user_address=user_address, @@ -57,7 +66,7 @@ def calculate_uq_score( async def get_or_calculate_uq_score( session: AsyncSession, - user_address: str, + user_address: Address, epoch_number: int, uq_score_threshold: float, max_uq_score: Decimal, @@ -85,7 +94,9 @@ async def get_or_calculate_uq_score( return uq_score -async def get_gitcoin_passport_score(session: AsyncSession, user_address: str) -> float: +async def get_gitcoin_passport_score( + session: AsyncSession, user_address: Address +) -> float: """Gets saved Gitcoin Passport score for a user. Returns None if the score is not saved. If the user is in the GUEST_LIST, the score will be adjusted to include the guest stamp. 
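# Illustration only, not part of the change: the rule the UQScoreSettings above
# describe - a Gitcoin Passport score at or above uq_score_threshold maps to
# max_uq_score, anything below maps to low_uq_score. The defaults changed here are
# 15.0 for the threshold and 0.01 for the low score; the max_uq_score default and
# the exact boundary comparison are not visible in these hunks, so both are assumed.

from decimal import Decimal


def _uq_score_sketch(gp_score: float,
                     threshold: float = 15.0,
                     max_uq: Decimal = Decimal("1.0"),   # assumed default
                     low_uq: Decimal = Decimal("0.01")) -> Decimal:
    return max_uq if gp_score >= threshold else low_uq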
@@ -95,10 +106,19 @@ async def get_gitcoin_passport_score(session: AsyncSession, user_address: str) - stamps = await get_gp_stamps_by_address(session, user_address) + # We have no information about the user's score if stamps is None: return 0.0 + # If the user is explicitly in the timeout list, return 0.0 Gitcoin Passport score + if user_address in TIMEOUT_LIST: + return 0.0 + + # We remove score associated with GTC staking + potential_score = _apply_gtc_staking_stamp_nullification(stamps.score, stamps) + + # If the user is in the guest list and has not been stamped by a guest list provider, increase the score by 21.0 if user_address in GUEST_LIST and not _has_guest_stamp_applied_by_gp(stamps): - return stamps.score + 21.0 + return potential_score + 21.0 - return stamps.score + return potential_score diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py index 3295d5f5df..d041cfff66 100644 --- a/backend/v2/user_patron_mode/repositories.py +++ b/backend/v2/user_patron_mode/repositories.py @@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import aliased +from v2.core.types import Address from v2.users.repositories import get_user_by_address @@ -72,7 +73,7 @@ async def get_patrons_rewards( async def get_budget_by_user_address_and_epoch( - session: AsyncSession, user_address: str, epoch: int + session: AsyncSession, user_address: Address, epoch: int ) -> int | None: """ Get the budget of a user for a given epoch. @@ -84,7 +85,6 @@ async def get_budget_by_user_address_and_epoch( if user is None: return None - print("epoch", epoch) # epoch -= 1 result = await session.execute( @@ -105,7 +105,7 @@ async def get_budget_by_user_address_and_epoch( async def user_is_patron_with_budget( session: AsyncSession, - user_address: str, + user_address: Address, epoch_number: int, finalized_timestamp: datetime, ) -> bool: diff --git a/backend/v2/users/repositories.py b/backend/v2/users/repositories.py index 9f8d5cbfb1..c85d5a2304 100644 --- a/backend/v2/users/repositories.py +++ b/backend/v2/users/repositories.py @@ -1,21 +1,22 @@ from app.infrastructure.database.models import User -from eth_utils import to_checksum_address from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select +from v2.core.types import Address -async def get_user_by_address(session: AsyncSession, user_address: str) -> User | None: - user_address = to_checksum_address(user_address) + +async def get_user_by_address( + session: AsyncSession, user_address: Address +) -> User | None: + """Get a user object by their address. 
Useful for all other operations related to a user.""" import time + start = time.time() result = await session.scalar( select(User).filter(User.address == user_address).limit(1) ) - # result = await session.execute(select(User).filter(User.address == user_address)) - print("get_user_by_address", time.time() - start) - print("result", result) - + print("USER BY ADDRESS", time.time() - start) return result From 19374e2259defb1ab0005aeebdb23e422c83a807 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marek=20S=C5=82omnicki?= Date: Mon, 7 Oct 2024 10:41:48 +0200 Subject: [PATCH 11/31] Bump argo app to 0.2.65 --- ci/argocd/templates/octant-application.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/argocd/templates/octant-application.yaml b/ci/argocd/templates/octant-application.yaml index 7d840e4c3c..39ff449510 100644 --- a/ci/argocd/templates/octant-application.yaml +++ b/ci/argocd/templates/octant-application.yaml @@ -15,7 +15,7 @@ spec: namespace: $DEPLOYMENT_ID sources: - repoURL: 'https://gitlab.com/api/v4/projects/48137258/packages/helm/devel' - targetRevision: 0.2.64 + targetRevision: 0.2.65 chart: octant helm: parameters: From 7ca1dae9e1115d42422dd16b3877393e9ef1431b Mon Sep 17 00:00:00 2001 From: adam-gf Date: Mon, 7 Oct 2024 11:00:47 +0200 Subject: [PATCH 12/31] Updates host --- backend/startup.py | 4 ++-- backend/v2/allocations/repositories.py | 4 ++-- backend/v2/user_patron_mode/repositories.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/startup.py b/backend/startup.py index d26b2c2142..8920cecdd0 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -81,5 +81,5 @@ async def dispatch(self, request: Request, call_next): if __name__ == "__main__": import uvicorn - # uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) - uvicorn.run(fastapi_app, port=5000) \ No newline at end of file + uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) + # uvicorn.run(fastapi_app, port=5000) diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 95f3b2449c..c7c20f430d 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -5,7 +5,7 @@ from app.infrastructure.database.models import AllocationRequest as AllocationRequestDB from app.infrastructure.database.models import UniquenessQuotient, User from eth_utils import to_checksum_address -from sqlalchemy import INTEGER, cast, func, select, update +from sqlalchemy import Numeric, cast, func, select, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import coalesce @@ -19,7 +19,7 @@ async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> """Get the sum of all allocations for a given epoch. 
We only consider the allocations that have not been deleted.""" result = await session.execute( - select(coalesce(func.sum(cast(Allocation.amount, INTEGER)), 0)) + select(coalesce(func.sum(cast(Allocation.amount, Numeric)), 0)) .filter(Allocation.epoch == epoch_number) .filter(Allocation.deleted_at.is_(None)) ) diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py index d041cfff66..603b9a397a 100644 --- a/backend/v2/user_patron_mode/repositories.py +++ b/backend/v2/user_patron_mode/repositories.py @@ -2,7 +2,7 @@ from typing import List from app.infrastructure.database.models import Budget, PatronModeEvent, User -from sqlalchemy import Integer, cast, func +from sqlalchemy import Numeric, cast, func from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import aliased @@ -47,7 +47,7 @@ async def get_budget_sum_by_users_addresses_and_epoch( Sum the budgets of given users for a given epoch. """ result = await session.execute( - select(func.sum(cast(Budget.budget, Integer))) + select(func.sum(cast(Budget.budget, Numeric))) .join(User) .filter(User.address.in_(users_addresses), Budget.epoch == epoch_number) ) @@ -56,7 +56,7 @@ async def get_budget_sum_by_users_addresses_and_epoch( if total_budget is None: return 0 - return total_budget + return int(total_budget) async def get_patrons_rewards( From 1f80fe699f8e661b72ac192e53d3e6c107ec8618 Mon Sep 17 00:00:00 2001 From: Housekeeper Bot Date: Mon, 7 Oct 2024 13:20:49 +0000 Subject: [PATCH 13/31] [CI/CD] Update master.env contracts --- ci/argocd/contracts/master.env | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ci/argocd/contracts/master.env b/ci/argocd/contracts/master.env index 6d3aa84242..1458614f28 100644 --- a/ci/argocd/contracts/master.env +++ b/ci/argocd/contracts/master.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6153543 +BLOCK_NUMBER=6832379 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0x738413d47E9670757D662497bEb38B69b26ddC4E -DEPOSITS_CONTRACT_ADDRESS=0x3Ba9caeAc79b784708DfdDF936F2aaAf9CF39884 -EPOCHS_CONTRACT_ADDRESS=0xb918ce1c1966208720C1F0F80767C534D227e164 -PROPOSALS_CONTRACT_ADDRESS=0x5454A1Fa39c16af307FDf0B2E9B3dbB97EcF98fD -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x0f9C752bdB7A4727dD21F44f1CE4dA6413517CcB -VAULT_CONTRACT_ADDRESS=0x7af367B58d851cE54DB86F907c380a0C98102685 +AUTH_CONTRACT_ADDRESS=0xb813dC36cDc20B32A75bb929dF511c62D6AfF85D +DEPOSITS_CONTRACT_ADDRESS=0xC3DE6107Bb08Bea5E2b29b7eA23eF037A1a4f1E9 +EPOCHS_CONTRACT_ADDRESS=0xf25EB24C053A18BE5Acc8834cbFAdEc4969fd033 +PROPOSALS_CONTRACT_ADDRESS=0xB4d187BB199e55a8C152A4bA39e8495EB3bD548D +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x698Fcc16dE7BC84b446Fe18F9A79c9f396dc11de +VAULT_CONTRACT_ADDRESS=0x5d31e7f8C16a1221216BBA2A540B141cF15F49B2 From 7394dc31cb72d6886f6d564d82b5931018fd0ba0 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 8 Oct 2024 01:19:43 +0200 Subject: [PATCH 14/31] Updates with fixes --- .../infrastructure/database/allocations.py | 6 - .../database/pending_epoch_snapshot.py | 6 +- backend/v2/allocations/dependencies.py | 14 +- backend/v2/allocations/repositories.py | 14 +- backend/v2/allocations/router.py | 10 +- backend/v2/allocations/schemas.py | 36 +-- backend/v2/allocations/services.py | 43 +--- backend/v2/allocations/socket.py | 239 +++++++++--------- backend/v2/allocations/validators.py | 58 +---- backend/v2/core/dependencies.py | 18 +- backend/v2/core/types.py | 12 + backend/v2/deposits/contracts.py | 
5 +- backend/v2/deposits/dependencies.py | 5 +- backend/v2/epochs/dependencies.py | 10 +- backend/v2/epochs/subgraphs.py | 5 - backend/v2/glms/contracts.py | 5 +- backend/v2/glms/dependencies.py | 5 +- backend/v2/main.py | 8 +- backend/v2/matched_rewards/dependencies.py | 13 +- backend/v2/matched_rewards/services.py | 1 - .../v2/project_rewards/capped_quadriatic.py | 6 +- backend/v2/project_rewards/dependencies.py | 9 +- backend/v2/project_rewards/router.py | 2 +- backend/v2/project_rewards/schemas.py | 16 +- backend/v2/project_rewards/services.py | 12 +- backend/v2/projects/dependencies.py | 4 +- backend/v2/projects/services.py | 14 +- .../v2/uniqueness_quotients/dependencies.py | 4 +- backend/v2/uniqueness_quotients/services.py | 8 +- backend/v2/user_patron_mode/repositories.py | 6 - backend/v2/users/repositories.py | 7 - 31 files changed, 221 insertions(+), 380 deletions(-) diff --git a/backend/app/infrastructure/database/allocations.py b/backend/app/infrastructure/database/allocations.py index 6ce55c621e..6307726c55 100644 --- a/backend/app/infrastructure/database/allocations.py +++ b/backend/app/infrastructure/database/allocations.py @@ -259,18 +259,12 @@ def get_allocation_request_by_user_and_epoch( def get_user_last_allocation_request(user_address: str) -> AllocationRequest | None: - import time - - start = time.time() - result = ( AllocationRequest.query.join(User, User.id == AllocationRequest.user_id) .filter(User.address == user_address) .order_by(AllocationRequest.nonce.desc()) .first() ) - - print("?????????get_user_last_allocation_request", time.time() - start) return result diff --git a/backend/app/infrastructure/database/pending_epoch_snapshot.py b/backend/app/infrastructure/database/pending_epoch_snapshot.py index 8733bb5f92..60e8c18791 100644 --- a/backend/app/infrastructure/database/pending_epoch_snapshot.py +++ b/backend/app/infrastructure/database/pending_epoch_snapshot.py @@ -21,10 +21,8 @@ def get_by_epoch_num(epoch) -> PendingEpochSnapshot: def get_by_epoch(epoch: int) -> Optional[PendingEpochSnapshot]: - sp = PendingEpochSnapshot.query.filter_by(epoch=epoch).first() - print("Engine url", db.engine.url) - print(">sp", sp) - return sp + return PendingEpochSnapshot.query.filter_by(epoch=epoch).first() + def get_last_snapshot() -> PendingEpochSnapshot: snapshot = ( diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py index a665cb3604..065b48c7cc 100644 --- a/backend/v2/allocations/dependencies.py +++ b/backend/v2/allocations/dependencies.py @@ -2,19 +2,11 @@ from fastapi import Depends from pydantic import Field +from v2.core.dependencies import GetSession, OctantSettings +from v2.epochs.dependencies import AssertAllocationWindowOpen, GetEpochsSubgraph from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator -from v2.project_rewards.services import ProjectRewardsEstimator -from v2.core.exceptions import AllocationWindowClosed -from v2.epochs.dependencies import ( - AssertAllocationWindowOpen, - GetEpochsContracts, - GetEpochsSubgraph, -) -from v2.projects.dependencies import ( - GetProjectsContracts, -) +from v2.projects.dependencies import GetProjectsContracts from v2.uniqueness_quotients.dependencies import GetUQScoreGetter -from v2.core.dependencies import GetSession, OctantSettings from .services import Allocator from .validators import SignatureVerifier diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index c7c20f430d..3bf37fb79a 100644 --- 
a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -28,7 +28,7 @@ async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> if count is None: return 0 - return count + return int(count) async def get_allocations_with_user_uqs( @@ -54,10 +54,10 @@ async def get_allocations_with_user_uqs( return [ AllocationWithUserUQScore( - project_address=project_address, + projectAddress=project_address, amount=amount, - user_address=user_address, - user_uq_score=Decimal(uq_score), + userAddress=user_address, + userUqScore=Decimal(uq_score), ) for project_address, amount, user_address, uq_score in rows ] @@ -163,9 +163,9 @@ async def get_donations_by_project( return [ ProjectDonation( - amount=int(a.amount), - donor_address=a.user.address, - project_address=a.project_address, + amount=a.amount, + donorAddress=a.user.address, + projectAddress=a.project_address, ) for a in allocations ] diff --git a/backend/v2/allocations/router.py b/backend/v2/allocations/router.py index 4ed213c38a..b82b0f3ffb 100644 --- a/backend/v2/allocations/router.py +++ b/backend/v2/allocations/router.py @@ -18,19 +18,13 @@ async def allocate( Only available during the allocation window. """ - import time - - start = time.time() - # TODO: We should ideally move to the newer version of the schema as it's simpler request = UserAllocationRequest( - user_address=allocation_request.user_address, + userAddress=allocation_request.user_address, allocations=allocation_request.payload.allocations, nonce=allocation_request.payload.nonce, signature=allocation_request.signature, - is_manually_edited=allocation_request.is_manually_edited, + isManuallyEdited=allocation_request.is_manually_edited, ) await allocator.handle(request) - - print("Allocation took: ", time.time() - start) diff --git a/backend/v2/allocations/schemas.py b/backend/v2/allocations/schemas.py index 8163a66b11..04639eacb7 100644 --- a/backend/v2/allocations/schemas.py +++ b/backend/v2/allocations/schemas.py @@ -1,50 +1,38 @@ from decimal import Decimal -from pydantic import BaseModel, ConfigDict, Field -from pydantic.alias_generators import to_camel +from pydantic import Field +from v2.core.types import Address, BigInteger, OctantModel -from v2.core.types import Address - - -class AllocationWithUserUQScore(BaseModel): - model_config = ConfigDict(frozen=True) +class AllocationWithUserUQScore(OctantModel): project_address: Address - amount: int + amount: BigInteger user_address: Address user_uq_score: Decimal -class AllocationRequest(BaseModel): - model_config = ConfigDict(frozen=True, alias_generator=to_camel) - +class AllocationRequest(OctantModel): project_address: Address = Field(..., alias="proposalAddress") - amount: int + amount: BigInteger # first_name: str = Field(..., alias='firstName') # last_name: str = Field(..., alias='lastName') # age: int = Field(..., alias='age') -class UserAllocationRequestPayloadV1(BaseModel): - model_config = ConfigDict(frozen=True, alias_generator=to_camel) - +class UserAllocationRequestPayloadV1(OctantModel): allocations: list[AllocationRequest] nonce: int -class UserAllocationRequestV1(BaseModel): - model_config = ConfigDict(frozen=True, alias_generator=to_camel) - +class UserAllocationRequestV1(OctantModel): user_address: Address payload: UserAllocationRequestPayloadV1 signature: str is_manually_edited: bool -class UserAllocationRequest(BaseModel): - model_config = ConfigDict(frozen=True) - +class UserAllocationRequest(OctantModel): user_address: Address allocations: 
list[AllocationRequest] nonce: int @@ -53,9 +41,7 @@ class UserAllocationRequest(BaseModel): is_manually_edited: bool -class ProjectDonation(BaseModel): - model_config = ConfigDict(frozen=True) - - amount: int +class ProjectDonation(OctantModel): + amount: BigInteger donor_address: Address # user address project_address: Address diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index 18bb798733..d4e18593d8 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -1,24 +1,21 @@ import asyncio from dataclasses import dataclass -import time from app import exceptions from sqlalchemy.ext.asyncio import AsyncSession from v2.matched_rewards.services import MatchedRewardsEstimator -from v2.uniqueness_quotients.dependencies import UQScoreGetter -from v2.project_rewards.capped_quadriatic import ( - cqf_simulate_leverage, -) +from v2.project_rewards.capped_quadriatic import cqf_simulate_leverage from v2.projects.contracts import ProjectsContracts +from v2.uniqueness_quotients.dependencies import UQScoreGetter from v2.users.repositories import get_user_by_address -from .validators import SignatureVerifier -from .schemas import AllocationWithUserUQScore, UserAllocationRequest from .repositories import ( get_allocations_with_user_uqs, soft_delete_user_allocations_by_epoch, store_allocation_request, ) +from .schemas import AllocationWithUserUQScore, UserAllocationRequest +from .validators import SignatureVerifier @dataclass @@ -61,35 +58,25 @@ async def allocate( # Arguments request: UserAllocationRequest, ) -> str: - import time - - allocation_time = time.time() # Verify the signature await signature_verifier.verify( epoch_number=epoch_number, request=request, ) - print("signature verified in", time.time() - allocation_time) - - uq_score_time = time.time() - # Get or calculate UQ score of the user user_uq_score = await uq_score_getter.get_or_calculate( epoch_number=epoch_number, user_address=request.user_address, ) - print("uq score retrieved in", time.time() - uq_score_time) - - new_allocations_time = time.time() # Calculate leverage by simulating the allocation new_allocations = [ AllocationWithUserUQScore( - project_address=a.project_address, + projectAddress=a.project_address, amount=a.amount, - user_address=request.user_address, - user_uq_score=user_uq_score, + userAddress=request.user_address, + userUqScore=user_uq_score, ) for a in request.allocations ] @@ -102,16 +89,6 @@ async def allocate( new_allocations, ) - print("new allocations calculated in", time.time() - new_allocations_time) - - print("leverage", leverage) - print("request.user_address", request.user_address) - - # print("I'm here") - # return "I'm here" - - soft_delete_time = time.time() - await soft_delete_user_allocations_by_epoch( session, request.user_address, epoch_number ) @@ -134,8 +111,6 @@ async def allocate( # Commit the transaction await session.commit() - print("soft delete and store allocation request in", time.time() - soft_delete_time) - return request.user_address @@ -152,16 +127,12 @@ async def simulate_leverage( Calculate leverage of the allocation made by the user. 
""" - start_time = time.time() - all_projects, matched_rewards, existing_allocations = await asyncio.gather( projects_contracts.get_project_addresses(epoch_number), matched_rewards_estimator.get(), get_allocations_with_user_uqs(session, epoch_number), ) - print("existing allocations retrieved in", time.time() - start_time) - return cqf_simulate_leverage( existing_allocations=existing_allocations, new_allocations=new_allocations, diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 60bafe98bf..d39a37d4c6 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -1,62 +1,47 @@ import asyncio -from contextlib import asynccontextmanager import logging +from contextlib import asynccontextmanager from typing import AsyncGenerator, Tuple import socketio - -from eth_utils import to_checksum_address -from v2.core.exceptions import AllocationWindowClosed -from v2.matched_rewards.dependencies import ( - get_matched_rewards_estimator, - get_matched_rewards_estimator_settings, -) -from v2.project_rewards.dependencies import get_project_rewards_estimator -from v2.project_rewards.services import ProjectRewardsEstimator +from sqlalchemy.ext.asyncio import AsyncSession from v2.allocations.dependencies import ( - SignatureVerifierSettings, get_allocator, get_signature_verifier, get_signature_verifier_settings, ) -from v2.epochs.contracts import EpochsContracts -from v2.projects.services import ( - ProjectsAllocationThresholdGetter, -) -from v2.uniqueness_quotients.dependencies import UQScoreSettings, get_uq_score_getter, get_uq_score_settings from v2.allocations.repositories import get_donations_by_project from v2.allocations.services import Allocator from v2.core.dependencies import ( - DatabaseSettings, - Web3ProviderSettings, get_database_settings, - get_db_session, get_sessionmaker, get_w3, get_web3_provider_settings, ) +from v2.core.exceptions import AllocationWindowClosed from v2.epochs.dependencies import ( - EpochsSettings, - EpochsSubgraphSettings, assert_allocation_window_open, get_epochs_contracts, get_epochs_settings, get_epochs_subgraph, get_epochs_subgraph_settings, ) +from v2.matched_rewards.dependencies import ( + get_matched_rewards_estimator, + get_matched_rewards_estimator_settings, +) +from v2.project_rewards.dependencies import get_project_rewards_estimator +from v2.project_rewards.services import ProjectRewardsEstimator from v2.projects.dependencies import ( - ProjectsAllocationThresholdSettings, - ProjectsSettings, + get_projects_allocation_threshold_getter, get_projects_allocation_threshold_settings, get_projects_contracts, - get_projects_allocation_threshold_getter, get_projects_settings, ) +from v2.projects.services import ProjectsAllocationThresholdGetter +from v2.uniqueness_quotients.dependencies import get_uq_score_getter, get_uq_score_settings -from .schemas import AllocationRequest, UserAllocationRequest, UserAllocationRequestV1 - - -from sqlalchemy.ext.asyncio import AsyncSession +from .schemas import UserAllocationRequest, UserAllocationRequestV1 @asynccontextmanager @@ -72,8 +57,7 @@ async def create_dependencies_on_connect() -> AsyncGenerator[ # We do not handle requests outside of pending epoch state (Allocation Window) # This will raise an exception if the allocation window is closed and connection does not happen - # epoch_number = await assert_allocation_window_open(epochs_contracts) - epoch_number = 128 + epoch_number = await assert_allocation_window_open(epochs_contracts) projects_contracts = 
get_projects_contracts(w3, get_projects_settings()) epochs_subgraph = get_epochs_subgraph(get_epochs_subgraph_settings()) @@ -112,7 +96,7 @@ async def create_dependencies_on_connect() -> AsyncGenerator[ # Yield the dependencies to the on_connect handler yield (s4, threshold_getter, estimated_project_rewards) - except Exception as e: + except Exception: await asyncio.gather( s1.rollback(), s2.rollback(), @@ -134,7 +118,6 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ Tuple[ AsyncSession, Allocator, - EpochsContracts, ProjectsAllocationThresholdGetter, ProjectRewardsEstimator, ], @@ -164,46 +147,76 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ sessionmaker() as s3, sessionmaker() as s4, sessionmaker() as s5, + sessionmaker() as s6, + sessionmaker() as s7, ): - threshold_getter = get_projects_allocation_threshold_getter( - epoch_number, - s1, - projects_contracts, - get_projects_allocation_threshold_settings(), - ) - estimated_matched_rewards = await get_matched_rewards_estimator( - epoch_number, s2, epochs_subgraph, get_matched_rewards_estimator_settings() - ) - estimated_project_rewards = await get_project_rewards_estimator( - epoch_number, - s3, - projects_contracts, - estimated_matched_rewards, - ) - - signature_verifier = get_signature_verifier( - s4, epochs_subgraph, projects_contracts, get_signature_verifier_settings() - ) - - uq_score_getter = get_uq_score_getter(s5, get_uq_score_settings()) - - allocations = await get_allocator( - epoch_number, - s5, - signature_verifier, - uq_score_getter, - projects_contracts, - estimated_matched_rewards, - ) - - # Yield the dependencies to the on_allocate handler - yield ( - s5, - allocations, - epochs_contracts, - threshold_getter, - estimated_project_rewards, - ) + try: + threshold_getter = get_projects_allocation_threshold_getter( + epoch_number, + s1, + projects_contracts, + get_projects_allocation_threshold_settings(), + ) + estimated_matched_rewards = await get_matched_rewards_estimator( + epoch_number, + s2, + epochs_subgraph, + get_matched_rewards_estimator_settings(), + ) + estimated_project_rewards = await get_project_rewards_estimator( + epoch_number, + s3, + projects_contracts, + estimated_matched_rewards, + ) + + signature_verifier = get_signature_verifier( + s4, + epochs_subgraph, + projects_contracts, + get_signature_verifier_settings(), + ) + + uq_score_getter = get_uq_score_getter(s5, get_uq_score_settings()) + + allocations = await get_allocator( + epoch_number, + s6, + signature_verifier, + uq_score_getter, + projects_contracts, + estimated_matched_rewards, + ) + + # Yield the dependencies to the on_allocate handler + yield ( + s7, + allocations, + threshold_getter, + estimated_project_rewards, + ) + + except Exception: + await asyncio.gather( + s1.rollback(), + s2.rollback(), + s3.rollback(), + s4.rollback(), + s5.rollback(), + s6.rollback(), + s7.rollback(), + ) + raise + finally: + await asyncio.gather( + s1.close(), + s2.close(), + s3.close(), + s4.close(), + s5.close(), + s6.close(), + s7.close(), + ) class AllocateNamespace(socketio.AsyncNamespace): @@ -224,18 +237,13 @@ async def handle_on_connect(self, sid: str, environ: dict): # Get the estimated project rewards and send them to the client project_rewards = await estimated_project_rewards.get() - # rewards = [ - # { - # "address": project_address, - # "allocated": str(project_rewards.amounts_by_project[project_address]), - # "matched": str(project_rewards.matched_by_project[project_address]), - # } - # for project_address in 
project_rewards.amounts_by_project.keys() - # ] await self.emit( "project_rewards", - [p.model_dump() for p in project_rewards.project_fundings.values()], + [ + p.model_dump(by_alias=True) + for p in project_rewards.project_fundings.values() + ], to=sid, ) @@ -248,7 +256,16 @@ async def handle_on_connect(self, sid: str, environ: dict): await self.emit( "project_donors", - {"project": project_address, "donors": donations}, + { + "project": project_address, + "donors": [ + { + "address": d.donor_address, + "amount": str(d.amount), + } + for d in donations + ], + }, ) async def on_connect(self, sid: str, environ: dict): @@ -262,11 +279,10 @@ async def on_connect(self, sid: str, environ: dict): async def on_disconnect(self, sid): logging.debug("Client disconnected") - async def handle_on_allocate(self, sid: str, data: dict): + async def handle_on_allocate(self, sid: str, data: str): async with create_dependencies_on_allocate() as ( session, allocations, - epochs_contracts, threshold_getter, estimated_project_rewards, ): @@ -285,18 +301,12 @@ async def handle_on_allocate(self, sid: str, data: dict): # Get the estimated project rewards and send them to the client project_rewards = await estimated_project_rewards.get() - # rewards = [ - # { - # "address": project_address, - # "allocated": str(project_rewards.amounts_by_project[project_address]), - # "matched": str(project_rewards.matched_by_project[project_address]), - # } - # for project_address in project_rewards.amounts_by_project.keys() - # ] - await self.emit( "project_rewards", - [p.model_dump() for p in project_rewards.project_fundings.values()], + [ + p.model_dump(by_alias=True) + for p in project_rewards.project_fundings.values() + ], to=sid, ) @@ -309,10 +319,19 @@ async def handle_on_allocate(self, sid: str, data: dict): await self.emit( "project_donors", - {"project": project_address, "donors": donations}, + { + "project": project_address, + "donors": [ + { + "address": d.donor_address, + "amount": str(d.amount), + } + for d in donations + ], + }, ) - async def on_allocate(self, sid: str, data: dict): + async def on_allocate(self, sid: str, data: str): try: await self.handle_on_allocate(sid, data) @@ -323,7 +342,7 @@ async def on_allocate(self, sid: str, data: dict): logging.error(f"Error handling on_allocate: {e}") -def from_dict(data: dict) -> UserAllocationRequest: +def from_dict(data: str) -> UserAllocationRequest: """ Example of data: { @@ -346,34 +365,14 @@ def from_dict(data: dict) -> UserAllocationRequest: } """ + # TODO: maybe we can switcht to UserAllocationRequest from V1 ? 
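For reference, a minimal sketch of the parsing step that follows: `UserAllocationRequestV1` inherits `OctantModel`, so `model_validate_json` maps the camelCase keys onto the snake_case fields through the `to_camel` alias generator. The addresses, amounts and signature below are placeholders, and the sketch assumes the `v2` package from this patch series is importable.

    from v2.allocations.schemas import UserAllocationRequestV1

    raw = (
        '{"userAddress": "0x70997970C51812dc3A010C7d01b50e0d17dc79C8",'
        ' "payload": {"allocations": [{"proposalAddress":'
        ' "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", "amount": "1500"}], "nonce": 5},'
        ' "signature": "0xdeadbeef", "isManuallyEdited": false}'
    )
    parsed = UserAllocationRequestV1.model_validate_json(raw)
    assert parsed.user_address == "0x70997970C51812dc3A010C7d01b50e0d17dc79C8"  # checksummed by the Address validator
    assert parsed.payload.allocations[0].amount == 1500  # BigInteger coerces the quoted amount to an int
    assert parsed.is_manually_edited is False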
# parse the incoming data as UserAllocationRequestV1 - requestV1 = UserAllocationRequestV1.model_validate(data) + requestV1 = UserAllocationRequestV1.model_validate_json(data) request = UserAllocationRequest( - user_address=requestV1.user_address, + userAddress=requestV1.user_address, allocations=requestV1.payload.allocations, nonce=requestV1.payload.nonce, signature=requestV1.signature, - is_manually_edited=requestV1.is_manually_edited, + isManuallyEdited=requestV1.is_manually_edited, ) return request - - user_address = to_checksum_address(data["userAddress"]) - payload = data["payload"] - allocations = [ - AllocationRequest( - project_address=to_checksum_address(allocation_data["proposalAddress"]), - amount=allocation_data["amount"], - ) - for allocation_data in payload["allocations"] - ] - nonce = int(payload["nonce"]) - signature = payload.get("signature") - is_manually_edited = data.get("isManuallyEdited", False) - - return UserAllocationRequest( - user_address=user_address, - allocations=allocations, - nonce=nonce, - signature=signature, - is_manually_edited=is_manually_edited, - ) diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py index 7a066cb222..e93c3a589d 100644 --- a/backend/v2/allocations/validators.py +++ b/backend/v2/allocations/validators.py @@ -1,21 +1,21 @@ import asyncio from dataclasses import dataclass -from web3 import AsyncWeb3 + from app import exceptions from app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing +from sqlalchemy.ext.asyncio import AsyncSession from v2.core.types import Address -from .schemas import UserAllocationRequest -from .repositories import get_last_allocation_request_nonce from v2.crypto.signatures import verify_signed_message from v2.epochs.subgraphs import EpochsSubgraph from v2.projects.contracts import ProjectsContracts - -from sqlalchemy.ext.asyncio import AsyncSession - from v2.user_patron_mode.repositories import ( get_budget_by_user_address_and_epoch, user_is_patron_with_budget, ) +from web3 import AsyncWeb3 + +from .repositories import get_last_allocation_request_nonce +from .schemas import UserAllocationRequest @dataclass @@ -26,10 +26,6 @@ class SignatureVerifier: chain_id: int async def verify(self, epoch_number: int, request: UserAllocationRequest) -> None: - import time - - start = time.time() - await asyncio.gather( verify_logic( session=self.session, @@ -46,8 +42,6 @@ async def verify(self, epoch_number: int, request: UserAllocationRequest) -> Non ), ) - print("verify_signature", time.time() - start) - async def verify_logic( # Component dependencies @@ -67,8 +61,6 @@ async def verify_logic( if not payload.allocations: raise exceptions.EmptyAllocations() - print("already here") - async def _check_database(): await _provided_nonce_matches_expected( session, payload.user_address, payload.nonce @@ -83,37 +75,6 @@ async def _check_database(): _provided_projects_are_correct(projects_contracts, epoch_number, payload), ) - # try: - # async with asyncio.TaskGroup() as tg: - - # tg.create_task(_provided_nonce_matches_expected(session, payload.user_address, payload.nonce)) - # tg.create_task(_user_is_not_patron(session, epoch_subgraph, payload.user_address, epoch_number)) - # tg.create_task(_provided_projects_are_correct(projects_contracts, epoch_number, payload)) - # tg.create_task(_user_has_budget(session, payload, epoch_number)) - # except Exception as e: - # print("e", e) - # raise e - - # summary = asyncio.gather( - # _provided_nonce_matches_expected(session, 
payload.user_address, payload.nonce), - # _user_is_not_patron( - # session, epoch_subgraph, payload.user_address, epoch_number - # ), - # _provided_projects_are_correct( - # projects_contracts, epoch_number, payload - # ), - # _user_has_budget(session, payload, epoch_number), - # return_exceptions=True, - # ) - - # print("maybe here?") - - # for i in await summary: - # if isinstance(i, Exception): - # raise i - - print("hehehehehe") - async def _provided_nonce_matches_expected( # Component dependencies @@ -171,8 +132,6 @@ async def get_next_user_nonce( session, user_address ) - print("last_allocation_request", last_allocation_request) - # Calculate the next nonce if last_allocation_request is None: return 0 @@ -192,16 +151,11 @@ async def _provided_projects_are_correct( Check if the projects in the allocation request are correct. """ - import time - - start = time.time() # Check if the user is not a project all_projects = await projects_contracts.get_project_addresses(epoch_number) if payload.user_address in all_projects: raise exceptions.ProjectAllocationToSelf() - print("get_project_addresses", time.time() - start) - project_addresses = [a.project_address for a in payload.allocations] # Check if the projects are valid diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index acf6a8ec80..23b3515e85 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -1,18 +1,11 @@ -from asyncio import current_task from functools import lru_cache from typing import Annotated, AsyncGenerator -from fastapi import Depends from app.infrastructure.database.models import BaseModel +from fastapi import Depends from pydantic import Field from pydantic_settings import BaseSettings, SettingsConfigDict - -from sqlalchemy.ext.asyncio import ( - AsyncSession, - async_sessionmaker, - create_async_engine, - async_scoped_session, -) +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from web3 import AsyncHTTPProvider, AsyncWeb3 from web3.middleware import async_geth_poa_middleware @@ -116,18 +109,13 @@ async def get_db_session( # Create a new session async with sessionmaker() as session: - print("in gettersession id", id(session)) - print("in gettersession identity", session) - try: yield session await session.commit() - except Exception as e: - print("----Rolling back session, error:", e) + except Exception: await session.rollback() raise finally: - print("----Closing session") await session.close() diff --git a/backend/v2/core/types.py b/backend/v2/core/types.py index 76061ea9a5..83ae158eb3 100644 --- a/backend/v2/core/types.py +++ b/backend/v2/core/types.py @@ -1,7 +1,19 @@ from typing import Annotated + from eth_utils import to_checksum_address +from pydantic import BaseModel, ConfigDict +from pydantic.alias_generators import to_camel +from pydantic.functional_serializers import WrapSerializer from pydantic.functional_validators import AfterValidator +class OctantModel(BaseModel): + model_config = ConfigDict(frozen=True, alias_generator=to_camel) + + # Address is a checksummed Ethereum address. 
Address = Annotated[str, AfterValidator(to_checksum_address)] + +BigInteger = Annotated[ + int, AfterValidator(int), WrapSerializer(lambda x, y, z: str(x), str) +] diff --git a/backend/v2/deposits/contracts.py b/backend/v2/deposits/contracts.py index 559b9b4bb8..664eb8492b 100644 --- a/backend/v2/deposits/contracts.py +++ b/backend/v2/deposits/contracts.py @@ -1,8 +1,7 @@ -from v2.core.contracts import SmartContract - - from typing import Protocol +from v2.core.contracts import SmartContract + class AddressKey(Protocol): address: str diff --git a/backend/v2/deposits/dependencies.py b/backend/v2/deposits/dependencies.py index 927c22876a..57a2a36671 100644 --- a/backend/v2/deposits/dependencies.py +++ b/backend/v2/deposits/dependencies.py @@ -1,10 +1,9 @@ from typing import Annotated -from fastapi import Depends +from fastapi import Depends from v2.core.dependencies import OctantSettings, Web3 - -from .contracts import DepositsContracts, DEPOSITS_ABI +from .contracts import DEPOSITS_ABI, DepositsContracts class DepositsSettings(OctantSettings): diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py index 6cb7bb3c2d..8b0d543a75 100644 --- a/backend/v2/epochs/dependencies.py +++ b/backend/v2/epochs/dependencies.py @@ -1,8 +1,8 @@ from typing import Annotated from fastapi import Depends -from v2.core.exceptions import AllocationWindowClosed from v2.core.dependencies import OctantSettings, Web3 +from v2.core.exceptions import AllocationWindowClosed from .contracts import EPOCHS_ABI, EpochsContracts from .subgraphs import EpochsSubgraph @@ -31,15 +31,9 @@ def get_epochs_contracts( async def assert_allocation_window_open( epochs_contracts: GetEpochsContracts, ) -> int: - import time - - # print("assert_allocation_window_open called") - - start = time.time() + """Asserts that the allocation window is open and returns the current epoch number.""" epoch_number = await epochs_contracts.get_pending_epoch() - - print("assert_allocation_window_open took", time.time() - start, "seconds") if epoch_number is None: raise AllocationWindowClosed() diff --git a/backend/v2/epochs/subgraphs.py b/backend/v2/epochs/subgraphs.py index 66e78d2390..d7f8e6cf72 100644 --- a/backend/v2/epochs/subgraphs.py +++ b/backend/v2/epochs/subgraphs.py @@ -67,9 +67,6 @@ def __init__( async def get_epoch_by_number(self, epoch_number: int) -> EpochDetails: """Get EpochDetails from the subgraph for a given epoch number.""" - import time - - start = time.time() logging.debug( f"[Subgraph] Getting epoch properties for epoch number: {epoch_number}" @@ -107,8 +104,6 @@ async def get_epoch_by_number(self, epoch_number: int) -> EpochDetails: epoch_details = data[0] - print(f"Time taken to get epoch details: {time.time() - start}") - return EpochDetails( epoch_num=epoch_details["epoch"], start=epoch_details["fromTs"], diff --git a/backend/v2/glms/contracts.py b/backend/v2/glms/contracts.py index fb0cfdaeb8..f5e9b26273 100644 --- a/backend/v2/glms/contracts.py +++ b/backend/v2/glms/contracts.py @@ -1,8 +1,7 @@ -from v2.core.contracts import SmartContract - - from typing import Protocol +from v2.core.contracts import SmartContract + class AddressKey(Protocol): address: str diff --git a/backend/v2/glms/dependencies.py b/backend/v2/glms/dependencies.py index 3b7bab9482..b0fb5075c1 100644 --- a/backend/v2/glms/dependencies.py +++ b/backend/v2/glms/dependencies.py @@ -1,10 +1,9 @@ from typing import Annotated -from fastapi import Depends +from fastapi import Depends from v2.core.dependencies import OctantSettings, Web3 
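Stepping back to the `v2/core/types.py` change above: `BigInteger` validates to a plain Python `int` but serializes back to a decimal string through the `WrapSerializer`, presumably so wei-scale amounts survive JSON clients intact. A minimal round-trip sketch, assuming the `v2` package is importable (the `FundingExample` model is purely illustrative):

    from v2.core.types import BigInteger, OctantModel

    class FundingExample(OctantModel):  # hypothetical model, defined only for this sketch
        total_amount: BigInteger

    f = FundingExample(totalAmount="1000000000000000000")  # alias generated by to_camel; string coerced to int
    assert f.total_amount == 10**18  # held internally as a Python int
    assert f.model_dump(by_alias=True) == {"totalAmount": "1000000000000000000"}  # dumped back as a string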
- -from .contracts import GLMContracts, ERC20_ABI +from .contracts import ERC20_ABI, GLMContracts class GLMSettings(OctantSettings): diff --git a/backend/v2/main.py b/backend/v2/main.py index 20bb9d9b9e..eba6b6a43d 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -1,13 +1,13 @@ # Create FastAPI app import logging -from fastapi.responses import JSONResponse + import socketio -from fastapi import FastAPI from app.exceptions import OctantException -from v2.allocations.socket import AllocateNamespace +from fastapi import FastAPI +from fastapi.responses import JSONResponse from sqlalchemy.exc import SQLAlchemyError - from v2.allocations.router import api as allocations_api +from v2.allocations.socket import AllocateNamespace from v2.project_rewards.router import api as project_rewards_api fastapi_app = FastAPI() diff --git a/backend/v2/matched_rewards/dependencies.py b/backend/v2/matched_rewards/dependencies.py index 992b4794e6..cd0af7c34d 100644 --- a/backend/v2/matched_rewards/dependencies.py +++ b/backend/v2/matched_rewards/dependencies.py @@ -1,15 +1,11 @@ from decimal import Decimal from typing import Annotated + from fastapi import Depends from pydantic import Field -from v2.core.exceptions import AllocationWindowClosed -from v2.epochs.dependencies import ( - AssertAllocationWindowOpen, - GetEpochsContracts, - get_epochs_subgraph, -) -from v2.epochs.subgraphs import EpochsSubgraph from v2.core.dependencies import GetSession, OctantSettings +from v2.epochs.dependencies import AssertAllocationWindowOpen, get_epochs_subgraph +from v2.epochs.subgraphs import EpochsSubgraph from .services import MatchedRewardsEstimator @@ -39,9 +35,6 @@ async def get_matched_rewards_estimator( Depends(get_matched_rewards_estimator_settings), ], ) -> MatchedRewardsEstimator: - print("session id", id(session)) - print("session identity", session) - return MatchedRewardsEstimator( session=session, epochs_subgraph=epochs_subgraph, diff --git a/backend/v2/matched_rewards/services.py b/backend/v2/matched_rewards/services.py index b6b38c0662..f26aa50f19 100644 --- a/backend/v2/matched_rewards/services.py +++ b/backend/v2/matched_rewards/services.py @@ -4,7 +4,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from v2.epoch_snapshots.repositories import get_pending_epoch_snapshot from v2.epochs.subgraphs import EpochsSubgraph - from v2.user_patron_mode.repositories import get_patrons_rewards diff --git a/backend/v2/project_rewards/capped_quadriatic.py b/backend/v2/project_rewards/capped_quadriatic.py index b19e907309..c26fac79f8 100644 --- a/backend/v2/project_rewards/capped_quadriatic.py +++ b/backend/v2/project_rewards/capped_quadriatic.py @@ -3,8 +3,8 @@ from math import sqrt from typing import Dict, NamedTuple -from v2.core.types import Address from v2.allocations.schemas import AllocationWithUserUQScore +from v2.core.types import Address from .schemas import ProjectFundingSummary @@ -136,7 +136,9 @@ def cqf_calculate_individual_leverage( total_difference = Decimal(0) for project_address in project_addresses: if project_address in before_allocation.project_fundings: - before = Decimal(before_allocation.project_fundings[project_address].matched) + before = Decimal( + before_allocation.project_fundings[project_address].matched + ) else: before = Decimal(0) diff --git a/backend/v2/project_rewards/dependencies.py b/backend/v2/project_rewards/dependencies.py index a729a2d5f5..2060c90382 100644 --- a/backend/v2/project_rewards/dependencies.py +++ b/backend/v2/project_rewards/dependencies.py @@ -1,10 +1,10 
@@ from typing import Annotated + from fastapi import Depends -from v2.core.exceptions import AllocationWindowClosed -from v2.epochs.dependencies import AssertAllocationWindowOpen, GetEpochsContracts +from v2.core.dependencies import GetSession +from v2.epochs.dependencies import AssertAllocationWindowOpen from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator from v2.projects.dependencies import GetProjectsContracts -from v2.core.dependencies import GetSession from .services import ProjectRewardsEstimator @@ -15,9 +15,6 @@ async def get_project_rewards_estimator( projects_contracts: GetProjectsContracts, estimated_project_matched_rewards: GetMatchedRewardsEstimator, ) -> ProjectRewardsEstimator: - print("session id", id(session)) - print("session identity", session) - return ProjectRewardsEstimator( session=session, projects_contracts=projects_contracts, diff --git a/backend/v2/project_rewards/router.py b/backend/v2/project_rewards/router.py index bec75df95a..8717c0b030 100644 --- a/backend/v2/project_rewards/router.py +++ b/backend/v2/project_rewards/router.py @@ -1,7 +1,7 @@ from fastapi import APIRouter -from .schemas import EstimatedProjectRewardsResponse from .dependencies import GetProjectRewardsEstimator +from .schemas import EstimatedProjectRewardsResponse api = APIRouter(prefix="/rewards", tags=["Allocations"]) diff --git a/backend/v2/project_rewards/schemas.py b/backend/v2/project_rewards/schemas.py index 25192816b2..5c53fce910 100644 --- a/backend/v2/project_rewards/schemas.py +++ b/backend/v2/project_rewards/schemas.py @@ -1,18 +1,18 @@ -from decimal import Decimal -from pydantic import BaseModel, Field +from pydantic import Field +from v2.core.types import Address, BigInteger, OctantModel -from v2.core.types import Address - -class ProjectFundingSummary(BaseModel): +class ProjectFundingSummary(OctantModel): address: Address = Field(..., description="The address of the project") - allocated: int = Field( + allocated: BigInteger = Field( ..., description="Sum of all allocation amounts for the project" ) - matched: int = Field(..., description="Sum of matched rewards for the project") + matched: BigInteger = Field( + ..., description="Sum of matched rewards for the project" + ) -class EstimatedProjectRewardsResponse(BaseModel): +class EstimatedProjectRewardsResponse(OctantModel): rewards: list[ProjectFundingSummary] = Field( ..., description="List of project funding summaries" ) diff --git a/backend/v2/project_rewards/services.py b/backend/v2/project_rewards/services.py index 3a01ec9017..82d93aeeb1 100644 --- a/backend/v2/project_rewards/services.py +++ b/backend/v2/project_rewards/services.py @@ -1,16 +1,12 @@ +import asyncio from dataclasses import dataclass from sqlalchemy.ext.asyncio import AsyncSession +from v2.allocations.repositories import get_allocations_with_user_uqs from v2.matched_rewards.services import MatchedRewardsEstimator -from v2.allocations.repositories import ( - get_allocations_with_user_uqs, -) -from .capped_quadriatic import ( - CappedQuadriaticFunding, - capped_quadriatic_funding, -) from v2.projects.contracts import ProjectsContracts -import asyncio + +from .capped_quadriatic import CappedQuadriaticFunding, capped_quadriatic_funding @dataclass diff --git a/backend/v2/projects/dependencies.py b/backend/v2/projects/dependencies.py index cddd007699..3e148b7dee 100644 --- a/backend/v2/projects/dependencies.py +++ b/backend/v2/projects/dependencies.py @@ -1,9 +1,9 @@ from typing import Annotated + from fastapi import Depends from pydantic 
import Field -from v2.epochs.dependencies import AssertAllocationWindowOpen from v2.core.dependencies import GetSession, OctantSettings, Web3 - +from v2.epochs.dependencies import AssertAllocationWindowOpen from .contracts import PROJECTS_ABI, ProjectsContracts from .services import ProjectsAllocationThresholdGetter diff --git a/backend/v2/projects/services.py b/backend/v2/projects/services.py index 23e59b5d12..7421f2c30f 100644 --- a/backend/v2/projects/services.py +++ b/backend/v2/projects/services.py @@ -1,9 +1,8 @@ +import asyncio from dataclasses import dataclass from sqlalchemy.ext.asyncio import AsyncSession -from v2.allocations.repositories import ( - sum_allocations_by_epoch, -) +from v2.allocations.repositories import sum_allocations_by_epoch from v2.projects.contracts import ProjectsContracts @@ -36,11 +35,10 @@ async def get_projects_allocation_threshold( ) -> int: # PROJECTS_COUNT_MULTIPLIER = 1 # TODO: from settings? - total_allocated = await sum_allocations_by_epoch(session, epoch_number) - project_addresses = await projects.get_project_addresses(epoch_number) - - print("total_allocated", total_allocated) - print("project_addresses", project_addresses) + total_allocated, project_addresses = await asyncio.gather( + sum_allocations_by_epoch(session, epoch_number), + projects.get_project_addresses(epoch_number), + ) return _calculate_threshold( total_allocated, len(project_addresses), project_count_multiplier diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index 57886c0f42..99e4e61bbf 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -1,10 +1,10 @@ from decimal import Decimal from typing import Annotated -from fastapi import Depends +from fastapi import Depends from pydantic import Field - from v2.core.dependencies import GetSession, OctantSettings + from .services import UQScoreGetter diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py index f2f0f5d74b..ef96c19423 100644 --- a/backend/v2/uniqueness_quotients/services.py +++ b/backend/v2/uniqueness_quotients/services.py @@ -1,23 +1,19 @@ from dataclasses import dataclass from decimal import Decimal -from sqlalchemy.ext.asyncio import AsyncSession - from app.constants import GUEST_LIST, TIMEOUT_LIST from app.modules.user.antisybil.core import ( _apply_gtc_staking_stamp_nullification, _has_guest_stamp_applied_by_gp, ) - from eth_utils import to_checksum_address - +from sqlalchemy.ext.asyncio import AsyncSession from v2.core.types import Address - from .repositories import ( + get_gp_stamps_by_address, get_uq_score_by_user_address, save_uq_score_for_user_address, - get_gp_stamps_by_address, ) diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py index 603b9a397a..5ba6511d63 100644 --- a/backend/v2/user_patron_mode/repositories.py +++ b/backend/v2/user_patron_mode/repositories.py @@ -80,13 +80,9 @@ async def get_budget_by_user_address_and_epoch( """ user = await get_user_by_address(session, user_address) - - print("user", user) if user is None: return None - print("epoch", epoch) - # epoch -= 1 result = await session.execute( select(Budget.budget) .filter(Budget.user_id == user.id) @@ -95,8 +91,6 @@ async def get_budget_by_user_address_and_epoch( budget = result.scalar() - print("budget", budget) - if budget is None: return None diff --git a/backend/v2/users/repositories.py 
b/backend/v2/users/repositories.py index c85d5a2304..f103df0e6c 100644 --- a/backend/v2/users/repositories.py +++ b/backend/v2/users/repositories.py @@ -1,7 +1,6 @@ from app.infrastructure.database.models import User from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select - from v2.core.types import Address @@ -10,13 +9,7 @@ async def get_user_by_address( ) -> User | None: """Get a user object by their address. Useful for all other operations related to a user.""" - import time - - start = time.time() - result = await session.scalar( select(User).filter(User.address == user_address).limit(1) ) - - print("USER BY ADDRESS", time.time() - start) return result From deeef470086cb1dfbbf8efb14375d08bdf984512 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Thu, 10 Oct 2024 11:04:48 +0200 Subject: [PATCH 15/31] Updates MAINNET handling and UQ --- backend/v2/allocations/dependencies.py | 18 +--- backend/v2/allocations/socket.py | 9 +- backend/v2/core/dependencies.py | 14 +++ .../v2/uniqueness_quotients/dependencies.py | 26 +++++- backend/v2/uniqueness_quotients/services.py | 90 +++++++------------ 5 files changed, 74 insertions(+), 83 deletions(-) diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py index 065b48c7cc..959491018f 100644 --- a/backend/v2/allocations/dependencies.py +++ b/backend/v2/allocations/dependencies.py @@ -1,8 +1,7 @@ from typing import Annotated from fastapi import Depends -from pydantic import Field -from v2.core.dependencies import GetSession, OctantSettings +from v2.core.dependencies import GetChainSettings, GetSession from v2.epochs.dependencies import AssertAllocationWindowOpen, GetEpochsSubgraph from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator from v2.projects.dependencies import GetProjectsContracts @@ -12,24 +11,11 @@ from .validators import SignatureVerifier -class SignatureVerifierSettings(OctantSettings): - chain_id: int = Field( - default=11155111, - description="The chain id to use for the signature verification.", - ) - - -def get_signature_verifier_settings() -> SignatureVerifierSettings: - return SignatureVerifierSettings() - - def get_signature_verifier( session: GetSession, epochs_subgraph: GetEpochsSubgraph, projects_contracts: GetProjectsContracts, - settings: Annotated[ - SignatureVerifierSettings, Depends(get_signature_verifier_settings) - ], + settings: GetChainSettings, ) -> SignatureVerifier: return SignatureVerifier( session, epochs_subgraph, projects_contracts, settings.chain_id diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index d39a37d4c6..bfff310f80 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -5,14 +5,11 @@ import socketio from sqlalchemy.ext.asyncio import AsyncSession -from v2.allocations.dependencies import ( - get_allocator, - get_signature_verifier, - get_signature_verifier_settings, -) +from v2.allocations.dependencies import get_allocator, get_signature_verifier from v2.allocations.repositories import get_donations_by_project from v2.allocations.services import Allocator from v2.core.dependencies import ( + get_chain_settings, get_database_settings, get_sessionmaker, get_w3, @@ -174,7 +171,7 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ s4, epochs_subgraph, projects_contracts, - get_signature_verifier_settings(), + get_chain_settings(), ) uq_score_getter = get_uq_score_getter(s5, get_uq_score_settings()) diff --git a/backend/v2/core/dependencies.py 
b/backend/v2/core/dependencies.py index 23b3515e85..786584b57e 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -120,3 +120,17 @@ async def get_db_session( GetSession = Annotated[AsyncSession, Depends(get_db_session, use_cache=False)] + + +class ChainSettings(OctantSettings): + chain_id: int = Field( + default=11155111, + description="The chain id to use for the signature verification.", + ) + + +def get_chain_settings() -> ChainSettings: + return ChainSettings() + + +GetChainSettings = Annotated[ChainSettings, Depends(get_chain_settings)] diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index 99e4e61bbf..39f313d8ba 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -1,9 +1,17 @@ from decimal import Decimal from typing import Annotated +from app.constants import ( + GUEST_LIST, + TIMEOUT_LIST, + TIMEOUT_LIST_NOT_MAINNET, + UQ_THRESHOLD_MAINNET, + UQ_THRESHOLD_NOT_MAINNET, +) +from app.shared.blockchain_types import ChainTypes from fastapi import Depends from pydantic import Field -from v2.core.dependencies import GetSession, OctantSettings +from v2.core.dependencies import GetChainSettings, GetSession, OctantSettings from .services import UQScoreGetter @@ -21,6 +29,10 @@ class UQScoreSettings(OctantSettings): default=Decimal("1.0"), description="The UQ score to be returned if the Gitcoin Passport score is above the threshold.", ) + null_uq_score: Decimal = Field( + default=Decimal("0.0"), + description="The UQ score to be returned if the user is on the timeout list.", + ) def get_uq_score_settings() -> UQScoreSettings: @@ -30,12 +42,22 @@ def get_uq_score_settings() -> UQScoreSettings: def get_uq_score_getter( session: GetSession, settings: Annotated[UQScoreSettings, Depends(get_uq_score_settings)], + chain_settings: GetChainSettings, ) -> UQScoreGetter: + # TODO: this should be a much nicer dependency :) + is_mainnet = chain_settings.chain_id == ChainTypes.MAINNET + + uq_threshold = UQ_THRESHOLD_MAINNET if is_mainnet else UQ_THRESHOLD_NOT_MAINNET + timeout_list = TIMEOUT_LIST if is_mainnet else TIMEOUT_LIST_NOT_MAINNET + return UQScoreGetter( session=session, - uq_score_threshold=settings.uq_score_threshold, + uq_score_threshold=uq_threshold, max_uq_score=settings.max_uq_score, low_uq_score=settings.low_uq_score, + null_uq_score=settings.null_uq_score, + guest_list=GUEST_LIST, + timeout_list=timeout_list, ) diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py index ef96c19423..ec04fa3a15 100644 --- a/backend/v2/uniqueness_quotients/services.py +++ b/backend/v2/uniqueness_quotients/services.py @@ -23,75 +23,51 @@ class UQScoreGetter: uq_score_threshold: float max_uq_score: Decimal low_uq_score: Decimal + null_uq_score: Decimal + guest_list: set[Address] + timeout_list: set[Address] async def get_or_calculate( self, epoch_number: int, user_address: Address ) -> Decimal: - return await get_or_calculate_uq_score( - session=self.session, - user_address=user_address, - epoch_number=epoch_number, - uq_score_threshold=self.uq_score_threshold, - max_uq_score=self.max_uq_score, - low_uq_score=self.low_uq_score, + """Get or calculate the UQ score for a user in a given epoch. + If the UQ score is already calculated, it will be returned. + Otherwise, it will be calculated based on the Gitcoin Passport score and saved for future reference. 
+ """ + + # Check if the UQ score is already calculated and saved + uq_score = await get_uq_score_by_user_address( + self.session, user_address, epoch_number ) + if uq_score: + return uq_score + # Otherwise, calculate the UQ score + uq_score = await self._calculate_uq_score(user_address) -def calculate_uq_score( - gp_score: float, - uq_score_threshold: float, - max_uq_score: Decimal, - low_uq_score: Decimal, -) -> Decimal: - """Calculate UQ score (multiplier) based on the GP score and the UQ score threshold. - If the GP score is greater than or equal to the UQ score threshold, the UQ score is set to the maximum UQ score. - Otherwise, the UQ score is set to the low UQ score. - - Args: - gp_score (float): The GitcoinPassport antisybil score. - uq_score_threshold (int): Anything below this threshold will be considered low UQ score, and anything above will be considered maximum UQ score. - max_uq_score (Decimal): Score to be returned if the GP score is greater than or equal to the UQ score threshold. - low_uq_score (Decimal): Score to be returned if the GP score is less than the UQ score threshold. - """ - - if gp_score >= uq_score_threshold: - return max_uq_score - - return low_uq_score - - -async def get_or_calculate_uq_score( - session: AsyncSession, - user_address: Address, - epoch_number: int, - uq_score_threshold: float, - max_uq_score: Decimal, - low_uq_score: Decimal, -) -> Decimal: - """Get or calculate the UQ score for a user in a given epoch. - If the UQ score is already calculated, it will be returned. - Otherwise, it will be calculated based on the Gitcoin Passport score and saved for future reference. - """ + # Save the UQ score for future reference + await save_uq_score_for_user_address( + self.session, user_address, epoch_number, uq_score + ) - # Check if the UQ score is already calculated and saved - uq_score = await get_uq_score_by_user_address(session, user_address, epoch_number) - if uq_score: return uq_score - # Otherwise, calculate the UQ score based on the gitcoin passport score - gp_score = await get_gitcoin_passport_score(session, user_address) - uq_score = calculate_uq_score( - gp_score, uq_score_threshold, max_uq_score, low_uq_score - ) + async def _calculate_uq_score(self, user_address: Address) -> Decimal: + gp_score = await get_gitcoin_passport_score( + self.session, user_address, self.guest_list + ) + + if user_address in self.timeout_list: + return self.null_uq_score - # and save the UQ score for future reference - await save_uq_score_for_user_address(session, user_address, epoch_number, uq_score) + if gp_score >= self.uq_score_threshold: + return self.max_uq_score - return uq_score + return self.low_uq_score async def get_gitcoin_passport_score( - session: AsyncSession, user_address: Address + session: AsyncSession, user_address: Address, guest_list: set[Address] ) -> float: """Gets saved Gitcoin Passport score for a user. Returns None if the score is not saved. 
@@ -106,15 +82,11 @@ async def get_gitcoin_passport_score( if stamps is None: return 0.0 - # If the user is explicitly in the timeout list, return 0.0 Gitcoin Passport score - if user_address in TIMEOUT_LIST: - return 0.0 - # We remove score associated with GTC staking potential_score = _apply_gtc_staking_stamp_nullification(stamps.score, stamps) # If the user is in the guest list and has not been stamped by a guest list provider, increase the score by 21.0 - if user_address in GUEST_LIST and not _has_guest_stamp_applied_by_gp(stamps): + if user_address in guest_list and not _has_guest_stamp_applied_by_gp(stamps): return potential_score + 21.0 return potential_score From d634fd5dceb5f3f2654fe023c8f6eefd5f142826 Mon Sep 17 00:00:00 2001 From: Housekeeper Bot Date: Thu, 10 Oct 2024 09:18:14 +0000 Subject: [PATCH 16/31] [CI/CD] Update uat.env contracts --- ci/argocd/contracts/uat.env | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ci/argocd/contracts/uat.env b/ci/argocd/contracts/uat.env index 1c7452eaba..da078f2019 100644 --- a/ci/argocd/contracts/uat.env +++ b/ci/argocd/contracts/uat.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6793660 +BLOCK_NUMBER=6849437 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0xa49b0d8711BDe73C4c48371bE53B3a0Ba9efFDa0 -DEPOSITS_CONTRACT_ADDRESS=0x773B267Eb3BfdC0C00b6d5e6ED0AFC8dF164E69b -EPOCHS_CONTRACT_ADDRESS=0x227c5ad3cD9Ba8646e1584e822F88538037e85b7 -PROPOSALS_CONTRACT_ADDRESS=0x322FA3313CDDE6e021db71D6Ea8aaaF57fE219f2 -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0xc0720c9cC73497A142E7D0a4A05cFF2DAf82b3fc -VAULT_CONTRACT_ADDRESS=0xff21F0910F7A3b83D2D4d145C648A738c48d993F +AUTH_CONTRACT_ADDRESS=0x2aBddebB710d06a34C7FBf2f66e45Ba7a8ACb723 +DEPOSITS_CONTRACT_ADDRESS=0x0c3fb6066B43E3B5b4cA357E16207147215cC13c +EPOCHS_CONTRACT_ADDRESS=0x2871E2c439C815A81c67CeFdBBcF4A0F5581DDa8 +PROPOSALS_CONTRACT_ADDRESS=0xBbCcd056e8264907F2ac5250d9BC82152DE7260e +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x32aC790b8f1AB51AD355a3Cffee3217525ad2F8d +VAULT_CONTRACT_ADDRESS=0x48293e11e870C2aF4Be41693155a5EAFC7e0Ca11 From 8fa3e8c5dadf00004d502dcf4b9ea51e75a75f3a Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 02:53:00 +0200 Subject: [PATCH 17/31] Updates the address list validation --- backend/v2/uniqueness_quotients/dependencies.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index 39f313d8ba..fbd5451ca9 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -10,7 +10,8 @@ ) from app.shared.blockchain_types import ChainTypes from fastapi import Depends -from pydantic import Field +from pydantic import Field, TypeAdapter +from v2.core.types import Address from v2.core.dependencies import GetChainSettings, GetSession, OctantSettings from .services import UQScoreGetter @@ -50,14 +51,18 @@ def get_uq_score_getter( uq_threshold = UQ_THRESHOLD_MAINNET if is_mainnet else UQ_THRESHOLD_NOT_MAINNET timeout_list = TIMEOUT_LIST if is_mainnet else TIMEOUT_LIST_NOT_MAINNET + address_set_validator = TypeAdapter(set[Address]) + timeout_set = address_set_validator.validate_python(timeout_list) + guest_set = address_set_validator.validate_python(GUEST_LIST) + return UQScoreGetter( session=session, uq_score_threshold=uq_threshold, max_uq_score=settings.max_uq_score, low_uq_score=settings.low_uq_score, null_uq_score=settings.null_uq_score, - 
guest_list=GUEST_LIST, - timeout_list=timeout_list, + guest_list=guest_set, + timeout_list=timeout_set, ) From 360be6256fd24666875f45e10e9438a338652786 Mon Sep 17 00:00:00 2001 From: Housekeeper Bot Date: Tue, 15 Oct 2024 06:29:01 +0000 Subject: [PATCH 18/31] [CI/CD] Update uat.env contracts --- ci/argocd/contracts/uat.env | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ci/argocd/contracts/uat.env b/ci/argocd/contracts/uat.env index a0eca800b0..ebf852ce05 100644 --- a/ci/argocd/contracts/uat.env +++ b/ci/argocd/contracts/uat.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6873017 +BLOCK_NUMBER=6878735 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0x459708cEEA480d64d4e8ed69dAFfe42e3842B2E7 -DEPOSITS_CONTRACT_ADDRESS=0xB83cd4A3D7F28799C4a29883bC1659702436ceec -EPOCHS_CONTRACT_ADDRESS=0x39e4437770Ed212e6950E2763b628bB21eF4F42a -PROPOSALS_CONTRACT_ADDRESS=0xdEAdB7D682C42E460cCdb4866cE95e6Ea60D3845 -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x4c6af5C69299924292646e50a06b1E164846f8fe -VAULT_CONTRACT_ADDRESS=0xD2C922Ec183772386acCC211Ab2EedAD5370BAed +AUTH_CONTRACT_ADDRESS=0x76E1707603a40BF9c748B106c54254fA5615E50e +DEPOSITS_CONTRACT_ADDRESS=0xC2c0Ac7B045B954c9e75D49aB206fd0c6f692116 +EPOCHS_CONTRACT_ADDRESS=0xDC0521A6d39b194cD5bC3F4389874f7732B677d6 +PROPOSALS_CONTRACT_ADDRESS=0x8194B411A35ceee0f557c278EAd746540d448Ad5 +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x1d630737487e23CA1C532ADa818cd16457263c29 +VAULT_CONTRACT_ADDRESS=0x323178Bb6e866982f44Ca3Ce72566a6632221317 From 4a231852c1121de20f089a56250e0de8b7440336 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 12:43:48 +0200 Subject: [PATCH 19/31] Bugfix missing argument --- backend/v2/allocations/socket.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index bfff310f80..3adf140637 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -36,7 +36,10 @@ get_projects_settings, ) from v2.projects.services import ProjectsAllocationThresholdGetter -from v2.uniqueness_quotients.dependencies import get_uq_score_getter, get_uq_score_settings +from v2.uniqueness_quotients.dependencies import ( + get_uq_score_getter, + get_uq_score_settings, +) from .schemas import UserAllocationRequest, UserAllocationRequestV1 @@ -174,7 +177,9 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ get_chain_settings(), ) - uq_score_getter = get_uq_score_getter(s5, get_uq_score_settings()) + uq_score_getter = get_uq_score_getter( + s5, get_uq_score_settings(), get_chain_settings() + ) allocations = await get_allocator( epoch_number, From c0b59949a7c28d7a8c5d9d3a1f1c1d09a4e353ef Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 14:13:54 +0200 Subject: [PATCH 20/31] update --- backend/v2/uniqueness_quotients/repositories.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py index 4fb27c9f3e..80f12bf679 100644 --- a/backend/v2/uniqueness_quotients/repositories.py +++ b/backend/v2/uniqueness_quotients/repositories.py @@ -44,6 +44,7 @@ async def save_uq_score_for_user_address( ) session.add(uq_score) + await session.commit() async def get_gp_stamps_by_address( From 60811c969930b1ef386ed800925381911955e8cf Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 15:01:49 +0200 Subject: [PATCH 21/31] adds address to timeout for testing --- backend/app/constants.py | 1 
+ 1 file changed, 1 insertion(+) diff --git a/backend/app/constants.py b/backend/app/constants.py index aca64a667f..be30f9197e 100644 --- a/backend/app/constants.py +++ b/backend/app/constants.py @@ -981,6 +981,7 @@ TIMEOUT_LIST_NOT_MAINNET = { "0xdf486eec7b89c390569194834a2f7a71da05ee13", "0x689f1a51c177cce66e3afdca4b1ded7721f531f9", + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", } GUEST_LIST_STAMP_PROVIDERS = [ From b011b055f33d186d1a75fb8672ce1ba7b6ce03a1 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 15:17:16 +0200 Subject: [PATCH 22/31] up --- backend/app/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/constants.py b/backend/app/constants.py index be30f9197e..58d6761110 100644 --- a/backend/app/constants.py +++ b/backend/app/constants.py @@ -981,7 +981,7 @@ TIMEOUT_LIST_NOT_MAINNET = { "0xdf486eec7b89c390569194834a2f7a71da05ee13", "0x689f1a51c177cce66e3afdca4b1ded7721f531f9", - "0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", } GUEST_LIST_STAMP_PROVIDERS = [ From f3fc9b0e0177aaeb872b286bf0bb5238710a5826 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Tue, 15 Oct 2024 15:55:12 +0200 Subject: [PATCH 23/31] adds address to timeout list for testing --- backend/app/constants.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/constants.py b/backend/app/constants.py index 58d6761110..21af29d964 100644 --- a/backend/app/constants.py +++ b/backend/app/constants.py @@ -982,6 +982,7 @@ "0xdf486eec7b89c390569194834a2f7a71da05ee13", "0x689f1a51c177cce66e3afdca4b1ded7721f531f9", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", + "0x018d43ac91432d00c4ad1531c98b6ccd2b352538", } GUEST_LIST_STAMP_PROVIDERS = [ From 3eb9286f96832c897633809a87faccb2efe9a6d9 Mon Sep 17 00:00:00 2001 From: Housekeeper Bot Date: Tue, 15 Oct 2024 20:43:28 +0000 Subject: [PATCH 24/31] [CI/CD] Update uat.env contracts --- ci/argocd/contracts/uat.env | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ci/argocd/contracts/uat.env b/ci/argocd/contracts/uat.env index ebf852ce05..da6658afca 100644 --- a/ci/argocd/contracts/uat.env +++ b/ci/argocd/contracts/uat.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6878735 +BLOCK_NUMBER=6882311 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0x76E1707603a40BF9c748B106c54254fA5615E50e -DEPOSITS_CONTRACT_ADDRESS=0xC2c0Ac7B045B954c9e75D49aB206fd0c6f692116 -EPOCHS_CONTRACT_ADDRESS=0xDC0521A6d39b194cD5bC3F4389874f7732B677d6 -PROPOSALS_CONTRACT_ADDRESS=0x8194B411A35ceee0f557c278EAd746540d448Ad5 -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x1d630737487e23CA1C532ADa818cd16457263c29 -VAULT_CONTRACT_ADDRESS=0x323178Bb6e866982f44Ca3Ce72566a6632221317 +AUTH_CONTRACT_ADDRESS=0xC383DeeB8939efa46d12b4b8C5DbbB62b7EF64a5 +DEPOSITS_CONTRACT_ADDRESS=0xF6CAB83AC011c81d0c43F3FD90353f7967b0A288 +EPOCHS_CONTRACT_ADDRESS=0x3318204Af969917002764df4C4Ea7D57065af018 +PROPOSALS_CONTRACT_ADDRESS=0xa939Ebfb271c6372152d23F7E5adE7DB430116f3 +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0xC2C4d95D79D2eA2Bcd09B3c170ae5E6B30315A40 +VAULT_CONTRACT_ADDRESS=0xA2935F8c8E605ef396CacFDa0EA17175D2CBf6a5 From 724b765e754444fa0dc021b2d8b3a6eb54f9c4d1 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Wed, 16 Oct 2024 11:05:41 +0200 Subject: [PATCH 25/31] update timeout list for dev envs --- backend/app/constants.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/app/constants.py b/backend/app/constants.py index 21af29d964..d62dcecfac 100644 --- a/backend/app/constants.py +++ 
b/backend/app/constants.py @@ -981,7 +981,6 @@ TIMEOUT_LIST_NOT_MAINNET = { "0xdf486eec7b89c390569194834a2f7a71da05ee13", "0x689f1a51c177cce66e3afdca4b1ded7721f531f9", - "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", "0x018d43ac91432d00c4ad1531c98b6ccd2b352538", } From 0065e043fcfcd0fea6dfd5f28056b28bd2c79a4d Mon Sep 17 00:00:00 2001 From: adam-gf Date: Wed, 16 Oct 2024 15:48:54 +0200 Subject: [PATCH 26/31] Adds redis for socketio manager --- backend/socket_client.py | 263 ------------------ backend/v2/allocations/socket.py | 57 ++-- backend/v2/core/dependencies.py | 18 ++ backend/v2/main.py | 26 +- .../v2/uniqueness_quotients/dependencies.py | 2 +- backend/ws_allocation_tester.py | 230 --------------- backend/ws_metrics.py | 126 --------- backend/ws_req_metrics.py | 179 ------------ 8 files changed, 67 insertions(+), 834 deletions(-) delete mode 100644 backend/socket_client.py delete mode 100644 backend/ws_allocation_tester.py delete mode 100644 backend/ws_metrics.py delete mode 100644 backend/ws_req_metrics.py diff --git a/backend/socket_client.py b/backend/socket_client.py deleted file mode 100644 index f34dcc09a7..0000000000 --- a/backend/socket_client.py +++ /dev/null @@ -1,263 +0,0 @@ -import asyncio -import socketio - -from app.legacy.crypto.eip712 import sign -from v2.allocations.validators import build_allocations_eip712_data - -# Create a Socket.IO client -sio = socketio.AsyncClient(logger=True, engineio_logger=True) - - -# Define event handlers -@sio.event -async def connect(): - print(">>>Connected to the server") - - -@sio.event -async def connect_error(data): - print(">>>The connection failed with error:", data) - - -@sio.event -async def disconnect(): - print(">>>I'm disconnected!") - - -# A handler for any event with a wildcard (not all implementations of Socket.IO support this feature directly) -@sio.on("*") -async def catch_all(event, data): - print(f">>>Received an event of type '{event}' with data:", data) - - -@sio.event -async def epoch(data): - print(f">>>Epoch received: {data}") - - -@sio.event -async def project_rewards(data): - print(f"Message received: {data}") - - -@sio.event -async def threshold(data): - print(f"Custom event received: {data}") - - -# Connect to the server -async def main(): - print("Connecting to the server...") - await sio.connect("http://localhost:8000/", wait_timeout=10) - print("Connected. 
Waiting for events...") - # This line will keep the client running and listening for events - - # Emit events - - # Emit a custom event - data = { - "userAddress": "0xb429d71F676f6e804010D8B699EefbF1ed050420", - "payload": { - "allocations": [ - { - "proposalAddress": "0x1c01595f9534E33d411035AE99a4317faeC4f6Fe", - "amount": 100, - }, - { - "proposalAddress": "0x6e8873085530406995170Da467010565968C7C62", - "amount": 200, - }, - ], - "nonce": 0, - "signature": "0x03c0e67cdc612bf1c0a690346805c5f461fbc0a8fe3041b4849c9ddbc939553a53997dfb6578200192e071618d9f054ae68513f134206149acf70ff04cea02931c", - }, - "isManuallyEdited": False, - } - await sio.emit("allocate", data) - - await sio.wait() - - -# Emit events -async def emit_event(event_name, data): - await sio.emit(event_name, data) - - -# Run the client -# if __name__ == "__main__": -# asyncio.run(main()) - - -from tests.helpers.constants import ALICE, BOB -from v2.projects.depdendencies import ProjectsSettings, get_projects_contracts -from v2.core.dependencies import Web3ProviderSettings, get_w3 -from v2.epochs.dependencies import EpochsSettings, get_epochs_contracts -from v2.deposits.dependencies import DepositsSettings, get_deposits_contracts -from v2.glms.dependencies import GLMSettings, get_glm_contracts - -w3 = get_w3(Web3ProviderSettings()) -epochs_contracts = get_epochs_contracts(w3, EpochsSettings()) -projects_contracts = get_projects_contracts(w3, ProjectsSettings()) -deposits_contracts = get_deposits_contracts(w3, DepositsSettings()) -glm_contracts = get_glm_contracts(w3, GLMSettings()) -# epochs_subgraph = get_epochs_subgraph(EpochsSubgraphSettings()) - - -from eth_account.signers.local import LocalAccount -from eth_account import Account as EthAccount - - -async def move_to_next_epoch(target: int): - - assert await epochs_contracts.get_current_epoch() == target - 1 - now = (await w3.eth.get_block("latest")).timestamp - nextEpochAt = await epochs_contracts.get_current_epoch_end() - forward = nextEpochAt - now + 30 - # await w3.provider.make_request("evm_increaseTime", [forward]) - # await w3.provider.make_request("evm_mine", []) - # assert await epochs_contracts.get_current_epoch() == target - -chain_id = 11155111 # Sepolia - -mine = "a184bdfb5f83fcd76d7f6ac4ae530c69fa941845283b23aee8db411e31c8a367" -me: LocalAccount = EthAccount.from_key(mine) - -project_addresses = [ - "0x0B7246eF74Ca7b37Fdc3D15be4f0b49876622F95", - "0x0c9dc7622aE5f56491aB4cCe060d6002450B79D2", -] - -async def mine(): - current = await epochs_contracts.get_current_epoch() - ts = await epochs_contracts.get_current_epoch_end() - pending = await epochs_contracts.get_pending_epoch() - duration = await epochs_contracts.get_epoch_duration() - finalized = await epochs_contracts.get_finalized_epoch() - decision_window = await epochs_contracts.get_decision_window() - is_open = await epochs_contracts.is_decision_window_open() - - print("current: ", current) - print("ts: ", ts) - print("pending: ", pending) - print("duration: ", duration) - print("finalized: ", finalized) - print("decision_window: ", decision_window) - print("is_open: ", is_open) - - return ts - - -def allocate(): - - asyncio.run(mine()) - - # rv = self._flask_client.get(f"/allocations/users/{address}/allocation_nonce") - nonce = 0 - - - payload = { - "allocations": [ - { - "proposalAddress": address, - "amount": 10, - } - for address in project_addresses - ], - "nonce": nonce, - } - - data = build_allocations_eip712_data(chain_id, payload) - - signature = sign(me, data) - - - import requests - import time 
- - start = time.time() - resp = requests.post( - "https://uat-backend.octant.wildland.dev/allocations/allocate", - json={ - "payload": payload, - "userAddress": me.address, - "signature": signature, - }, - ) - print("time taken: ", time.time() - start) - - print("after request") - print(resp.status_code) - print(resp.json()) - - # rv = self._flask_client.post( - # "/allocations/allocate", - # json={ - # "payload": { - # "allocations": [ - # {"proposalAddress": address, "amount": amount} - # for address in addresses - # ], - # "nonce": nonce, - # }, - # "userAddress": account.address, - # "signature": signature, - # }, - # ) - # return rv.status_code - -if __name__ == "__main__": - allocate() - -# async def test_allocation(): - -# pending = await epochs_contracts.get_pending_epoch() - -# alice_proposals = await projects_contracts.get_project_addresses(1)[:3] - -# # alice_proposals = get_projects_addresses(1)[:3] -# alice: LocalAccount = EthAccount.from_key(ALICE) -# bob: LocalAccount = EthAccount.from_key(BOB) - -# await glm_contracts.approve(alice, deposits_contracts.contract.address, w3.to_wei(10000, "ether")) -# await deposits_contracts.lock(alice, w3.to_wei(10000, "ether")) - -# await glm_contracts.approve(bob, deposits_contracts.contract.address, w3.to_wei(15000, "ether")) -# await deposits_contracts.lock(bob, w3.to_wei(15000, "ether")) - -# # glm.approve(alice, deposits.contract.address, w3.to_wei(10000, "ether")) - -# # # lock GLM from two accounts -# # UserAccount(CryptoAccount.from_key(ALICE), client) -# # glm.approve(self._account, deposits.contract.address, w3.to_wei(value, "ether")) -# # deposits.lock(self._account, w3.to_wei(value, "ether")) - -# # ua_alice.lock(10000) -# # ua_bob.lock(15000) - -# # forward time to the beginning of the epoch 2 -# client.move_to_next_epoch(STARTING_EPOCH + 1) - -# # wait for indexer to catch up -# epoch_no = client.wait_for_sync(STARTING_EPOCH + 1) -# app.logger.debug(f"indexed epoch: {epoch_no}") - -# # make a snapshot -# res = client.pending_snapshot() -# assert res["epoch"] > 0 - -# ua_alice.allocate(1000, alice_proposals) -# ua_bob.allocate(1000, alice_proposals[:1]) - -# allocations, _ = client.get_epoch_allocations(STARTING_EPOCH) -# unique_donors = set() -# unique_proposals = set() -# app.logger.debug(f"Allocations: \n {allocations}") - -# assert len(allocations["allocations"]) == 4 -# for allocation in allocations["allocations"]: -# unique_donors.add(allocation["donor"]) -# unique_proposals.add(allocation["project"]) -# assert int(allocation["amount"]) > 0 -# app.logger.debug(f"Allocations: {allocations}") -# assert len(unique_donors) == 2 -# assert len(unique_proposals) == 3 diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 3adf140637..47f436a039 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -36,10 +36,7 @@ get_projects_settings, ) from v2.projects.services import ProjectsAllocationThresholdGetter -from v2.uniqueness_quotients.dependencies import ( - get_uq_score_getter, - get_uq_score_settings, -) +from v2.uniqueness_quotients.dependencies import get_uq_score_getter, get_uq_score_settings from .schemas import UserAllocationRequest, UserAllocationRequestV1 @@ -97,20 +94,10 @@ async def create_dependencies_on_connect() -> AsyncGenerator[ yield (s4, threshold_getter, estimated_project_rewards) except Exception: - await asyncio.gather( - s1.rollback(), - s2.rollback(), - s3.rollback(), - s4.rollback(), - ) + await cleanup_sessions(s1, s2, s3, s4) raise finally: 
- await asyncio.gather( - s1.close(), - s2.close(), - s3.close(), - s4.close(), - ) + pass @asynccontextmanager @@ -199,26 +186,10 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ ) except Exception: - await asyncio.gather( - s1.rollback(), - s2.rollback(), - s3.rollback(), - s4.rollback(), - s5.rollback(), - s6.rollback(), - s7.rollback(), - ) + await cleanup_sessions(s1, s2, s3, s4, s5, s6, s7) raise finally: - await asyncio.gather( - s1.close(), - s2.close(), - s3.close(), - s4.close(), - s5.close(), - s6.close(), - s7.close(), - ) + pass class AllocateNamespace(socketio.AsyncNamespace): @@ -378,3 +349,21 @@ def from_dict(data: str) -> UserAllocationRequest: isManuallyEdited=requestV1.is_manually_edited, ) return request + + +async def safe_session_cleanup(session): + try: + await session.rollback() + except Exception: + # Log the rollback error, but don't raise it + logging.exception("Error during session rollback") + finally: + try: + await session.close() + except Exception: + # Log the close error, but don't raise it + logging.exception("Error during session close") + + +async def cleanup_sessions(*sessions): + await asyncio.gather(*(safe_session_cleanup(s) for s in sessions)) diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py index 786584b57e..43c309abc2 100644 --- a/backend/v2/core/dependencies.py +++ b/backend/v2/core/dependencies.py @@ -134,3 +134,21 @@ def get_chain_settings() -> ChainSettings: GetChainSettings = Annotated[ChainSettings, Depends(get_chain_settings)] + + +class SocketioSettings(OctantSettings): + host: str = Field(..., alias="SOCKETIO_REDIS_HOST") + port: int = Field(..., alias="SOCKETIO_REDIS_PORT") + password: str = Field(..., alias="SOCKETIO_REDIS_PASSWORD") + db: int = Field(..., alias="SOCKETIO_REDIS_DB") + + @property + def url(self) -> str: + return f"redis://:{self.password}@{self.host}:{self.port}/{self.db}" + + +def get_socketio_settings() -> SocketioSettings: + return SocketioSettings() + + +GetSocketioSettings = Annotated[SocketioSettings, Depends(get_socketio_settings)] diff --git a/backend/v2/main.py b/backend/v2/main.py index eba6b6a43d..17bb7444d3 100644 --- a/backend/v2/main.py +++ b/backend/v2/main.py @@ -1,6 +1,8 @@ # Create FastAPI app import logging +import os +import redis import socketio from app.exceptions import OctantException from fastapi import FastAPI @@ -8,6 +10,7 @@ from sqlalchemy.exc import SQLAlchemyError from v2.allocations.router import api as allocations_api from v2.allocations.socket import AllocateNamespace +from v2.core.dependencies import get_socketio_settings from v2.project_rewards.router import api as project_rewards_api fastapi_app = FastAPI() @@ -35,7 +38,28 @@ async def fastapi_endpoint(): return {"message": "This is a FastAPI endpoint."} -sio = socketio.AsyncServer(cors_allowed_origins="*", async_mode="asgi") +def get_socketio_manager() -> socketio.AsyncRedisManager | None: + if os.environ.get("SOCKETIO_MANAGER_TYPE") != "redis": + logging.info("Initializing socketio manager to default in-memory manager") + return None + + settings = get_socketio_settings() + try: + # Attempt to create a Redis connection + redis_client = redis.Redis.from_url(settings.url) + # Test the connection + redis_client.ping() + # If successful, return the AsyncRedisManager + return socketio.AsyncRedisManager(settings.url) + except Exception as e: + logging.error(f"Failed to establish Redis connection: {str(e)}") + raise + + +mgr = get_socketio_manager() +sio = socketio.AsyncServer( + 
cors_allowed_origins="*", async_mode="asgi", client_manager=mgr +) sio.register_namespace(AllocateNamespace("/")) sio_asgi_app = socketio.ASGIApp(socketio_server=sio, other_asgi_app=fastapi_app) diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py index fbd5451ca9..d0b99b35e7 100644 --- a/backend/v2/uniqueness_quotients/dependencies.py +++ b/backend/v2/uniqueness_quotients/dependencies.py @@ -11,8 +11,8 @@ from app.shared.blockchain_types import ChainTypes from fastapi import Depends from pydantic import Field, TypeAdapter -from v2.core.types import Address from v2.core.dependencies import GetChainSettings, GetSession, OctantSettings +from v2.core.types import Address from .services import UQScoreGetter diff --git a/backend/ws_allocation_tester.py b/backend/ws_allocation_tester.py deleted file mode 100644 index 799c7a110c..0000000000 --- a/backend/ws_allocation_tester.py +++ /dev/null @@ -1,230 +0,0 @@ -import asyncio -import json -import multiprocessing -import os -import random -import sys -import socketio - -import time -import requests -from app.legacy.crypto.eip712 import sign -from v2.allocations.validators import build_allocations_eip712_data - -from eth_account.signers.local import LocalAccount -from eth_account import Account as EthAccount - - - -events = [] - -pre_allocate = True -donors_count = 0 - - -sio = socketio.AsyncClient(logger=False, engineio_logger=False) - - -# Define event handlers -@sio.on("connect") -async def connect(): - events.append({"event": "connect", "time": time.time()}) - -@sio.event -async def connect_error(data): - events.append({"event": "connect_error", "data": data, "time": time.time()}) - await sio.disconnect() - -@sio.on("connect_error") -async def connect_error(data): - events.append({"event": "connect_error", "data": data, "time": time.time()}) - await sio.disconnect() - -@sio.on("disconnect") -async def disconnect(): - events.append({"event": "disconnect", "time": time.time()}) - -@sio.on("project_rewards") -async def project_rewards(data): - events.append({"event": "project_rewards_received", "data": data, "time": time.time()}) - -@sio.on("project_donors") -async def project_donors(data): - global donors_count - if pre_allocate: - donors_count += 1 - else: - donors_count -= 1 - events.append({"event": "project_donors_received", "data": data, "time": time.time()}) - -@sio.on("threshold") -async def threshold(data): - events.append({"event": "threshold_received", "data": data, "time": time.time()}) - -chain_id = 11155111 # Sepolia - - - - -mine0 = os.getenv("MINE_KEY", None) -me: LocalAccount = EthAccount.from_key(mine0) - -# UAT projects -# project_addresses = [ -# "0xc6FD734790E83820e311211B6d9A682BCa4ac97b", -# "0x242ba6d68FfEb4a098B591B32d370F973FF882B7", -# ] -project_addresses = [ - "0x0B7246eF74Ca7b37Fdc3D15be4f0b49876622F95", - "0x0cbF31Ef6545EE30f47651D1A991Bf0aeB03DF29", - "0x1c01595f9534E33d411035AE99a4317faeC4f6Fe", -] - - -allocation_info = { - "request": None, - "signature_duration": None, - "send_time": None, -} - -async def allocate(): - # Emit a custom event - - events.append({"event": "nonce_request", "time": time.time()}) - - try: - # url = f"https://uat-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" - # url = f"https://master-backend.octant.wildland.dev/allocations/users/{me.address}/allocation_nonce" - url = f"http://127.0.0.1:5000/allocations/users/{me.address}/allocation_nonce" # forward to flask - # url = 
f"http://127.0.0.1:5000/flask/allocations/users/{me.address}/allocation_nonce" - nonce = requests.get(url).json()['allocationNonce'] - - events.append({"event": "nonce_response", "data": nonce, "time": time.time()}) - - except Exception as e: - - events.append({"event": "error_nonce", "data": str(e), "time": time.time()}) - await sio.disconnect() - return - - sig_time = time.time() - - random_mult = random.random() - amout = int(1222333312223333 * random_mult) - # amout = 827243882781739 - # print("Amount: ", amout) - - payload = { - "allocations": [ - { - "proposalAddress": address, - "amount": str(amout), - } - for address in project_addresses - ], - "nonce": nonce, - } - - events.append({"event": "payload_generated", "data": payload, "time": time.time()}) - - data = build_allocations_eip712_data(chain_id, payload) - - signature = sign(me, data) - - request_data = { - "userAddress": me.address, - "payload": payload, - "signature": signature, - "isManuallyEdited": False, - } - - # print("signature: ", signature) - # print("payload: ", payload) - - # print("time taken for signature: ", time.time() - sig_time) - - events.append({"event": "allocate_request", "data": request_data, "time": time.time()}) - - # await sio.emit("allocate", json.dumps(request_data)) - - resp = requests.post( - # "https://uat-backend.octant.wildland.dev/allocations/allocate", - # "https://master-backend.octant.wildland.dev/allocations/allocate", - "http://127.0.0.1:5000/allocations/allocate", # async fapi - # "http://127.0.0.1:5000/flask/allocations/allocate", - json=request_data, - ) - - events.append({"event": "allocate_response", "data": resp.json(), "time": time.time(), "status_code": resp.status_code}) - - - # print("donors_count: ", donors_count) - - global pre_allocate - pre_allocate = False - - - -from uuid import uuid4 - -async def run_ws(): - - for i in range(5): - - global pre_allocate - global donors_count - global events - - donors_count = 0 - events = [] - pre_allocate = True - - print(f"Running test {i:02d} : {me.address[:6]}") - - try: - # await sio.connect('https://uat-backend.octant.wildland.dev/', wait_timeout=10) - await sio.connect('https://master-backend.octant.wildland.dev/', wait_timeout=10) - await allocate() - # Wait till donors count is 0 again - while donors_count > 0: - await asyncio.sleep(0.3) - await sio.disconnect() - except Exception as e: - events.append({"event": "error", "data": str(e), "time": time.time()}) - await sio.disconnect() - - with open(f"ws_logs/events_{uuid4()}.json", "w") as f: - json.dump(events, f, indent=4) - - -async def run_request(): - - for i in range(1): - - global pre_allocate - global donors_count - global events - - donors_count = 0 - events = [] - pre_allocate = True - - print(f"Running test {i:02d} : {me.address[:6]}") - - try: - await allocate() - except Exception as e: - events.append({"event": "error", "data": str(e), "time": time.time()}) - - with open(f"wr_logs/events_{uuid4()}.json", "w") as f: - json.dump(events, f, indent=4) - - - -# Main function to start the test -if __name__ == "__main__": - # num_connections = 2 # Number of concurrent connections - # metrics = run_concurrent_connections(num_connections) - # print_metrics(metrics) - - asyncio.run(run_request()) \ No newline at end of file diff --git a/backend/ws_metrics.py b/backend/ws_metrics.py deleted file mode 100644 index 24cd6ddbab..0000000000 --- a/backend/ws_metrics.py +++ /dev/null @@ -1,126 +0,0 @@ -import json -import os -import logging -from datetime import datetime - -# Configure 
logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', filename='ws_metrics.log', filemode='w') - -def parse_timestamp(timestamp): - return datetime.fromtimestamp(timestamp) - -def calculate_durations(events): - first_event_time = parse_timestamp(events[0]['time']) - last_event_time = parse_timestamp(events[-1]['time']) - - allocation_start_time = None - allocate_sent_time = None - threshold_received_time = None - disconnect_time = None - non_201_allocate_responses = 0 - - for event in events: - event_time = parse_timestamp(event['time']) - if event['event'].startswith('error'): - logging.error(f"Error event found: {event}") - - if event['event'] == 'allocate_request': - allocate_sent_time = event_time - elif event['event'] == 'allocate_response': - threshold_received_time = event_time - if event.get('status_code') != 201: - non_201_allocate_responses += 1 - logging.error(f"Allocate response with status code {event.get('status_code')} found: {event}") - elif event['event'] == 'threshold_received': - threshold_received_time = event_time - elif event['event'] == 'disconnect': - disconnect_time = event_time - elif event['event'] == 'nonce_request': - allocation_start_time = event_time - - metrics = { - 'duration_from_first_to_last_event': (last_event_time - first_event_time).total_seconds(), - 'duration_before_allocation': (allocation_start_time - first_event_time).total_seconds() if allocation_start_time else None, - 'duration_allocate_to_threshold': (threshold_received_time - allocate_sent_time).total_seconds() if allocate_sent_time and threshold_received_time else None, - 'duration_threshold_to_disconnect': (disconnect_time - threshold_received_time).total_seconds() if threshold_received_time and disconnect_time else None, - 'non_201_allocate_responses': non_201_allocate_responses - } - - return metrics - -def aggregate_metrics(all_metrics): - aggregated = { - 'duration_from_first_to_last_event': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'duration_before_allocation': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'duration_allocate_to_threshold': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'duration_threshold_to_disconnect': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'non_201_allocate_responses': {'total': 0, 'count': 0} - } - - for metrics in all_metrics: - for key in aggregated.keys(): - if key == 'non_201_allocate_responses': - aggregated[key]['total'] += metrics[key] - aggregated[key]['count'] += 1 - elif metrics[key] is not None: - aggregated[key]['min'] = min(aggregated[key]['min'], metrics[key]) - aggregated[key]['max'] = max(aggregated[key]['max'], metrics[key]) - aggregated[key]['total'] += metrics[key] - aggregated[key]['count'] += 1 - - # Calculate averages - for key in aggregated.keys(): - if key != 'non_201_allocate_responses' and aggregated[key]['count'] > 0: - aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] - elif key == 'non_201_allocate_responses': - aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] if aggregated[key]['count'] > 0 else None - else: - aggregated[key]['avg'] = None - - - return aggregated - -def process_file(file_path): - try: - with open(file_path, 'r') as file: - events = json.load(file) - - metrics = calculate_durations(events) - return metrics - except Exception as e: - logging.error(f"Error processing file {file_path}: {e}") - return None - -def 
main(): - ws_logs_dir = 'wr_logs' - if not os.path.exists(ws_logs_dir): - print(f"Directory {ws_logs_dir} does not exist.") - return - - all_metrics = [] - for file_name in os.listdir(ws_logs_dir): - file_path = os.path.join(ws_logs_dir, file_name) - if os.path.isfile(file_path): - metrics = process_file(file_path) - if metrics: - all_metrics.append(metrics) - - aggregated_metrics = aggregate_metrics(all_metrics) - - if aggregated_metrics: - print("Aggregated Metrics:") - for key, value in aggregated_metrics.items(): - if key == 'non_201_allocate_responses': - print(f" {key.replace('_', ' ').title()}: {value['total']} occurrences") - else: - print("key", key) - print("value", value) - print(f" {key.replace('_', ' ').title()}:") - print(f" Min: {value['min']} seconds") - print(f" Max: {value['max']} seconds") - print(f" Avg: {value['avg']} seconds") - else: - print("Failed to aggregate metrics.") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/backend/ws_req_metrics.py b/backend/ws_req_metrics.py deleted file mode 100644 index 5969a1ad9d..0000000000 --- a/backend/ws_req_metrics.py +++ /dev/null @@ -1,179 +0,0 @@ -import json -import os -from datetime import datetime -import numpy as np - -def parse_timestamp(timestamp): - return datetime.fromtimestamp(timestamp) - -def calculate_metrics(events): - first_event_time = parse_timestamp(events[0]['time']) - last_event_time = parse_timestamp(events[-1]['time']) - - allocate_request_time = None - payload_generated_time = None - nonce_request_time = None - response_times = {} - response_counts = {} - - for event in events: - event_time = parse_timestamp(event['time']) - if event['event'] == 'nonce_response': - nonce_request_time = event_time - elif event['event'] == 'payload_generated': - payload_generated_time = event_time - elif event['event'] == 'allocate_request': - allocate_request_time = event_time - elif event['event'] == 'allocate_response': - allocate_response_time = event_time - status_code = event.get('status_code') - if status_code not in response_times: - response_times[status_code] = [] - response_counts[status_code] = 0 - response_times[status_code].append((allocate_response_time - allocate_request_time).total_seconds()) - response_counts[status_code] += 1 - - # If theres no "allocate_response" we should assume it's a 600 status code - if 'allocate_response' not in [e['event'] for e in events]: - print("No allocate response found", events) - status_code = 600 - if status_code not in response_times: - response_times[status_code] = [] - response_counts[status_code] = 0 - response_times[status_code].append((last_event_time - allocate_request_time).total_seconds()) - response_counts[status_code] - - metrics = { - 'response_times': response_times, - 'response_counts': response_counts, - 'payload_generation_duration': (payload_generated_time - nonce_request_time).total_seconds() if payload_generated_time and nonce_request_time else None, - 'nonce_request_duration': (nonce_request_time - first_event_time).total_seconds() if nonce_request_time else None, - 'first_event_time': first_event_time.timestamp(), - 'last_event_time': last_event_time.timestamp(), - } - - return metrics - -def aggregate_metrics(all_metrics): - aggregated = { - 'response_times': {}, - 'response_counts': {}, - 'payload_generation_duration': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'nonce_request_duration': {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0}, - 'first_event_time': float('inf'), 
- 'last_event_time': float('-inf'), - 'total_requests': 0, - } - - for metrics in all_metrics: - for status_code, times in metrics['response_times'].items(): - if status_code not in aggregated['response_times']: - aggregated['response_times'][status_code] = {'min': float('inf'), 'max': float('-inf'), 'total': 0, 'count': 0, 'times': []} - aggregated['response_counts'][status_code] = 0 - for time in times: - aggregated['response_times'][status_code]['min'] = min(aggregated['response_times'][status_code]['min'], time) - aggregated['response_times'][status_code]['max'] = max(aggregated['response_times'][status_code]['max'], time) - aggregated['response_times'][status_code]['total'] += time - aggregated['response_times'][status_code]['count'] += 1 - aggregated['response_times'][status_code]['times'].append(time) - aggregated['response_counts'][status_code] += metrics['response_counts'][status_code] - - for key in ['payload_generation_duration', 'nonce_request_duration']: - if metrics[key] is not None: - aggregated[key]['min'] = min(aggregated[key]['min'], metrics[key]) - aggregated[key]['max'] = max(aggregated[key]['max'], metrics[key]) - aggregated[key]['total'] += metrics[key] - aggregated[key]['count'] += 1 - - aggregated['first_event_time'] = min(aggregated['first_event_time'], metrics['first_event_time']) - aggregated['last_event_time'] = max(aggregated['last_event_time'], metrics['last_event_time']) - aggregated['total_requests'] += 1 - - # Calculate averages and additional statistics - for key in ['payload_generation_duration', 'nonce_request_duration']: - if aggregated[key]['count'] > 0: - aggregated[key]['avg'] = aggregated[key]['total'] / aggregated[key]['count'] - else: - aggregated[key]['avg'] = None - - for status_code, data in aggregated['response_times'].items(): - if data['count'] > 0: - data['avg'] = data['total'] / data['count'] - data['median'] = np.median(data['times']) - data['90th_percentile'] = np.percentile(data['times'], 90) - data['80th_percentile'] = np.percentile(data['times'], 80) - data['std_dev'] = np.std(data['times']) - else: - data['avg'] = data['median'] = data['90th_percentile'] = data['std_dev'] = None - - # Calculate requests per second - total_duration = aggregated['last_event_time'] - aggregated['first_event_time'] - if total_duration > 0: - aggregated['requests_per_second'] = aggregated['total_requests'] / total_duration - else: - aggregated['requests_per_second'] = None - - return aggregated - -def process_file(file_path): - try: - with open(file_path, 'r') as file: - events = json.load(file) - - metrics = calculate_metrics(events) - return metrics - except Exception as e: - print(f"Error processing file {file_path}: {e}") - return None - -def main(): - ws_logs_dir = 'wr_logs' - if not os.path.exists(ws_logs_dir): - print(f"Directory {ws_logs_dir} does not exist.") - return - - all_metrics = [] - for file_name in os.listdir(ws_logs_dir): - file_path = os.path.join(ws_logs_dir, file_name) - if os.path.isfile(file_path): - metrics = process_file(file_path) - if metrics: - all_metrics.append(metrics) - - aggregated_metrics = aggregate_metrics(all_metrics) - - if aggregated_metrics: - print("Aggregated Metrics:") - for key, value in aggregated_metrics.items(): - if key in ['first_event_time', 'last_event_time']: - print(f" {key.replace('_', ' ').title()}: {datetime.fromtimestamp(value)}") - elif key == 'requests_per_second': - print(f" {key.replace('_', ' ').title()}: {value} requests/second") - elif key == 'response_counts': - print(" Response Counts:") - 
for status_code, count in value.items(): - print(f" {status_code}: {count} requests") - elif key == 'response_times': - print(" Response Times:") - for status_code, data in value.items(): - print(f" Status Code {status_code}:") - print(f" Min: {data['min']} seconds") - print(f" Max: {data['max']} seconds") - print(f" Avg: {data['avg']} seconds") - print(f" Median: {data['median']} seconds") - print(f" 90th Percentile: {data['90th_percentile']} seconds") - print(f" 80th Percentile: {data['80th_percentile']} seconds") - print(f" Std Dev: {data['std_dev']} seconds") - print(f" Num: {data['count']} requests") - elif isinstance(value, dict): - print(f" {key.replace('_', ' ').title()}:") - print(f" Min: {value.get('min')} seconds") - print(f" Max: {value.get('max')} seconds") - print(f" Avg: {value.get('avg')} seconds") - else: - print(f" {key.replace('_', ' ').title()}: {value}") - else: - print("Failed to aggregate metrics.") - -if __name__ == "__main__": - main() \ No newline at end of file From 9dc8ba0971fd417d0688244bff262363e83b4473 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Wed, 16 Oct 2024 20:36:09 +0200 Subject: [PATCH 27/31] updated formatting --- backend/v2/allocations/socket.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 47f436a039..5e501b6813 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -36,7 +36,10 @@ get_projects_settings, ) from v2.projects.services import ProjectsAllocationThresholdGetter -from v2.uniqueness_quotients.dependencies import get_uq_score_getter, get_uq_score_settings +from v2.uniqueness_quotients.dependencies import ( + get_uq_score_getter, + get_uq_score_settings, +) from .schemas import UserAllocationRequest, UserAllocationRequestV1 From 15798301c268339ed78458f625b6c023280e53dd Mon Sep 17 00:00:00 2001 From: adam-gf Date: Wed, 16 Oct 2024 20:41:12 +0200 Subject: [PATCH 28/31] Trying to fix tests --- backend/app/settings.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/app/settings.py b/backend/app/settings.py index 5c9e73d87e..d60eacf8df 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -109,8 +109,7 @@ class DevConfig(Config): CHAIN_ID = int(os.getenv("CHAIN_ID", 1337)) # Put the db file in project root DB_PATH = os.path.join(Config.PROJECT_ROOT, DB_NAME) - # SQLALCHEMY_DATABASE_URI = f"sqlite:///{DB_PATH}" - SQLALCHEMY_DATABASE_URI = os.getenv("DB_URI") + SQLALCHEMY_DATABASE_URI = f"sqlite:///{DB_PATH}" SUBGRAPH_RETRY_TIMEOUT_SEC = 2 X_REAL_IP_REQUIRED = parse_bool(os.getenv("X_REAL_IP_REQUIRED", "false")) CACHE_TYPE = "SimpleCache" From 093a86b872c4fa905610833ed21e83ceebb235c5 Mon Sep 17 00:00:00 2001 From: adam-gf Date: Wed, 16 Oct 2024 20:43:11 +0200 Subject: [PATCH 29/31] remove ununsed imports --- backend/v2/uniqueness_quotients/services.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py index ec04fa3a15..6f7dfddb0b 100644 --- a/backend/v2/uniqueness_quotients/services.py +++ b/backend/v2/uniqueness_quotients/services.py @@ -1,7 +1,6 @@ from dataclasses import dataclass from decimal import Decimal -from app.constants import GUEST_LIST, TIMEOUT_LIST from app.modules.user.antisybil.core import ( _apply_gtc_staking_stamp_nullification, _has_guest_stamp_applied_by_gp, From 1e48be28ce2c0885582905b0e1266f2210eced5d Mon Sep 17 00:00:00 2001 From: adam-gf Date: Fri, 18 Oct 2024 
12:44:09 +0200 Subject: [PATCH 30/31] Update and PR comments address --- backend/app/__init__.py | 1 + backend/app/settings.py | 2 +- backend/startup.py | 23 ++++++------ backend/v2/allocations/dependencies.py | 9 +++-- backend/v2/allocations/repositories.py | 7 ++-- backend/v2/allocations/router.py | 5 ++- backend/v2/allocations/schemas.py | 4 --- backend/v2/allocations/services.py | 15 ++++---- backend/v2/allocations/socket.py | 36 ++++++++++--------- backend/v2/allocations/validators.py | 5 ++- backend/v2/core/dependencies.py | 10 +----- backend/v2/deposits/dependencies.py | 3 +- backend/v2/epochs/dependencies.py | 15 ++++---- backend/v2/glms/dependencies.py | 3 +- backend/v2/main.py | 26 ++++++-------- backend/v2/matched_rewards/dependencies.py | 10 +++--- .../v2/project_rewards/capped_quadriatic.py | 3 +- backend/v2/project_rewards/dependencies.py | 7 ++-- backend/v2/project_rewards/router.py | 10 ++---- backend/v2/project_rewards/services.py | 6 ++-- backend/v2/projects/dependencies.py | 9 +++-- .../v2/uniqueness_quotients/dependencies.py | 3 +- .../v2/uniqueness_quotients/repositories.py | 8 ----- backend/v2/uniqueness_quotients/services.py | 3 +- backend/v2/user_patron_mode/repositories.py | 36 +++++++------------ 25 files changed, 111 insertions(+), 148 deletions(-) diff --git a/backend/app/__init__.py b/backend/app/__init__.py index e57ad3d07b..c5000845ff 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -46,6 +46,7 @@ def register_extensions(app): cors.init_app(app) db.init_app(app) migrate.init_app(app, db) + # This is meant to be disabled because we migrate to FastAPI # socketio.init_app(app) cache.init_app(app) init_scheduler(app) diff --git a/backend/app/settings.py b/backend/app/settings.py index d60eacf8df..75c37cb388 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -104,7 +104,7 @@ class DevConfig(Config): ENV = "dev" DEBUG = True - LOG_LVL = os.getenv("OCTANT_LOG_LEVEL", "INFO") + LOG_LVL = os.getenv("OCTANT_LOG_LEVEL", "DEBUG") DB_NAME = "dev.db" CHAIN_ID = int(os.getenv("CHAIN_ID", 1337)) # Put the db file in project root diff --git a/backend/startup.py b/backend/startup.py index 8920cecdd0..54d30052df 100644 --- a/backend/startup.py +++ b/backend/startup.py @@ -5,6 +5,8 @@ from starlette.middleware.base import BaseHTTPMiddleware +from app import create_app as create_flask_app +from app.extensions import db as flask_db if os.getenv("SENTRY_DSN"): import sentry_sdk @@ -40,33 +42,31 @@ def sentry_before_send(event, hint): before_send=sentry_before_send, ) -from app import create_app # noqa -from app.extensions import db # noqa -# Create Flask app -flask_app = create_app() +flask_app = create_flask_app() @flask_app.teardown_request def teardown_session(*args, **kwargs): - db.session.remove() + flask_db.session.remove() -from v2.main import fastapi_app # noqa - -# Mount Flask app under a sub-path -fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) +# I'm importing it here to make sure that the flask initializes before the fastapi one +from v2.main import app as fastapi_app # noqa # Middleware to check if the path exists in FastAPI +# If it does, proceed with the request +# If it doesn't, modify the request to forward to the Flask app class PathCheckMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): path = request.url.path - # Check if the path exists in FastAPI routes + for route in fastapi_app.routes: if path == route.path: # If path exists, proceed with the request return await 
call_next(request) + # If path does not exist, modify the request to forward to the Flask app if path.startswith("/flask"): return await call_next(request) @@ -75,11 +75,12 @@ async def dispatch(self, request: Request, call_next): return response +# Setup the pass-through to Flask app fastapi_app.add_middleware(PathCheckMiddleware) +fastapi_app.mount("/flask", WSGIMiddleware(flask_app)) if __name__ == "__main__": import uvicorn uvicorn.run(fastapi_app, host="0.0.0.0", port=5000) - # uvicorn.run(fastapi_app, port=5000) diff --git a/backend/v2/allocations/dependencies.py b/backend/v2/allocations/dependencies.py index 959491018f..df1ea54741 100644 --- a/backend/v2/allocations/dependencies.py +++ b/backend/v2/allocations/dependencies.py @@ -1,15 +1,14 @@ from typing import Annotated from fastapi import Depends +from v2.allocations.services import Allocator +from v2.allocations.validators import SignatureVerifier from v2.core.dependencies import GetChainSettings, GetSession -from v2.epochs.dependencies import AssertAllocationWindowOpen, GetEpochsSubgraph +from v2.epochs.dependencies import GetEpochsSubgraph, GetOpenAllocationWindowEpochNumber from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator from v2.projects.dependencies import GetProjectsContracts from v2.uniqueness_quotients.dependencies import GetUQScoreGetter -from .services import Allocator -from .validators import SignatureVerifier - def get_signature_verifier( session: GetSession, @@ -26,7 +25,7 @@ def get_signature_verifier( async def get_allocator( - epoch_number: AssertAllocationWindowOpen, + epoch_number: GetOpenAllocationWindowEpochNumber, session: GetSession, signature_verifier: GetSignatureVerifier, uq_score_getter: GetUQScoreGetter, diff --git a/backend/v2/allocations/repositories.py b/backend/v2/allocations/repositories.py index 3bf37fb79a..eac0d715c0 100644 --- a/backend/v2/allocations/repositories.py +++ b/backend/v2/allocations/repositories.py @@ -9,11 +9,14 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import coalesce +from v2.allocations.schemas import ( + AllocationWithUserUQScore, + ProjectDonation, + UserAllocationRequest, +) from v2.core.types import Address from v2.users.repositories import get_user_by_address -from .schemas import AllocationWithUserUQScore, ProjectDonation, UserAllocationRequest - async def sum_allocations_by_epoch(session: AsyncSession, epoch_number: int) -> int: """Get the sum of all allocations for a given epoch. 
We only consider the allocations that have not been deleted.""" diff --git a/backend/v2/allocations/router.py b/backend/v2/allocations/router.py index b82b0f3ffb..dba8cc4782 100644 --- a/backend/v2/allocations/router.py +++ b/backend/v2/allocations/router.py @@ -1,7 +1,6 @@ from fastapi import APIRouter - -from .dependencies import GetAllocator -from .schemas import UserAllocationRequest, UserAllocationRequestV1 +from v2.allocations.dependencies import GetAllocator +from v2.allocations.schemas import UserAllocationRequest, UserAllocationRequestV1 api = APIRouter(prefix="/allocations", tags=["Allocations"]) diff --git a/backend/v2/allocations/schemas.py b/backend/v2/allocations/schemas.py index 04639eacb7..983cdcb166 100644 --- a/backend/v2/allocations/schemas.py +++ b/backend/v2/allocations/schemas.py @@ -15,10 +15,6 @@ class AllocationRequest(OctantModel): project_address: Address = Field(..., alias="proposalAddress") amount: BigInteger - # first_name: str = Field(..., alias='firstName') - # last_name: str = Field(..., alias='lastName') - # age: int = Field(..., alias='age') - class UserAllocationRequestPayloadV1(OctantModel): allocations: list[AllocationRequest] diff --git a/backend/v2/allocations/services.py b/backend/v2/allocations/services.py index d4e18593d8..04d227534e 100644 --- a/backend/v2/allocations/services.py +++ b/backend/v2/allocations/services.py @@ -3,20 +3,19 @@ from app import exceptions from sqlalchemy.ext.asyncio import AsyncSession +from v2.allocations.repositories import ( + get_allocations_with_user_uqs, + soft_delete_user_allocations_by_epoch, + store_allocation_request, +) +from v2.allocations.schemas import AllocationWithUserUQScore, UserAllocationRequest +from v2.allocations.validators import SignatureVerifier from v2.matched_rewards.services import MatchedRewardsEstimator from v2.project_rewards.capped_quadriatic import cqf_simulate_leverage from v2.projects.contracts import ProjectsContracts from v2.uniqueness_quotients.dependencies import UQScoreGetter from v2.users.repositories import get_user_by_address -from .repositories import ( - get_allocations_with_user_uqs, - soft_delete_user_allocations_by_epoch, - store_allocation_request, -) -from .schemas import AllocationWithUserUQScore, UserAllocationRequest -from .validators import SignatureVerifier - @dataclass class Allocator: diff --git a/backend/v2/allocations/socket.py b/backend/v2/allocations/socket.py index 5e501b6813..23d9dbecfc 100644 --- a/backend/v2/allocations/socket.py +++ b/backend/v2/allocations/socket.py @@ -4,9 +4,11 @@ from typing import AsyncGenerator, Tuple import socketio +from app.exceptions import OctantException from sqlalchemy.ext.asyncio import AsyncSession from v2.allocations.dependencies import get_allocator, get_signature_verifier from v2.allocations.repositories import get_donations_by_project +from v2.allocations.schemas import UserAllocationRequest, UserAllocationRequestV1 from v2.allocations.services import Allocator from v2.core.dependencies import ( get_chain_settings, @@ -17,11 +19,11 @@ ) from v2.core.exceptions import AllocationWindowClosed from v2.epochs.dependencies import ( - assert_allocation_window_open, get_epochs_contracts, get_epochs_settings, get_epochs_subgraph, get_epochs_subgraph_settings, + get_open_allocation_window_epoch_number, ) from v2.matched_rewards.dependencies import ( get_matched_rewards_estimator, @@ -41,8 +43,6 @@ get_uq_score_settings, ) -from .schemas import UserAllocationRequest, UserAllocationRequestV1 - @asynccontextmanager async def 
create_dependencies_on_connect() -> AsyncGenerator[ @@ -57,7 +57,7 @@ async def create_dependencies_on_connect() -> AsyncGenerator[ # We do not handle requests outside of pending epoch state (Allocation Window) # This will raise an exception if the allocation window is closed and connection does not happen - epoch_number = await assert_allocation_window_open(epochs_contracts) + epoch_number = await get_open_allocation_window_epoch_number(epochs_contracts) projects_contracts = get_projects_contracts(w3, get_projects_settings()) epochs_subgraph = get_epochs_subgraph(get_epochs_subgraph_settings()) @@ -96,11 +96,9 @@ async def create_dependencies_on_connect() -> AsyncGenerator[ # Yield the dependencies to the on_connect handler yield (s4, threshold_getter, estimated_project_rewards) - except Exception: + except Exception as e: await cleanup_sessions(s1, s2, s3, s4) - raise - finally: - pass + raise e @asynccontextmanager @@ -122,7 +120,7 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ # We do not handle requests outside of pending epoch state (Allocation Window) # This will raise an exception if the allocation window is closed and connection does not happen - epoch_number = await assert_allocation_window_open(epochs_contracts) + epoch_number = await get_open_allocation_window_epoch_number(epochs_contracts) projects_contracts = get_projects_contracts(w3, get_projects_settings()) epochs_subgraph = get_epochs_subgraph(get_epochs_subgraph_settings()) @@ -188,11 +186,9 @@ async def create_dependencies_on_allocate() -> AsyncGenerator[ estimated_project_rewards, ) - except Exception: + except Exception as e: await cleanup_sessions(s1, s2, s3, s4, s5, s6, s7) - raise - finally: - pass + raise e class AllocateNamespace(socketio.AsyncNamespace): @@ -247,10 +243,15 @@ async def handle_on_connect(self, sid: str, environ: dict): async def on_connect(self, sid: str, environ: dict): try: await self.handle_on_connect(sid, environ) + except AllocationWindowClosed: logging.info("Allocation window is closed, connection not established") + + except OctantException as e: + logging.error(f"OctantException({e.__class__.__name__}): {e}") + except Exception as e: - logging.error(f"Error handling on_connect: {e}") + logging.error(f"Error handling on_connect ({e.__class__.__name__}): {e}") async def on_disconnect(self, sid): logging.debug("Client disconnected") @@ -312,10 +313,13 @@ async def on_allocate(self, sid: str, data: str): await self.handle_on_allocate(sid, data) except AllocationWindowClosed: - logging.info("Allocation window is closed, allocation not processed") + logging.info("Allocation window is closed, connection not established") + + except OctantException as e: + logging.error(f"OctantException({e.__class__.__name__}): {e.message}") except Exception as e: - logging.error(f"Error handling on_allocate: {e}") + logging.error(f"Error handling on_allocate ({e.__class__.__name__}): {e}") def from_dict(data: str) -> UserAllocationRequest: diff --git a/backend/v2/allocations/validators.py b/backend/v2/allocations/validators.py index e93c3a589d..3311b4c555 100644 --- a/backend/v2/allocations/validators.py +++ b/backend/v2/allocations/validators.py @@ -4,6 +4,8 @@ from app import exceptions from app.modules.common.crypto.signature import EncodingStandardFor, encode_for_signing from sqlalchemy.ext.asyncio import AsyncSession +from v2.allocations.repositories import get_last_allocation_request_nonce +from v2.allocations.schemas import UserAllocationRequest from v2.core.types import Address from 
v2.crypto.signatures import verify_signed_message
 from v2.epochs.subgraphs import EpochsSubgraph
@@ -14,9 +16,6 @@
 )
 from web3 import AsyncWeb3
 
-from .repositories import get_last_allocation_request_nonce
-from .schemas import UserAllocationRequest
-
 
 @dataclass
 class SignatureVerifier:
diff --git a/backend/v2/core/dependencies.py b/backend/v2/core/dependencies.py
index 43c309abc2..3fbd366444 100644
--- a/backend/v2/core/dependencies.py
+++ b/backend/v2/core/dependencies.py
@@ -1,7 +1,6 @@
 from functools import lru_cache
 from typing import Annotated, AsyncGenerator
 
-from app.infrastructure.database.models import BaseModel
 from fastapi import Depends
 from pydantic import Field
 from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -48,13 +47,6 @@ def get_database_settings() -> DatabaseSettings:
     return DatabaseSettings()  # type: ignore[call-arg]
 
 
-async def create_tables():
-    settings = DatabaseSettings()
-    engine = create_async_engine(settings.sqlalchemy_database_uri)
-    async with engine.begin() as conn:
-        await conn.run_sync(BaseModel.metadata.create_all)
-
-
 @lru_cache(1)
 def get_sessionmaker(
     settings: Annotated[DatabaseSettings, Depends(get_database_settings)]
@@ -148,7 +140,7 @@ def url(self) -> str:
 
 
 def get_socketio_settings() -> SocketioSettings:
-    return SocketioSettings()
+    return SocketioSettings()  # type: ignore[call-arg]
 
 
 GetSocketioSettings = Annotated[SocketioSettings, Depends(get_socketio_settings)]
diff --git a/backend/v2/deposits/dependencies.py b/backend/v2/deposits/dependencies.py
index 57a2a36671..cbe9a36500 100644
--- a/backend/v2/deposits/dependencies.py
+++ b/backend/v2/deposits/dependencies.py
@@ -2,8 +2,7 @@
 
 from fastapi import Depends
 from v2.core.dependencies import OctantSettings, Web3
-
-from .contracts import DEPOSITS_ABI, DepositsContracts
+from v2.deposits.contracts import DEPOSITS_ABI, DepositsContracts
 
 
 class DepositsSettings(OctantSettings):
diff --git a/backend/v2/epochs/dependencies.py b/backend/v2/epochs/dependencies.py
index 8b0d543a75..3315a7e386 100644
--- a/backend/v2/epochs/dependencies.py
+++ b/backend/v2/epochs/dependencies.py
@@ -3,9 +3,8 @@
 from fastapi import Depends
 from v2.core.dependencies import OctantSettings, Web3
 from v2.core.exceptions import AllocationWindowClosed
-
-from .contracts import EPOCHS_ABI, EpochsContracts
-from .subgraphs import EpochsSubgraph
+from v2.epochs.contracts import EPOCHS_ABI, EpochsContracts
+from v2.epochs.subgraphs import EpochsSubgraph
 
 
 class EpochsSettings(OctantSettings):
@@ -28,10 +27,12 @@ def get_epochs_contracts(
 ]
 
 
-async def assert_allocation_window_open(
+async def get_open_allocation_window_epoch_number(
     epochs_contracts: GetEpochsContracts,
 ) -> int:
-    """Asserts that the allocation window is open and returns the current epoch number."""
+    """Returns the current epoch number only if the allocation window is open;
+    otherwise raises AllocationWindowClosed.
+    """
 
     epoch_number = await epochs_contracts.get_pending_epoch()
     if epoch_number is None:
@@ -40,9 +41,9 @@
 
     return epoch_number
 
 
-AssertAllocationWindowOpen = Annotated[
+GetOpenAllocationWindowEpochNumber = Annotated[
     int,
-    Depends(assert_allocation_window_open),
+    Depends(get_open_allocation_window_epoch_number),
 ]
diff --git a/backend/v2/glms/dependencies.py b/backend/v2/glms/dependencies.py
index b0fb5075c1..44debdc9ce 100644
--- a/backend/v2/glms/dependencies.py
+++ b/backend/v2/glms/dependencies.py
@@ -2,8 +2,7 @@
 
 from fastapi import Depends
 from v2.core.dependencies import OctantSettings, Web3
-
-from .contracts import ERC20_ABI, GLMContracts
+from v2.glms.contracts import ERC20_ABI, GLMContracts
 
 
 class GLMSettings(OctantSettings):
diff --git a/backend/v2/main.py b/backend/v2/main.py
index 17bb7444d3..122bf435cb 100644
--- a/backend/v2/main.py
+++ b/backend/v2/main.py
@@ -13,10 +13,10 @@
 from v2.core.dependencies import get_socketio_settings
 from v2.project_rewards.router import api as project_rewards_api
 
-fastapi_app = FastAPI()
+app = FastAPI()
 
 
-@fastapi_app.exception_handler(OctantException)
+@app.exception_handler(OctantException)
 async def handle_octant_exception(request, ex: OctantException):
     return JSONResponse(
         status_code=ex.status_code,
@@ -24,7 +24,7 @@ async def handle_octant_exception(request, ex: OctantException):
     )
 
 
-@fastapi_app.exception_handler(SQLAlchemyError)
+@app.exception_handler(SQLAlchemyError)
 async def handle_sqlalchemy_exception(request, ex: SQLAlchemyError):
     logging.error(f"SQLAlchemyError: {ex}")
     return JSONResponse(
@@ -33,11 +33,6 @@ async def handle_sqlalchemy_exception(request, ex: SQLAlchemyError):
     )
 
 
-@fastapi_app.get("/fastapi-endpoint")
-async def fastapi_endpoint():
-    return {"message": "This is a FastAPI endpoint."}
-
-
 def get_socketio_manager() -> socketio.AsyncRedisManager | None:
     if os.environ.get("SOCKETIO_MANAGER_TYPE") != "redis":
         logging.info("Initializing socketio manager to default in-memory manager")
@@ -50,6 +45,9 @@ def get_socketio_manager() -> socketio.AsyncRedisManager | None:
         # Test the connection
         redis_client.ping()
         # If successful, return the AsyncRedisManager
+        logging.info(
+            f"Initialized socketio manager to redis://{settings.host}:{settings.port}/{settings.db}"
+        )
         return socketio.AsyncRedisManager(settings.url)
     except Exception as e:
         logging.error(f"Failed to establish Redis connection: {str(e)}")
@@ -61,12 +59,10 @@ def get_socketio_manager() -> socketio.AsyncRedisManager | None:
     cors_allowed_origins="*", async_mode="asgi", client_manager=mgr
 )
 sio.register_namespace(AllocateNamespace("/"))
-sio_asgi_app = socketio.ASGIApp(socketio_server=sio, other_asgi_app=fastapi_app)
+sio_asgi_app = socketio.ASGIApp(socketio_server=sio, other_asgi_app=app)
 
-fastapi_app.add_route("/socket.io/", route=sio_asgi_app)
-fastapi_app.add_websocket_route("/socket.io/", sio_asgi_app)
+app.add_route("/socket.io/", route=sio_asgi_app)
+app.add_websocket_route("/socket.io/", sio_asgi_app)
 
-fastapi_app.include_router(allocations_api)
-fastapi_app.include_router(project_rewards_api)
-# from v2.core.dependencies import create_tables
-# fastapi_app.add_event_handler("startup", create_tables)
+app.include_router(allocations_api)
+app.include_router(project_rewards_api)
diff --git a/backend/v2/matched_rewards/dependencies.py b/backend/v2/matched_rewards/dependencies.py
index cd0af7c34d..cdafa0ed3f 100644
--- a/backend/v2/matched_rewards/dependencies.py
+++ b/backend/v2/matched_rewards/dependencies.py
@@ -4,10 +4,12 @@
 from fastapi import Depends
 from pydantic import Field
 from v2.core.dependencies import GetSession, OctantSettings
-from v2.epochs.dependencies import AssertAllocationWindowOpen, get_epochs_subgraph
+from v2.epochs.dependencies import (
+    GetOpenAllocationWindowEpochNumber,
+    get_epochs_subgraph,
+)
 from v2.epochs.subgraphs import EpochsSubgraph
-
-from .services import MatchedRewardsEstimator
+from v2.matched_rewards.services import MatchedRewardsEstimator
 
 
 class MatchedRewardsEstimatorSettings(OctantSettings):
@@ -27,7 +29,7 @@ def get_matched_rewards_estimator_settings() -> MatchedRewardsEstimatorSettings:
 
 
 async def get_matched_rewards_estimator(
-    epoch_number: AssertAllocationWindowOpen,
+    epoch_number: GetOpenAllocationWindowEpochNumber,
     session: GetSession,
     epochs_subgraph: Annotated[EpochsSubgraph, Depends(get_epochs_subgraph)],
     settings: Annotated[
diff --git a/backend/v2/project_rewards/capped_quadriatic.py b/backend/v2/project_rewards/capped_quadriatic.py
index c26fac79f8..8fa3197c94 100644
--- a/backend/v2/project_rewards/capped_quadriatic.py
+++ b/backend/v2/project_rewards/capped_quadriatic.py
@@ -5,8 +5,7 @@
 
 from v2.allocations.schemas import AllocationWithUserUQScore
 from v2.core.types import Address
-
-from .schemas import ProjectFundingSummary
+from v2.project_rewards.schemas import ProjectFundingSummary
 
 
 class CappedQuadriaticFunding(NamedTuple):
diff --git a/backend/v2/project_rewards/dependencies.py b/backend/v2/project_rewards/dependencies.py
index 2060c90382..fad325071e 100644
--- a/backend/v2/project_rewards/dependencies.py
+++ b/backend/v2/project_rewards/dependencies.py
@@ -2,15 +2,14 @@
 
 from fastapi import Depends
 from v2.core.dependencies import GetSession
-from v2.epochs.dependencies import AssertAllocationWindowOpen
+from v2.epochs.dependencies import GetOpenAllocationWindowEpochNumber
 from v2.matched_rewards.dependencies import GetMatchedRewardsEstimator
+from v2.project_rewards.services import ProjectRewardsEstimator
 from v2.projects.dependencies import GetProjectsContracts
 
-from .services import ProjectRewardsEstimator
-
 
 async def get_project_rewards_estimator(
-    epoch_number: AssertAllocationWindowOpen,
+    epoch_number: GetOpenAllocationWindowEpochNumber,
     session: GetSession,
     projects_contracts: GetProjectsContracts,
     estimated_project_matched_rewards: GetMatchedRewardsEstimator,
diff --git a/backend/v2/project_rewards/router.py b/backend/v2/project_rewards/router.py
index 8717c0b030..7a68f8e531 100644
--- a/backend/v2/project_rewards/router.py
+++ b/backend/v2/project_rewards/router.py
@@ -1,7 +1,6 @@
 from fastapi import APIRouter
-
-from .dependencies import GetProjectRewardsEstimator
-from .schemas import EstimatedProjectRewardsResponse
+from v2.project_rewards.dependencies import GetProjectRewardsEstimator
+from v2.project_rewards.schemas import EstimatedProjectRewardsResponse
 
 api = APIRouter(prefix="/rewards", tags=["Allocations"])
 
@@ -16,13 +15,8 @@ async def get_estimated_project_rewards(
     This endpoint is available only for the pending epoch state.
     """
 
-    import time
-
-    start = time.time()
     estimated_funding = await project_rewards_estimator.get()
-    print("get_estimated_project_rewards took", time.time() - start, "seconds")
-
     return EstimatedProjectRewardsResponse(
         rewards=[f for f in estimated_funding.project_fundings.values()]
     )
diff --git a/backend/v2/project_rewards/services.py b/backend/v2/project_rewards/services.py
index 82d93aeeb1..76e6b43734 100644
--- a/backend/v2/project_rewards/services.py
+++ b/backend/v2/project_rewards/services.py
@@ -4,10 +4,12 @@
 
 from sqlalchemy.ext.asyncio import AsyncSession
 from v2.allocations.repositories import get_allocations_with_user_uqs
 from v2.matched_rewards.services import MatchedRewardsEstimator
+from v2.project_rewards.capped_quadriatic import (
+    CappedQuadriaticFunding,
+    capped_quadriatic_funding,
+)
 from v2.projects.contracts import ProjectsContracts
 
-from .capped_quadriatic import CappedQuadriaticFunding, capped_quadriatic_funding
-
 
 @dataclass
 class ProjectRewardsEstimator:
diff --git a/backend/v2/projects/dependencies.py b/backend/v2/projects/dependencies.py
index 3e148b7dee..3cf6bfef4e 100644
--- a/backend/v2/projects/dependencies.py
+++ b/backend/v2/projects/dependencies.py
@@ -3,10 +3,9 @@
 from fastapi import Depends
 from pydantic import Field
 from v2.core.dependencies import GetSession, OctantSettings, Web3
-from v2.epochs.dependencies import AssertAllocationWindowOpen
-
-from .contracts import PROJECTS_ABI, ProjectsContracts
-from .services import ProjectsAllocationThresholdGetter
+from v2.epochs.dependencies import GetOpenAllocationWindowEpochNumber
+from v2.projects.contracts import PROJECTS_ABI, ProjectsContracts
+from v2.projects.services import ProjectsAllocationThresholdGetter
 
 
 class ProjectsSettings(OctantSettings):
@@ -43,7 +42,7 @@ def get_projects_allocation_threshold_settings() -> ProjectsAllocationThresholdS
 
 
 def get_projects_allocation_threshold_getter(
-    epoch_number: AssertAllocationWindowOpen,
+    epoch_number: GetOpenAllocationWindowEpochNumber,
     session: GetSession,
     projects: GetProjectsContracts,
     settings: Annotated[
diff --git a/backend/v2/uniqueness_quotients/dependencies.py b/backend/v2/uniqueness_quotients/dependencies.py
index d0b99b35e7..a5c110a7e2 100644
--- a/backend/v2/uniqueness_quotients/dependencies.py
+++ b/backend/v2/uniqueness_quotients/dependencies.py
@@ -13,8 +13,7 @@
 from pydantic import Field, TypeAdapter
 from v2.core.dependencies import GetChainSettings, GetSession, OctantSettings
 from v2.core.types import Address
-
-from .services import UQScoreGetter
+from v2.uniqueness_quotients.services import UQScoreGetter
 
 
 class UQScoreSettings(OctantSettings):
diff --git a/backend/v2/uniqueness_quotients/repositories.py b/backend/v2/uniqueness_quotients/repositories.py
index 80f12bf679..bb4653b6df 100644
--- a/backend/v2/uniqueness_quotients/repositories.py
+++ b/backend/v2/uniqueness_quotients/repositories.py
@@ -63,12 +63,4 @@ async def get_gp_stamps_by_address(
         .limit(1)
     )
 
-    # result = await session.execute(
-    #     select(GPStamps)
-    #     .join(User)
-    #     .filter(User.address == to_checksum_address(user_address))
-    #     .order_by(GPStamps.created_at.desc())
-    #     .limit(1)
-    # )
-
     return result
diff --git a/backend/v2/uniqueness_quotients/services.py b/backend/v2/uniqueness_quotients/services.py
index 6f7dfddb0b..9f31ad3bda 100644
--- a/backend/v2/uniqueness_quotients/services.py
+++ b/backend/v2/uniqueness_quotients/services.py
@@ -8,8 +8,7 @@
 from eth_utils import to_checksum_address
 from sqlalchemy.ext.asyncio import AsyncSession
 from v2.core.types import Address
-
-from .repositories import (
+from v2.uniqueness_quotients.repositories import (
     get_gp_stamps_by_address,
     get_uq_score_by_user_address,
     save_uq_score_for_user_address,
diff --git a/backend/v2/user_patron_mode/repositories.py b/backend/v2/user_patron_mode/repositories.py
index 5ba6511d63..903660a5ef 100644
--- a/backend/v2/user_patron_mode/repositories.py
+++ b/backend/v2/user_patron_mode/repositories.py
@@ -1,47 +1,37 @@
 from datetime import datetime
-from typing import List
 
 from app.infrastructure.database.models import Budget, PatronModeEvent, User
 from sqlalchemy import Numeric, cast, func
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
-from sqlalchemy.orm import aliased
 from v2.core.types import Address
 from v2.users.repositories import get_user_by_address
 
 
 async def get_all_patrons_at_timestamp(
     session: AsyncSession, dt: datetime
-) -> List[str]:
+) -> list[str]:
     """
-    From PatronModeEvent table, get all the user addresses that have patron_mode_enabled=True at a given timestamp.
+    Get all the user addresses that have patron_mode_enabled=True at the given timestamp.
     """
-    subquery = (
-        select(
-            PatronModeEvent.user_address,
-            PatronModeEvent.patron_mode_enabled,
-            PatronModeEvent.created_at,
-        )
+    results = await session.execute(
+        select(PatronModeEvent.user_address)
         .filter(PatronModeEvent.created_at <= dt)
-        .order_by(PatronModeEvent.user_address, PatronModeEvent.created_at.desc())
-        .subquery()
-    )
-
-    alias = aliased(PatronModeEvent, subquery)
-
-    result = await session.execute(
-        select(alias.user_address)
-        .filter(alias.patron_mode_enabled)
-        .group_by(alias.user_address)
+        .group_by(PatronModeEvent.user_address)
+        .having(
+            func.max(PatronModeEvent.created_at).filter(
+                PatronModeEvent.patron_mode_enabled
+            )
+            == func.max(PatronModeEvent.created_at)
+        )
     )
 
-    patrons = [row[0] for row in result.fetchall()]
-    return patrons
+    return [row[0] for row in results.all()]
 
 
 async def get_budget_sum_by_users_addresses_and_epoch(
-    session: AsyncSession, users_addresses: List[str], epoch_number: int
+    session: AsyncSession, users_addresses: list[str], epoch_number: int
 ) -> int:
     """
     Sum the budgets of given users for a given epoch.

From 36e4a968628b4f9482efb691ae4662e8b5ff339d Mon Sep 17 00:00:00 2001
From: adam-gf
Date: Fri, 18 Oct 2024 12:49:15 +0200
Subject: [PATCH 31/31] linter formatting fix

---
 backend/startup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/startup.py b/backend/startup.py
index 54d30052df..9947a2aa48 100644
--- a/backend/startup.py
+++ b/backend/startup.py
@@ -61,12 +61,12 @@ def teardown_session(*args, **kwargs):
 class PathCheckMiddleware(BaseHTTPMiddleware):
     async def dispatch(self, request: Request, call_next):
         path = request.url.path
-        
+
         for route in fastapi_app.routes:
             if path == route.path:
                 # If path exists, proceed with the request
                 return await call_next(request)
-        
+
         # If path does not exist, modify the request to forward to the Flask app
         if path.startswith("/flask"):
             return await call_next(request)