Skip to content

Commit

Permalink
Fix Failing Tests (#172)
Browse files Browse the repository at this point in the history
In an effort to introduce stronger testing and coverage reporting, we stumbled upon some e2e/integration tests that were failing due to recent changes that had been applied to the V2 queries but not to the V1 queries. This PR repairs them.
  • Loading branch information
bh2smith authored Jan 10, 2023
1 parent 9908465 commit e7bef0f
Show file tree
Hide file tree
Showing 4 changed files with 81 additions and 72 deletions.
22 changes: 11 additions & 11 deletions queries/dune_v1/risk_free_batches.sql
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
-- Query Here: https://dune.com/queries/1432733
-- Query Here: https://dune.com/queries/1870864
-- The following query shows a complete list of all different selectors
-- used on cow protocol and so far there are no collisions.
-- We can monitor this for new risk free events:
Expand All @@ -17,24 +17,24 @@ with interactions as (select selector,
from gnosis_protocol_v2."GPv2Settlement_evt_Interaction"
where evt_block_time between '{{StartTime}}' and '{{EndTime}}'),

no_interactions as (select evt_tx_hash
from gnosis_protocol_v2."GPv2Settlement_evt_Settlement"
where evt_block_time between '{{StartTime}}' and '{{EndTime}}'
and evt_tx_hash not in (select evt_tx_hash
no_interactions as (select tx_hash
from gnosis_protocol_v2."batches"
where block_time between '{{StartTime}}' and '{{EndTime}}'
and tx_hash not in (select evt_tx_hash
from gnosis_protocol_v2."GPv2Settlement_evt_Interaction"
where evt_block_time between '{{StartTime}}' and '{{EndTime}}')),

batch_interaction_counts as (select s.evt_tx_hash,
batch_interaction_counts as (select tx_hash,
count(*) as num_interactions,
sum(case when risk_free = true then 1 else 0 end) as num_risk_fee
from gnosis_protocol_v2."GPv2Settlement_evt_Settlement" s
from gnosis_protocol_v2."batches" s
inner join gnosis_protocol_v2."GPv2Settlement_evt_Interaction" i
on s.evt_tx_hash = i.evt_tx_hash
on tx_hash = i.evt_tx_hash
inner join interactions i2
on i.selector = i2.selector
and i.target = i2.target
where s.evt_block_time between '{{StartTime}}' and '{{EndTime}}'
group by s.evt_tx_hash),
where block_time between '{{StartTime}}' and '{{EndTime}}'
group by tx_hash),

combined_risk_free_batches as (select *
from batch_interaction_counts
Expand All @@ -43,5 +43,5 @@ with interactions as (select selector,
select *, 0 as num_interactions, 0 as risk_free
from no_interactions)

select concat('0x', encode(evt_tx_hash, 'hex')) as tx_hash
select concat('0x', encode(tx_hash, 'hex')) as tx_hash
from combined_risk_free_batches
2 changes: 1 addition & 1 deletion src/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def with_params(
"RISK_FREE_BATCHES": QueryData(
name="Risk Free Batches",
filepath="risk_free_batches.sql",
v1_id=1432733,
v1_id=1870864,
v2_id=1788438,
),
"VOUCH_REGISTRY": QueryData(
Expand Down
32 changes: 18 additions & 14 deletions tests/e2e/test_transfer_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from src.fetch.transfer_file import Transfer
from src.models.accounting_period import AccountingPeriod
from src.models.overdraft import Overdraft
from src.models.slippage import SolverSlippage
from src.models.slippage import SplitSlippages
from src.models.split_transfers import SplitTransfers
from src.models.token import Token
from src.utils.print_store import PrintStore
Expand All @@ -31,23 +31,27 @@ def test_process_transfers(self):
Transfer(token=cow_token, receiver=barn_zerox, amount_wei=600 * ONE_ETH),
Transfer(token=cow_token, receiver=other_solver, amount_wei=2000 * ONE_ETH),
]

barn_slippage = SolverSlippage(
amount_wei=-324697366789535540,
solver_name="barn-0x",
solver_address=barn_zerox,
)
other_slippage = SolverSlippage(
amount_wei=-11 * 10**17,
solver_name="Other Solver",
solver_address=other_solver,
slippages = SplitSlippages.from_data_set(
[
# Barn Slippage
{
"eth_slippage_wei": -324697366789535540,
"solver_name": "barn-0x",
"solver_address": barn_zerox.address,
},
# Other Slippage
{
"eth_slippage_wei": -11 * 10**17,
"solver_name": "Other Solver",
"solver_address": other_solver.address,
},
]
)
indexed_slippage = {barn_zerox: barn_slippage, other_solver: other_slippage}
cow_redirects = {}

accounting = SplitTransfers(period, mixed_transfers)
accounting = SplitTransfers(period, mixed_transfers, PrintStore())

transfers = accounting.process(indexed_slippage, cow_redirects, PrintStore())
transfers = accounting.process(slippages, cow_redirects)
# The only remaining transfer is the other_solver's COW reward.
self.assertEqual(
transfers,
Expand Down
97 changes: 51 additions & 46 deletions tests/integration/test_solver_slippage.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,14 @@ def get_cte_results(

def test_batch_transfers(self):
table_name = "batch_transfers"
data = get_slippage_cte_rows(
self.dune,
table_name,
self.period,
v1_cache="01GJQNSER0PYSMGHMEBDT03805",
v2_cache="01GJQNSER0PYSMGHMEBDT03805",
data = Comparison.from_dune_results(
*get_slippage_cte_rows(
self.dune,
table_name,
self.period,
v1_cache="01GPDRF9C3S4EESEA6525NAJA1",
v2_cache="01GPDRFZ8230XZ03DA2RT8QCC4",
)
)

# No missing transactions.
Expand All @@ -113,12 +115,14 @@ def test_batch_transfers(self):
def test_incoming_and_outgoing(self):
# This test demonstrates that records are "essentially" matching up to this table.
table_name = "incoming_and_outgoing"
data = get_slippage_cte_rows(
self.dune,
table_name,
self.period,
v1_cache="01GJR1HKR37V7HRPTRJCDMZBAX",
v2_cache="01GJR1HTNS87RKTV90WKH4TVSC",
data = Comparison.from_dune_results(
*get_slippage_cte_rows(
self.dune,
table_name,
self.period,
v1_cache="01GJR1HKR37V7HRPTRJCDMZBAX",
v2_cache="01GJR1HTNS87RKTV90WKH4TVSC",
)
)

# There are 14 records in missing_v2 for the specified accounting period.
Expand Down Expand Up @@ -148,47 +152,48 @@ def test_incoming_and_outgoing(self):

def test_final_token_balance_sheet(self):
table_name = "final_token_balance_sheet"
data = get_slippage_cte_rows(
self.dune,
table_name,
self.period,
# Results for Period(2022-11-01)
# v1_not_v2 = 172 batches
# v2_not_v1 = 107 batches
# overlap = 3062 batches
# Check out the missing records for this period:
# http://jsonblob.com/1046134755808264192
v1_cache="01GJR2PTEXWT63HVG6WZ7PXB4R",
v2_cache="01GJR2Q0CWVKKRZ7J53RC463X9",
# --------------------------------------
# Results for Period(2022-11-08)
# v1_not_v2 = 160 batches
# v2_not_v1 = 122 batches
# overlap = 4378 batches
# v1_cache="01GJTHE88DH5TFHRDX9D8H39XK",
# v2_cache="01GJTHEK7BRZ8G4N10TGXBJ1W3",
# --------------------------------------
# Results for Period(2022-11-08)
# v1_not_v2 = 160 batches
# v2_not_v1 = 90 batches
# overlap = 3598 batches
# v1_cache="",
# v2_cache="",
# --------------------------------------
# Results for Period(2022-10-04)
# v1_not_v2 = 99 batches
# v2_not_v1 = 63 batches
# overlap = 2491 batches
# v1_cache="",
# v2_cache="",
data = Comparison.from_dune_results(
*get_slippage_cte_rows(
self.dune,
table_name,
self.period,
# Results for Period(2022-11-01)
# v1_not_v2 = 172 batches
# v2_not_v1 = 107 batches
# overlap = 3062 batches
# Check out the missing records for this period:
# http://jsonblob.com/1046134755808264192
v1_cache="01GJR2PTEXWT63HVG6WZ7PXB4R",
v2_cache="01GJR2Q0CWVKKRZ7J53RC463X9",
# --------------------------------------
# Results for Period(2022-11-08)
# v1_not_v2 = 160 batches
# v2_not_v1 = 122 batches
# overlap = 4378 batches
# v1_cache="01GJTHE88DH5TFHRDX9D8H39XK",
# v2_cache="01GJTHEK7BRZ8G4N10TGXBJ1W3",
# --------------------------------------
# Results for Period(2022-11-08)
# v1_not_v2 = 160 batches
# v2_not_v1 = 90 batches
# overlap = 3598 batches
# v1_cache="",
# v2_cache="",
# --------------------------------------
# Results for Period(2022-10-04)
# v1_not_v2 = 99 batches
# v2_not_v1 = 63 batches
# overlap = 2491 batches
# v1_cache="",
# v2_cache="",
)
)
num_outliers = len(data.v1_not_v2) + len(data.v2_not_v1)
size_overlap = len(data.overlap)
# (v1 (-------overlap-------) v2)
# |--A--|----------D----------|--B--|
# assert (A + B) / D < 10%
self.assertLess(num_outliers / size_overlap, 0.1)
print(data)
data.describe_missing()

def test_similar_slippage_for_period(self):
Expand Down

0 comments on commit e7bef0f

Please sign in to comment.