Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

refactor pre_validate_blocks_multiprocessing #18469

Merged
merged 22 commits into from
Sep 6, 2024
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions chia/_tests/blockchain/test_blockchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -3352,6 +3352,7 @@ async def test_long_reorg(
# can catch up
fork_block = default_10000_blocks[num_blocks_chain_2_start - 200]
fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash)
await b.warmup(fork_block.height)
for block in blocks:
if (block.height % 128) == 0:
peak = b.get_peak()
Expand Down
64 changes: 7 additions & 57 deletions chia/_tests/core/full_node/test_full_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from chia._tests.core.full_node.stores.test_coin_store import get_future_reward_coins
from chia._tests.core.make_block_generator import make_spend_bundle
from chia._tests.core.node_height import node_height_at_least
from chia._tests.util.misc import wallet_height_at_least
from chia._tests.util.misc import add_blocks_in_batches, wallet_height_at_least
from chia._tests.util.setup_nodes import SimulatorsAndWalletsServices
from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_custom_interval, time_out_messages
from chia.consensus.block_body_validation import ForkInfo
Expand Down Expand Up @@ -59,7 +59,6 @@
from chia.types.peer_info import PeerInfo, TimestampedPeerInfo
from chia.types.spend_bundle import SpendBundle, estimate_fees
from chia.types.unfinished_block import UnfinishedBlock
from chia.util.batches import to_batches
from chia.util.errors import ConsensusError, Err
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64, uint128
Expand Down Expand Up @@ -125,23 +124,11 @@ async def test_sync_no_farmer(
blocks = default_1000_blocks

# full node 1 has the complete chain
for block_batch in to_batches(blocks, 64):
success, change, err = await full_node_1.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True

await add_blocks_in_batches(blocks, full_node_1.full_node)
target_peak = full_node_1.full_node.blockchain.get_peak()

# full node 2 is behind by 800 blocks
for block_batch in to_batches(blocks[:-800], 64):
success, change, err = await full_node_2.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True

await add_blocks_in_batches(blocks[:-800], full_node_2.full_node)
# connect the nodes and wait for node 2 to sync up to node 1
await connect_and_get_peer(server_1, server_2, self_hostname)

Expand Down Expand Up @@ -2272,16 +2259,7 @@ async def test_long_reorg(
else:
reorg_blocks = test_long_reorg_blocks[:1200]

for block_batch in to_batches(blocks, 64):
b = block_batch.entries[0]
if (b.height % 128) == 0:
print(f"main chain: {b.height:4} weight: {b.weight}")
success, change, err = await node.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True

await add_blocks_in_batches(blocks, node.full_node)
peak = node.full_node.blockchain.get_peak()
chain_1_height = peak.height
chain_1_weight = peak.weight
Expand Down Expand Up @@ -2365,29 +2343,10 @@ async def test_long_reorg_nodes(
reorg_blocks = test_long_reorg_blocks_light[: 1600 - chain_length]
else:
reorg_blocks = test_long_reorg_blocks[: 1200 - chain_length]

# full node 1 has the original chain
for block_batch in to_batches(blocks, 64):
b = block_batch.entries[0]
if (b.height % 128) == 0:
print(f"main chain: {b.height:4} weight: {b.weight}")
success, change, err = await full_node_1.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True
await add_blocks_in_batches(blocks, full_node_1.full_node)

# full node 2 has the reorg-chain
for block_batch in to_batches(reorg_blocks[:-1], 64):
b = block_batch.entries[0]
if (b.height % 128) == 0:
print(f"reorg chain: {b.height:4} weight: {b.weight}")
success, change, err = await full_node_2.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True

await add_blocks_in_batches(reorg_blocks[:-1], full_node_2.full_node)
await connect_and_get_peer(full_node_1.full_node.server, full_node_2.full_node.server, self_hostname)

# TODO: There appears to be an issue where the node with the lighter chain
Expand Down Expand Up @@ -2417,16 +2376,7 @@ def check_nodes_in_sync():
blocks = default_10000_blocks[:4000]

# full node 3 has the original chain, but even longer
for block_batch in to_batches(blocks, 64):
b = block_batch.entries[0]
if (b.height % 128) == 0:
print(f"main chain: {b.height:4} weight: {b.weight}")
success, change, err = await full_node_3.full_node.add_block_batch(
block_batch.entries, PeerInfo("0.0.0.0", 8884), None
)
assert err is None
assert success is True

await add_blocks_in_batches(blocks, full_node_3.full_node)
print("connecting node 3")
await connect_and_get_peer(full_node_3.full_node.server, full_node_1.full_node.server, self_hostname)
await connect_and_get_peer(full_node_3.full_node.server, full_node_2.full_node.server, self_hostname)
Expand Down
5 changes: 2 additions & 3 deletions chia/_tests/core/mempool/test_mempool_performance.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import pytest

from chia._tests.util.misc import BenchmarkRunner, wallet_height_at_least
from chia._tests.util.misc import BenchmarkRunner, add_blocks_in_batches, wallet_height_at_least
from chia._tests.util.setup_nodes import OldSimulatorsAndWallets
from chia._tests.util.time_out_assert import time_out_assert
from chia.types.full_block import FullBlock
Expand Down Expand Up @@ -37,8 +37,7 @@ async def test_mempool_update_performance(
# We need an initialized mempool as we want to add a transaction, so we use
# the first block to achieve that
await full_node.add_block(blocks[0])
await full_node.add_block_batch(blocks[1:], PeerInfo("0.0.0.0", 0), None)

await add_blocks_in_batches(blocks[1:], full_node)
await wallet_server.start_client(PeerInfo(self_hostname, full_node.server.get_port()), None)
await time_out_assert(30, wallet_height_at_least, True, wallet_node, 399)
send_amount = uint64(40_000_000_000_000)
Expand Down
11 changes: 10 additions & 1 deletion chia/_tests/util/full_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from chia.cmds.init_funcs import chia_init
from chia.consensus.constants import replace_str_to_bytes
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
from chia.full_node.full_node import FullNode
from chia.server.outbound_message import Message, NodeType
from chia.server.server import ChiaServer
Expand Down Expand Up @@ -199,7 +200,15 @@ async def run_sync_test(
await full_node.add_unfinished_block(make_unfinished_block(b, constants), peer)
await full_node.add_block(b, None, full_node._bls_cache)
else:
success, summary, _ = await full_node.add_block_batch(block_batch, peer_info, None)
block_record = await full_node.blockchain.get_block_record_from_db(
block_batch[0].prev_header_hash
)
ssi, diff = get_next_sub_slot_iters_and_difficulty(
full_node.constants, True, block_record, full_node.blockchain
)
success, summary, _, _, _, _ = await full_node.add_block_batch(
block_batch, peer_info, None, current_ssi=ssi, current_difficulty=diff
)
end_height = block_batch[-1].height
full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE)

Expand Down
35 changes: 35 additions & 0 deletions chia/_tests/util/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,14 @@
import chia._tests
from chia._tests import ether
from chia._tests.core.data_layer.util import ChiaRoot
from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
from chia.full_node.full_node import FullNode
from chia.full_node.mempool import Mempool
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.full_block import FullBlock
from chia.types.peer_info import PeerInfo
from chia.util.batches import to_batches
from chia.util.hash import std_hash
from chia.util.ints import uint16, uint32, uint64
from chia.util.network import WebServer
Expand Down Expand Up @@ -685,3 +690,33 @@ def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) ->
pass

return min(options, key=len), caller.lineno


async def add_blocks_in_batches(
    blocks: List[FullBlock],
    full_node: FullNode,
    header_hash: Optional[bytes32] = None,
) -> None:
    """Add *blocks* to *full_node* in batches of 64 via ``add_block_batch``.

    Threads the sub-slot iters (ssi), difficulty and previous sub-epoch
    summary block through successive batch calls, starting either from the
    chain constants (no ``header_hash``) or from the stored block record of
    ``header_hash`` (e.g. a fork point).
    """
    if header_hash is not None:
        # Resume from a known block: derive ssi/difficulty from its record.
        fork_record = await full_node.blockchain.get_block_record_from_db(header_hash)
        ssi, diff = get_next_sub_slot_iters_and_difficulty(
            full_node.constants, True, fork_record, full_node.blockchain
        )
    else:
        # Fresh chain: start from genesis constants.
        ssi = full_node.constants.SUB_SLOT_ITERS_STARTING
        diff = full_node.constants.DIFFICULTY_STARTING
    prev_ses_block = None
    for batch in to_batches(blocks, 64):
        first = batch.entries[0]
        # Periodic progress output while syncing long chains in tests.
        if first.height % 128 == 0:
            print(f"main chain: {first.height:4} weight: {first.weight}")
        success, _, ssi, diff, prev_ses_block, err = await full_node.add_block_batch(
            batch.entries,
            PeerInfo("0.0.0.0", 0),
            None,
            current_ssi=ssi,
            current_difficulty=diff,
            prev_ses_block=prev_ses_block,
        )
        assert err is None
        assert success is True
Loading
Loading