From b21b2fdd97d3bb4a5b8e866bca4af1f99a227154 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 31 Oct 2023 14:22:04 -0500 Subject: [PATCH 001/116] Add interface for Proof and Prover class, test prove_parallel implementation --- src/pyk/proof/equality.py | 12 +- src/pyk/proof/proof.py | 25 +++- src/pyk/proof/reachability.py | 14 +- .../integration/proof/test_parallel_prove.py | 130 ++++++++++++++++++ 4 files changed, 175 insertions(+), 6 deletions(-) create mode 100644 src/tests/integration/proof/test_parallel_prove.py diff --git a/src/pyk/proof/equality.py b/src/pyk/proof/equality.py index 3a3b2f8fa..874a2da32 100644 --- a/src/pyk/proof/equality.py +++ b/src/pyk/proof/equality.py @@ -326,7 +326,7 @@ def lines(self) -> list[str]: ] -class ImpliesProver(Prover): +class ImpliesProver(Prover[ImpliesProof, None, None]): proof: ImpliesProof def __init__( @@ -375,6 +375,16 @@ def advance_proof(self) -> None: _LOGGER.info(f'{proof_type} finished {self.proof.id}: {self.proof.status}') self.proof.write_proof_data() + def steps(self, proof: ImpliesProof) -> Iterable[None]: + return [None] + + def advance(self, proof: ImpliesProof, step: None) -> None: + return None + + # Should return P to be more flexible, but let's assume this for implicity + def commit(self, proof: ImpliesProof, update: None) -> None: + return + class EqualityProver(ImpliesProver): def __init__(self, proof: EqualityProof, kcfg_explore: KCFGExplore) -> None: diff --git a/src/pyk/proof/proof.py b/src/pyk/proof/proof.py index f4d0558c5..2dbb96d00 100644 --- a/src/pyk/proof/proof.py +++ b/src/pyk/proof/proof.py @@ -6,18 +6,22 @@ from dataclasses import dataclass from enum import Enum from itertools import chain -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Generic, TypeVar from ..utils import ensure_dir_path, hash_file, hash_str if TYPE_CHECKING: from collections.abc import Iterable, Mapping from pathlib import Path - from typing import Any, Final, TypeVar + from typing import Any, Final from pyk.kcfg.explore import KCFGExplore - T = TypeVar('T', bound='Proof') +Q = TypeVar('Q', bound='Any') +P = TypeVar('P', bound='Any') +S = TypeVar('S', bound='Any') +U = TypeVar('U', bound='Any') +# T = TypeVar('T', bound='Proof') _LOGGER: Final = logging.getLogger(__name__) @@ -281,8 +285,21 @@ def lines(self) -> list[str]: return [line for lines in (summary.lines for summary in self.summaries) for line in lines] -class Prover: +class Prover(ABC, Generic[P, S, U]): kcfg_explore: KCFGExplore def __init__(self, kcfg_explore: KCFGExplore): self.kcfg_explore = kcfg_explore + + @abstractmethod + def steps(self, proof: P) -> Iterable[S]: + ... + + @abstractmethod + def advance(self, proof: P, step: S) -> U: + ... + + # Should return P to be more flexible, but let's assume this for implicity + @abstractmethod + def commit(self, proof: P, update: U) -> None: + ... 
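
The hunk above generalises `Prover` into an abstract base class over a proof type `P`, a step type `S`, and an update type `U`, splitting proof advancement into `steps` (enumerate available work), `advance` (do the work), and `commit` (fold the result back into the proof). As a rough sketch of how a caller is expected to drive that contract, here is a purely sequential loop; `run_sequentially` is an illustrative helper, not part of the patch (the parallel driver appears in the new test file below):

def run_sequentially(prover, proof) -> None:
    # Illustrative driver for the steps/advance/commit contract above.
    while True:
        steps = list(prover.steps(proof))         # work currently available
        if not steps:
            break                                 # proof.status now reports the outcome
        for step in steps:
            update = prover.advance(proof, step)  # the expensive computation
            prover.commit(proof, update)          # record the result on the proof
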
diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index f1a7e1b33..02265a41e 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -13,6 +13,7 @@ from ..kast.outer import KClaim from ..kcfg import KCFG from ..kcfg.exploration import KCFGExploration +from ..kcfg.explore import ExtendResult, Vacuous from ..prelude.kbool import BOOL, TRUE from ..prelude.ml import mlAnd, mlEquals, mlTop from ..utils import FrozenDict, ensure_dir_path, hash_str, shorten_hashes, single @@ -623,7 +624,7 @@ def summary(self) -> CompositeSummary: ) -class APRProver(Prover): +class APRProver(Prover[APRProof, int, ExtendResult]): proof: APRProof main_module_name: str @@ -844,6 +845,17 @@ def save_failure_info(self) -> None: def failure_info(self) -> APRFailureInfo: return APRFailureInfo.from_proof(self.proof, self.kcfg_explore, counterexample_info=self.counterexample_info) + def steps(self, proof: APRProof) -> Iterable[int]: + return [0] + ... + + def advance(self, proof: APRProof, step: int) -> ExtendResult: + return Vacuous() + + # Should return P to be more flexible, but let's assume this for implicity + def commit(self, proof: APRProof, update: ExtendResult) -> None: + return + @dataclass(frozen=True) class APRSummary(ProofSummary): diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py new file mode 100644 index 000000000..2c0cdcafe --- /dev/null +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +import time +import sys +from concurrent.futures import ProcessPoolExecutor, wait +from typing import TYPE_CHECKING, Any, Mapping + +from pyk.proof.proof import Proof, ProofStatus, Prover + +if TYPE_CHECKING: + from collections.abc import Iterable + from concurrent.futures import Executor, Future + from pathlib import Path + + +class TreeExploreProof(Proof): + nodes_values: dict[int, int] + nodes_explored: dict[int, bool] + edges: dict[int, list[int]] + + def __init__(self) -> None: + self.nodes_explored = {} + self.edges = {} + for i in range(10): + self.nodes_explored[i] = False + + # 0 + # / \ + # 1 2 + # / \ + # 3 4 + # / \ \ + # 5 6 7 + # / \ + # 8 9 + self.edges[0] = [1, 2] + self.edges[1] = [] + self.edges[2] = [3, 4] + self.edges[3] = [5, 6] + self.edges[4] = [7] + self.edges[5] = [] + self.edges[6] = [] + self.edges[7] = [8, 9] + self.edges[8] = [] + self.edges[9] = [] + + @property + def status(self) -> ProofStatus: + if all(self.nodes_explored.values()): + return ProofStatus.PASSED + else: + return ProofStatus.PENDING + + @classmethod + def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> TreeExploreProof: + return TreeExploreProof() + + def write_proof_data(self) -> None: + return + + +class TreeExploreProver(Prover[TreeExploreProof, int, int]): + def __init__(self) -> None: + return + + def steps(self, proof: TreeExploreProof) -> Iterable[int]: + def parents(node_id: int) -> Iterable[int]: + return [source for source, targets in proof.edges.items() if node_id in targets] + + return [ + node_id + for node_id, explored in proof.nodes_explored.items() + if not explored and all(proof.nodes_explored[parent] for parent in parents(node_id)) + ] + + def advance(self, proof: TreeExploreProof, step: int) -> int: + print(f'Advancing node {step}\n', file=sys.stderr) + time.sleep(5) + print(f'Done advancing node {step}\n', file=sys.stderr) + return step + + # Should return P to be more flexible, but let's 
assume this for implicity + def commit(self, proof: TreeExploreProof, update: int) -> None: + proof.nodes_explored[update] = True + + +def prove_parallel( + proofs: list[Proof], + # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given + provers: dict[Proof, Prover], +) -> Iterable[Proof]: + pending: dict[Future[int], Proof] = {} + + def submit(proof: Proof, pool: Executor) -> None: + prover = provers[proof] + for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) + future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution + pending[future] = proof + + with ProcessPoolExecutor(max_workers=3) as pool: + for proof in proofs: + submit(proof, pool) + + while pending: + future = list(wait(pending).done)[0] + proof = pending[future] + prover = provers[proof] + update = future.result() + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + + match proof.status: + # terminate on first failure, yield partial results, etc. + case ProofStatus.FAILED: + break + case ProofStatus.PENDING: + ... + case ProofStatus.PASSED: + break + + submit(proof, pool) + pending.pop(future) + return proofs + + +def test_parallel_prove() -> None: + proof = TreeExploreProof() + prover = TreeExploreProver() + results = prove_parallel([proof], {proof: prover}) + assert len(list(results)) == 1 + assert list(results)[0].status == ProofStatus.PASSED From 71eb0a30b723db71e7c4517a30018377af9cd17a Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 31 Oct 2023 19:25:00 +0000 Subject: [PATCH 002/116] Set Version: 0.1.489 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 5897f36a6..943628fcf 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.488 +0.1.489 diff --git a/pyproject.toml b/pyproject.toml index f8a0467d8..4a58ea12b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.488" +version = "0.1.489" description = "" authors = [ "Runtime Verification, Inc. ", From fa0f4d022527d2c5f138867b9de2bb4b4efa8be1 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 31 Oct 2023 14:47:27 -0500 Subject: [PATCH 003/116] Keep track of begun nodes --- src/tests/integration/proof/test_parallel_prove.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 2c0cdcafe..40ca2db85 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,7 +1,7 @@ from __future__ import annotations -import time import sys +import time from concurrent.futures import ProcessPoolExecutor, wait from typing import TYPE_CHECKING, Any, Mapping @@ -90,14 +90,18 @@ def prove_parallel( provers: dict[Proof, Prover], ) -> Iterable[Proof]: pending: dict[Future[int], Proof] = {} + explored: set[int] = set() def submit(proof: Proof, pool: Executor) -> None: prover = provers[proof] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) 
+ if step in explored: + continue + explored.add(step) future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution pending[future] = proof - with ProcessPoolExecutor(max_workers=3) as pool: + with ProcessPoolExecutor(max_workers=2) as pool: for proof in proofs: submit(proof, pool) From f644aecbc250bfbc8905bf0d472f53f5e833bb40 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 1 Nov 2023 14:39:00 -0500 Subject: [PATCH 004/116] Separate new Prover/Proof interface from existing classes --- src/pyk/proof/equality.py | 12 +------ src/pyk/proof/parallel.py | 35 +++++++++++++++++++ src/pyk/proof/proof.py | 25 +++---------- src/pyk/proof/reachability.py | 14 +------- .../integration/proof/test_parallel_prove.py | 13 ++----- 5 files changed, 44 insertions(+), 55 deletions(-) create mode 100644 src/pyk/proof/parallel.py diff --git a/src/pyk/proof/equality.py b/src/pyk/proof/equality.py index 874a2da32..3a3b2f8fa 100644 --- a/src/pyk/proof/equality.py +++ b/src/pyk/proof/equality.py @@ -326,7 +326,7 @@ def lines(self) -> list[str]: ] -class ImpliesProver(Prover[ImpliesProof, None, None]): +class ImpliesProver(Prover): proof: ImpliesProof def __init__( @@ -375,16 +375,6 @@ def advance_proof(self) -> None: _LOGGER.info(f'{proof_type} finished {self.proof.id}: {self.proof.status}') self.proof.write_proof_data() - def steps(self, proof: ImpliesProof) -> Iterable[None]: - return [None] - - def advance(self, proof: ImpliesProof, step: None) -> None: - return None - - # Should return P to be more flexible, but let's assume this for implicity - def commit(self, proof: ImpliesProof, update: None) -> None: - return - class EqualityProver(ImpliesProver): def __init__(self, proof: EqualityProof, kcfg_explore: KCFGExplore) -> None: diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py new file mode 100644 index 000000000..4bc5996c3 --- /dev/null +++ b/src/pyk/proof/parallel.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any, Generic, TypeVar + +if TYPE_CHECKING: + from collections.abc import Iterable + + from pyk.proof.proof import ProofStatus + +P = TypeVar('P', bound='Any') +S = TypeVar('S', bound='Any') +U = TypeVar('U', bound='Any') + + +class Prover(ABC, Generic[P, S, U]): + @abstractmethod + def steps(self, proof: P) -> Iterable[S]: + ... + + @abstractmethod + def advance(self, proof: P, step: S) -> U: + ... + + # Should return P to be more flexible, but let's assume this for implicity + @abstractmethod + def commit(self, proof: P, update: U) -> None: + ... + + +class Proof: + @property + @abstractmethod + def status(self) -> ProofStatus: + ... 
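
One subtlety in the module as introduced here: `Proof` declares `status` with `@abstractmethod` but does not itself derive from `ABC`, so the abstract method is not actually enforced at instantiation time; a later patch in the series changes the base to `Proof(ABC)`. A small self-contained illustration of the difference, with hypothetical class names:

from abc import ABC, abstractmethod


class LooseProof:          # like `Proof` in this patch: plain base, ABCMeta not involved
    @property
    @abstractmethod
    def status(self): ...


class StrictProof(ABC):    # like `Proof(ABC)` later in the series
    @property
    @abstractmethod
    def status(self): ...


LooseProof()               # accepted silently, despite the unimplemented abstract property
try:
    StrictProof()
except TypeError as err:
    print(err)             # Can't instantiate abstract class StrictProof ...
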
diff --git a/src/pyk/proof/proof.py b/src/pyk/proof/proof.py index 2dbb96d00..f4d0558c5 100644 --- a/src/pyk/proof/proof.py +++ b/src/pyk/proof/proof.py @@ -6,22 +6,18 @@ from dataclasses import dataclass from enum import Enum from itertools import chain -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING from ..utils import ensure_dir_path, hash_file, hash_str if TYPE_CHECKING: from collections.abc import Iterable, Mapping from pathlib import Path - from typing import Any, Final + from typing import Any, Final, TypeVar from pyk.kcfg.explore import KCFGExplore -Q = TypeVar('Q', bound='Any') -P = TypeVar('P', bound='Any') -S = TypeVar('S', bound='Any') -U = TypeVar('U', bound='Any') -# T = TypeVar('T', bound='Proof') + T = TypeVar('T', bound='Proof') _LOGGER: Final = logging.getLogger(__name__) @@ -285,21 +281,8 @@ def lines(self) -> list[str]: return [line for lines in (summary.lines for summary in self.summaries) for line in lines] -class Prover(ABC, Generic[P, S, U]): +class Prover: kcfg_explore: KCFGExplore def __init__(self, kcfg_explore: KCFGExplore): self.kcfg_explore = kcfg_explore - - @abstractmethod - def steps(self, proof: P) -> Iterable[S]: - ... - - @abstractmethod - def advance(self, proof: P, step: S) -> U: - ... - - # Should return P to be more flexible, but let's assume this for implicity - @abstractmethod - def commit(self, proof: P, update: U) -> None: - ... diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 02265a41e..f1a7e1b33 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -13,7 +13,6 @@ from ..kast.outer import KClaim from ..kcfg import KCFG from ..kcfg.exploration import KCFGExploration -from ..kcfg.explore import ExtendResult, Vacuous from ..prelude.kbool import BOOL, TRUE from ..prelude.ml import mlAnd, mlEquals, mlTop from ..utils import FrozenDict, ensure_dir_path, hash_str, shorten_hashes, single @@ -624,7 +623,7 @@ def summary(self) -> CompositeSummary: ) -class APRProver(Prover[APRProof, int, ExtendResult]): +class APRProver(Prover): proof: APRProof main_module_name: str @@ -845,17 +844,6 @@ def save_failure_info(self) -> None: def failure_info(self) -> APRFailureInfo: return APRFailureInfo.from_proof(self.proof, self.kcfg_explore, counterexample_info=self.counterexample_info) - def steps(self, proof: APRProof) -> Iterable[int]: - return [0] - ... 
- - def advance(self, proof: APRProof, step: int) -> ExtendResult: - return Vacuous() - - # Should return P to be more flexible, but let's assume this for implicity - def commit(self, proof: APRProof, update: ExtendResult) -> None: - return - @dataclass(frozen=True) class APRSummary(ProofSummary): diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 40ca2db85..015585554 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,14 +3,14 @@ import sys import time from concurrent.futures import ProcessPoolExecutor, wait -from typing import TYPE_CHECKING, Any, Mapping +from typing import TYPE_CHECKING -from pyk.proof.proof import Proof, ProofStatus, Prover +from pyk.proof.parallel import Proof, Prover +from pyk.proof.proof import ProofStatus if TYPE_CHECKING: from collections.abc import Iterable from concurrent.futures import Executor, Future - from pathlib import Path class TreeExploreProof(Proof): @@ -51,13 +51,6 @@ def status(self) -> ProofStatus: else: return ProofStatus.PENDING - @classmethod - def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> TreeExploreProof: - return TreeExploreProof() - - def write_proof_data(self) -> None: - return - class TreeExploreProver(Prover[TreeExploreProof, int, int]): def __init__(self) -> None: From de48482f82f6c7e159a1b354017246154918a9ed Mon Sep 17 00:00:00 2001 From: devops Date: Wed, 1 Nov 2023 19:39:19 +0000 Subject: [PATCH 005/116] Set Version: 0.1.490 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 943628fcf..d0ac8ea7a 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.489 +0.1.490 diff --git a/pyproject.toml b/pyproject.toml index 4a58ea12b..7a9330364 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.489" +version = "0.1.490" description = "" authors = [ "Runtime Verification, Inc. ", From 06438ec2f845667f4fa9e03ce86d873b23735a9e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 1 Nov 2023 15:42:19 -0500 Subject: [PATCH 006/116] Change interface to use return steps from commit --- src/pyk/proof/parallel.py | 9 ++-- .../integration/proof/test_parallel_prove.py | 41 +++++++++---------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 4bc5996c3..8e08700b6 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -14,17 +14,20 @@ class Prover(ABC, Generic[P, S, U]): + + # Return first available step(s) of proof @abstractmethod - def steps(self, proof: P) -> Iterable[S]: + def initial_steps(self, proof: P) -> Iterable[S]: ... + # Applies step to proof @abstractmethod def advance(self, proof: P, step: S) -> U: ... - # Should return P to be more flexible, but let's assume this for implicity + # Returns steps that were made available by this commit @abstractmethod - def commit(self, proof: P, update: U) -> None: + def commit(self, proof: P, update: U) -> Iterable[S]: ... 
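
With `commit` now returning the steps it has just made available, scheduling becomes push-driven: `initial_steps` seeds the worklist and every committed update extends it, so the caller no longer needs to rescan `steps()` or keep an `explored` set. A minimal sequential sketch of that loop, with illustrative names only (the concurrent version is worked out in the test change below); a later patch in the series reverts to the `steps()` formulation:

from collections import deque


def run_push_style(prover, proof) -> None:
    # Illustrative driver for the initial_steps/advance/commit variant above.
    worklist = deque(prover.initial_steps(proof))      # seed with the first available steps
    while worklist:
        step = worklist.popleft()
        update = prover.advance(proof, step)           # the expensive computation
        worklist.extend(prover.commit(proof, update))  # commit hands back newly enabled steps
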
diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 015585554..6c42f4353 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -56,15 +56,8 @@ class TreeExploreProver(Prover[TreeExploreProof, int, int]): def __init__(self) -> None: return - def steps(self, proof: TreeExploreProof) -> Iterable[int]: - def parents(node_id: int) -> Iterable[int]: - return [source for source, targets in proof.edges.items() if node_id in targets] - - return [ - node_id - for node_id, explored in proof.nodes_explored.items() - if not explored and all(proof.nodes_explored[parent] for parent in parents(node_id)) - ] + def initial_steps(self, proof: TreeExploreProof) -> Iterable[int]: + return [0] def advance(self, proof: TreeExploreProof, step: int) -> int: print(f'Advancing node {step}\n', file=sys.stderr) @@ -72,10 +65,14 @@ def advance(self, proof: TreeExploreProof, step: int) -> int: print(f'Done advancing node {step}\n', file=sys.stderr) return step - # Should return P to be more flexible, but let's assume this for implicity - def commit(self, proof: TreeExploreProof, update: int) -> None: + def commit(self, proof: TreeExploreProof, update: int) -> Iterable[int]: proof.nodes_explored[update] = True + def parents(node_id: int) -> Iterable[int]: + return [source for source, targets in proof.edges.items() if node_id in targets] + + return proof.edges[update] + def prove_parallel( proofs: list[Proof], @@ -83,27 +80,26 @@ def prove_parallel( provers: dict[Proof, Prover], ) -> Iterable[Proof]: pending: dict[Future[int], Proof] = {} - explored: set[int] = set() - def submit(proof: Proof, pool: Executor) -> None: + def submit(proof: Proof, pool: Executor, step: int) -> None: prover = provers[proof] - for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) - if step in explored: - continue - explored.add(step) - future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution - pending[future] = proof + future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution + pending[future] = proof with ProcessPoolExecutor(max_workers=2) as pool: for proof in proofs: - submit(proof, pool) + prover = provers[proof] + for step in prover.initial_steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) + submit(proof, pool, step) while pending: future = list(wait(pending).done)[0] proof = pending[future] prover = provers[proof] update = future.result() - prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + next_steps = prover.commit( + proof, update + ) # <-- update the proof (can be in-memory, access disk with locking, ...) match proof.status: # terminate on first failure, yield partial results, etc. 
@@ -114,7 +110,8 @@ def submit(proof: Proof, pool: Executor) -> None: case ProofStatus.PASSED: break - submit(proof, pool) + for step in next_steps: + submit(proof, pool, step) pending.pop(future) return proofs From 3ddda2dd370a5810a18808f5f54bb9ff58f10e86 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 1 Nov 2023 15:55:36 -0500 Subject: [PATCH 007/116] Simplify TreeExploreProver --- src/pyk/proof/parallel.py | 1 - .../integration/proof/test_parallel_prove.py | 39 ++++++++++--------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 8e08700b6..f27f09bac 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -14,7 +14,6 @@ class Prover(ABC, Generic[P, S, U]): - # Return first available step(s) of proof @abstractmethod def initial_steps(self, proof: P) -> Iterable[S]: diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 6c42f4353..582c8c3c2 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -14,15 +14,16 @@ class TreeExploreProof(Proof): - nodes_values: dict[int, int] - nodes_explored: dict[int, bool] - edges: dict[int, list[int]] + init: int + target: int + edges: dict[int, set[int]] + reached: set[int] def __init__(self) -> None: - self.nodes_explored = {} + self.init = 0 + self.reached = set() + self.target = 9 self.edges = {} - for i in range(10): - self.nodes_explored[i] = False # 0 # / \ @@ -33,20 +34,20 @@ def __init__(self) -> None: # 5 6 7 # / \ # 8 9 - self.edges[0] = [1, 2] - self.edges[1] = [] - self.edges[2] = [3, 4] - self.edges[3] = [5, 6] - self.edges[4] = [7] - self.edges[5] = [] - self.edges[6] = [] - self.edges[7] = [8, 9] - self.edges[8] = [] - self.edges[9] = [] + self.edges[0] = {1, 2} + self.edges[1] = set() + self.edges[2] = {3, 4} + self.edges[3] = {5, 6} + self.edges[4] = {7} + self.edges[5] = set() + self.edges[6] = set() + self.edges[7] = {8, 9} + self.edges[8] = set() + self.edges[9] = set() @property def status(self) -> ProofStatus: - if all(self.nodes_explored.values()): + if self.target in self.reached: return ProofStatus.PASSED else: return ProofStatus.PENDING @@ -57,7 +58,7 @@ def __init__(self) -> None: return def initial_steps(self, proof: TreeExploreProof) -> Iterable[int]: - return [0] + return [proof.init] def advance(self, proof: TreeExploreProof, step: int) -> int: print(f'Advancing node {step}\n', file=sys.stderr) @@ -66,7 +67,7 @@ def advance(self, proof: TreeExploreProof, step: int) -> int: return step def commit(self, proof: TreeExploreProof, update: int) -> Iterable[int]: - proof.nodes_explored[update] = True + proof.reached.add(update) def parents(node_id: int) -> Iterable[int]: return [source for source, targets in proof.edges.items() if node_id in targets] From 68bd89d1dbee8b3c651112083c634c335c52a456 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 1 Nov 2023 20:50:33 -0500 Subject: [PATCH 008/116] Use steps --- src/pyk/proof/parallel.py | 4 +- .../integration/proof/test_parallel_prove.py | 42 ++++++++++--------- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index f27f09bac..18642738b 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -16,7 +16,7 @@ class Prover(ABC, Generic[P, S, U]): # Return first available step(s) of proof @abstractmethod - def initial_steps(self, proof: P) -> Iterable[S]: + def 
steps(self, proof: P) -> Iterable[S]: ... # Applies step to proof @@ -26,7 +26,7 @@ def advance(self, proof: P, step: S) -> U: # Returns steps that were made available by this commit @abstractmethod - def commit(self, proof: P, update: U) -> Iterable[S]: + def commit(self, proof: P, update: U) -> None: ... diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 582c8c3c2..104edf3fd 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -57,8 +57,17 @@ class TreeExploreProver(Prover[TreeExploreProof, int, int]): def __init__(self) -> None: return - def initial_steps(self, proof: TreeExploreProof) -> Iterable[int]: - return [proof.init] + def steps(self, proof: TreeExploreProof) -> Iterable[int]: + def parents(node_id: int) -> Iterable[int]: + return [source for source, targets in proof.edges.items() if node_id in targets] + + nodes = set(range(10)) + + return [ + node_id + for node_id in nodes + if node_id not in proof.reached and all(parent in proof.reached for parent in parents(node_id)) + ] def advance(self, proof: TreeExploreProof, step: int) -> int: print(f'Advancing node {step}\n', file=sys.stderr) @@ -66,14 +75,9 @@ def advance(self, proof: TreeExploreProof, step: int) -> int: print(f'Done advancing node {step}\n', file=sys.stderr) return step - def commit(self, proof: TreeExploreProof, update: int) -> Iterable[int]: + def commit(self, proof: TreeExploreProof, update: int) -> None: proof.reached.add(update) - def parents(node_id: int) -> Iterable[int]: - return [source for source, targets in proof.edges.items() if node_id in targets] - - return proof.edges[update] - def prove_parallel( proofs: list[Proof], @@ -81,26 +85,27 @@ def prove_parallel( provers: dict[Proof, Prover], ) -> Iterable[Proof]: pending: dict[Future[int], Proof] = {} + explored: set[int] = set() - def submit(proof: Proof, pool: Executor, step: int) -> None: + def submit(proof: Proof, pool: Executor) -> None: prover = provers[proof] - future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution - pending[future] = proof + for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) + if step in explored: + continue + explored.add(step) + future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution + pending[future] = proof with ProcessPoolExecutor(max_workers=2) as pool: for proof in proofs: - prover = provers[proof] - for step in prover.initial_steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) - submit(proof, pool, step) + submit(proof, pool) while pending: future = list(wait(pending).done)[0] proof = pending[future] prover = provers[proof] update = future.result() - next_steps = prover.commit( - proof, update - ) # <-- update the proof (can be in-memory, access disk with locking, ...) + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) match proof.status: # terminate on first failure, yield partial results, etc. 
@@ -111,8 +116,7 @@ def submit(proof: Proof, pool: Executor, step: int) -> None: case ProofStatus.PASSED: break - for step in next_steps: - submit(proof, pool, step) + submit(proof, pool) pending.pop(future) return proofs From 34d2442a675ae5c83b68a1e58964ea146907973c Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 3 Nov 2023 18:36:20 -0500 Subject: [PATCH 009/116] Update to match new interface plan --- src/pyk/proof/parallel.py | 6 ++---- src/tests/integration/proof/test_parallel_prove.py | 5 +++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 18642738b..6876c8f1c 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -14,17 +14,15 @@ class Prover(ABC, Generic[P, S, U]): - # Return first available step(s) of proof @abstractmethod def steps(self, proof: P) -> Iterable[S]: ... - # Applies step to proof + @classmethod @abstractmethod - def advance(self, proof: P, step: S) -> U: + def advance(cls, step: S) -> U: ... - # Returns steps that were made available by this commit @abstractmethod def commit(self, proof: P, update: U) -> None: ... diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 104edf3fd..9ad3df3c9 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -69,7 +69,8 @@ def parents(node_id: int) -> Iterable[int]: if node_id not in proof.reached and all(parent in proof.reached for parent in parents(node_id)) ] - def advance(self, proof: TreeExploreProof, step: int) -> int: + @classmethod + def advance(cls, step: int) -> int: print(f'Advancing node {step}\n', file=sys.stderr) time.sleep(5) print(f'Done advancing node {step}\n', file=sys.stderr) @@ -93,7 +94,7 @@ def submit(proof: Proof, pool: Executor) -> None: if step in explored: continue explored.add(step) - future = pool.submit(prover.advance, proof, step) # <-- schedule steps for execution + future = pool.submit(prover.advance, step) # <-- schedule steps for execution pending[future] = proof with ProcessPoolExecutor(max_workers=2) as pool: From 76e239bab71b6168dfce10cf4eb2c32e4593f734 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 3 Nov 2023 21:07:07 -0500 Subject: [PATCH 010/116] Add testing module --- src/pyk/proof/parallel.py | 56 +++++++++++++++++++ .../integration/proof/test_parallel_prove.py | 56 ++++++++++++++----- 2 files changed, 99 insertions(+), 13 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 6876c8f1c..91bc70bb5 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,12 +1,17 @@ from __future__ import annotations from abc import ABC, abstractmethod +from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Generic, TypeVar +from pyk.proof.reachability import APRProof + if TYPE_CHECKING: from collections.abc import Iterable + from pyk.kcfg.kcfg import CTerm from pyk.proof.proof import ProofStatus + from pyk.proof.reachability import APRProver P = TypeVar('P', bound='Any') S = TypeVar('S', bound='Any') @@ -33,3 +38,54 @@ class Proof: @abstractmethod def status(self) -> ProofStatus: ... + + +@dataclass +class APRProverTask: + cterm: CTerm + node_id: int + module_name: str + execute_depth: int + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + + +class APRProverResult: + ... 
+ + +class NewAPRProver(Prover[APRProof, APRProverTask, APRProverResult]): + prover: APRProver + execute_depth: int + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + + def __init__(self, prover: APRProver) -> None: + self.prover = prover + + def steps(self, proof: APRProof) -> Iterable[APRProverTask]: + steps = [] + for node in proof.pending: + module_name = ( + self.prover.circularities_module_name + if self.prover.nonzero_depth(node) + else self.prover.dependencies_module_name + ) + steps.append( + APRProverTask( + cterm=node.cterm, + node_id=node.id, + module_name=module_name, + execute_depth=self.execute_depth, + cut_point_rules=self.cut_point_rules, + terminal_rules=self.terminal_rules, + ) + ) + return steps + + @classmethod + def advance(cls, step: APRProverTask) -> APRProverResult: + return APRProverResult() + + def commit(self, proof: APRProof, update: APRProverResult) -> None: + ... diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 9ad3df3c9..15e58789a 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,15 +3,28 @@ import sys import time from concurrent.futures import ProcessPoolExecutor, wait +from pathlib import Path from typing import TYPE_CHECKING -from pyk.proof.parallel import Proof, Prover +import pytest + +from pyk.proof.parallel import APRProof, APRProver, NewAPRProver, Proof, Prover from pyk.proof.proof import ProofStatus +from pyk.testing import KCFGExploreTest, KProveTest +from pyk.utils import single + +from ..utils import K_FILES if TYPE_CHECKING: from collections.abc import Iterable from concurrent.futures import Executor, Future + from pytest import TempPathFactory + + from pyk.kcfg.explore import KCFGExplore + from pyk.ktool.kprove import KProve + from pyk.proof.parallel import APRProverResult, APRProverTask + class TreeExploreProof(Proof): init: int @@ -81,14 +94,14 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def prove_parallel( - proofs: list[Proof], + proofs: list[APRProof], # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given - provers: dict[Proof, Prover], -) -> Iterable[Proof]: - pending: dict[Future[int], Proof] = {} - explored: set[int] = set() + provers: dict[APRProof, NewAPRProver], +) -> Iterable[APRProof]: + pending: dict[Future[APRProverResult], APRProof] = {} + explored: set[APRProverTask] = set() - def submit(proof: Proof, pool: Executor) -> None: + def submit(proof: APRProof, pool: Executor) -> None: prover = provers[proof] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) 
if step in explored: @@ -122,9 +135,26 @@ def submit(proof: Proof, pool: Executor) -> None: return proofs -def test_parallel_prove() -> None: - proof = TreeExploreProof() - prover = TreeExploreProver() - results = prove_parallel([proof], {proof: prover}) - assert len(list(results)) == 1 - assert list(results)[0].status == ProofStatus.PASSED +@pytest.fixture(scope='function') +def proof_dir(tmp_path_factory: TempPathFactory) -> Path: + return tmp_path_factory.mktemp('proofs') + + +class TestAPRProofParallel(KCFGExploreTest, KProveTest): + KOMPILE_MAIN_FILE = K_FILES / 'imp.k' + + def test_parallel_prove(self, kprove: KProve, kcfg_explore: KCFGExplore) -> None: + spec_file = K_FILES / 'imp-simple-spec.k' + spec_module = 'IMP-SPEC' + claim_id = 'concrete-addition' + + claim = single( + kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) + ) + + proof = APRProof.from_claim(defn=kprove.definition, claim=claim, logs={}) + prover = APRProver(proof=proof, kcfg_explore=kcfg_explore) + new_prover = NewAPRProver(prover=prover) + results = prove_parallel([proof], {proof: new_prover}) + assert len(list(results)) == 1 + assert list(results)[0].status == ProofStatus.PASSED From a60290c24487dc63a14caf598f55fed20e5a44c2 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 6 Nov 2023 13:20:11 -0600 Subject: [PATCH 011/116] Change interface to use ProofStep --- src/pyk/proof/parallel.py | 116 +++++++++--------- .../integration/proof/test_parallel_prove.py | 111 +++++++++-------- 2 files changed, 113 insertions(+), 114 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 91bc70bb5..18d193381 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,20 +1,15 @@ from __future__ import annotations from abc import ABC, abstractmethod -from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Generic, TypeVar -from pyk.proof.reachability import APRProof - if TYPE_CHECKING: from collections.abc import Iterable - from pyk.kcfg.kcfg import CTerm from pyk.proof.proof import ProofStatus - from pyk.proof.reachability import APRProver -P = TypeVar('P', bound='Any') -S = TypeVar('S', bound='Any') +P = TypeVar('P', bound='Proof') +S = TypeVar('S', bound='ProofStep') U = TypeVar('U', bound='Any') @@ -23,69 +18,70 @@ class Prover(ABC, Generic[P, S, U]): def steps(self, proof: P) -> Iterable[S]: ... - @classmethod - @abstractmethod - def advance(cls, step: S) -> U: - ... - @abstractmethod def commit(self, proof: P, update: U) -> None: ... -class Proof: +class Proof(ABC): @property @abstractmethod def status(self) -> ProofStatus: ... -@dataclass -class APRProverTask: - cterm: CTerm - node_id: int - module_name: str - execute_depth: int - cut_point_rules: Iterable[str] - terminal_rules: Iterable[str] - - -class APRProverResult: - ... 
- - -class NewAPRProver(Prover[APRProof, APRProverTask, APRProverResult]): - prover: APRProver - execute_depth: int - cut_point_rules: Iterable[str] - terminal_rules: Iterable[str] - - def __init__(self, prover: APRProver) -> None: - self.prover = prover - - def steps(self, proof: APRProof) -> Iterable[APRProverTask]: - steps = [] - for node in proof.pending: - module_name = ( - self.prover.circularities_module_name - if self.prover.nonzero_depth(node) - else self.prover.dependencies_module_name - ) - steps.append( - APRProverTask( - cterm=node.cterm, - node_id=node.id, - module_name=module_name, - execute_depth=self.execute_depth, - cut_point_rules=self.cut_point_rules, - terminal_rules=self.terminal_rules, - ) - ) - return steps +class ProofStep(ABC, Generic[U]): + @abstractmethod + def exec(self) -> U: + ... - @classmethod - def advance(cls, step: APRProverTask) -> APRProverResult: - return APRProverResult() - def commit(self, proof: APRProof, update: APRProverResult) -> None: - ... +# @dataclass +# class APRProverTask: +# cterm: CTerm +# node_id: int +# module_name: str +# execute_depth: int +# cut_point_rules: Iterable[str] +# terminal_rules: Iterable[str] +# +# +# class APRProverResult: +# ... +# +# +# class NewAPRProver(Prover[APRProof, APRProverTask, APRProverResult]): +# prover: APRProver +# execute_depth: int +# cut_point_rules: Iterable[str] +# terminal_rules: Iterable[str] +# +# def __init__(self, prover: APRProver) -> None: +# self.prover = prover +# +# def steps(self, proof: APRProof) -> Iterable[APRProverTask]: +# steps = [] +# for node in proof.pending: +# module_name = ( +# self.prover.circularities_module_name +# if self.prover.nonzero_depth(node) +# else self.prover.dependencies_module_name +# ) +# steps.append( +# APRProverTask( +# cterm=node.cterm, +# node_id=node.id, +# module_name=module_name, +# execute_depth=self.execute_depth, +# cut_point_rules=self.cut_point_rules, +# terminal_rules=self.terminal_rules, +# ) +# ) +# return steps +# +# @classmethod +# def advance(cls, step: APRProverTask) -> APRProverResult: +# return APRProverResult() +# +# def commit(self, proof: APRProof, update: APRProverResult) -> None: +# ... 
diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 15e58789a..db401c0f1 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,28 +3,16 @@ import sys import time from concurrent.futures import ProcessPoolExecutor, wait -from pathlib import Path +from dataclasses import dataclass from typing import TYPE_CHECKING -import pytest - -from pyk.proof.parallel import APRProof, APRProver, NewAPRProver, Proof, Prover +from pyk.proof.parallel import Proof, ProofStep, Prover from pyk.proof.proof import ProofStatus -from pyk.testing import KCFGExploreTest, KProveTest -from pyk.utils import single - -from ..utils import K_FILES if TYPE_CHECKING: from collections.abc import Iterable from concurrent.futures import Executor, Future - from pytest import TempPathFactory - - from pyk.kcfg.explore import KCFGExplore - from pyk.ktool.kprove import KProve - from pyk.proof.parallel import APRProverResult, APRProverTask - class TreeExploreProof(Proof): init: int @@ -66,48 +54,55 @@ def status(self) -> ProofStatus: return ProofStatus.PENDING -class TreeExploreProver(Prover[TreeExploreProof, int, int]): +@dataclass +class TreeExploreProofStep(ProofStep): + node: int + + def __hash__(self) -> int: + return self.node.__hash__() + + def exec(self) -> int: + print(f'Advancing node {self.node}\n', file=sys.stderr) + time.sleep(5) + print(f'Done advancing node {self.node}\n', file=sys.stderr) + return self.node + + +class TreeExploreProver(Prover[TreeExploreProof, TreeExploreProofStep, int]): def __init__(self) -> None: return - def steps(self, proof: TreeExploreProof) -> Iterable[int]: + def steps(self, proof: TreeExploreProof) -> Iterable[TreeExploreProofStep]: def parents(node_id: int) -> Iterable[int]: return [source for source, targets in proof.edges.items() if node_id in targets] nodes = set(range(10)) return [ - node_id + TreeExploreProofStep(node_id) for node_id in nodes if node_id not in proof.reached and all(parent in proof.reached for parent in parents(node_id)) ] - @classmethod - def advance(cls, step: int) -> int: - print(f'Advancing node {step}\n', file=sys.stderr) - time.sleep(5) - print(f'Done advancing node {step}\n', file=sys.stderr) - return step - def commit(self, proof: TreeExploreProof, update: int) -> None: proof.reached.add(update) def prove_parallel( - proofs: list[APRProof], + proofs: list[TreeExploreProof], # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given - provers: dict[APRProof, NewAPRProver], -) -> Iterable[APRProof]: - pending: dict[Future[APRProverResult], APRProof] = {} - explored: set[APRProverTask] = set() + provers: dict[TreeExploreProof, TreeExploreProver], +) -> Iterable[TreeExploreProof]: + pending: dict[Future[int], TreeExploreProof] = {} + explored: set[TreeExploreProofStep] = set() - def submit(proof: APRProof, pool: Executor) -> None: + def submit(proof: TreeExploreProof, pool: Executor) -> None: prover = provers[proof] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) 
if step in explored: continue explored.add(step) - future = pool.submit(prover.advance, step) # <-- schedule steps for execution + future = pool.submit(step.exec) # <-- schedule steps for execution pending[future] = proof with ProcessPoolExecutor(max_workers=2) as pool: @@ -135,26 +130,34 @@ def submit(proof: APRProof, pool: Executor) -> None: return proofs -@pytest.fixture(scope='function') -def proof_dir(tmp_path_factory: TempPathFactory) -> Path: - return tmp_path_factory.mktemp('proofs') - - -class TestAPRProofParallel(KCFGExploreTest, KProveTest): - KOMPILE_MAIN_FILE = K_FILES / 'imp.k' - - def test_parallel_prove(self, kprove: KProve, kcfg_explore: KCFGExplore) -> None: - spec_file = K_FILES / 'imp-simple-spec.k' - spec_module = 'IMP-SPEC' - claim_id = 'concrete-addition' - - claim = single( - kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) - ) - - proof = APRProof.from_claim(defn=kprove.definition, claim=claim, logs={}) - prover = APRProver(proof=proof, kcfg_explore=kcfg_explore) - new_prover = NewAPRProver(prover=prover) - results = prove_parallel([proof], {proof: new_prover}) - assert len(list(results)) == 1 - assert list(results)[0].status == ProofStatus.PASSED +def test_parallel_prove() -> None: + prover = TreeExploreProver() + proof = TreeExploreProof() + results = prove_parallel([proof], {proof: prover}) + assert len(list(results)) == 1 + assert list(results)[0].status == ProofStatus.PASSED + + +# @pytest.fixture(scope='function') +# def proof_dir(tmp_path_factory: TempPathFactory) -> Path: +# return tmp_path_factory.mktemp('proofs') +# +# +# class TestAPRProofParallel(KCFGExploreTest, KProveTest): +# KOMPILE_MAIN_FILE = K_FILES / 'imp.k' +# +# def test_parallel_prove(self, kprove: KProve, kcfg_explore: KCFGExplore) -> None: +# spec_file = K_FILES / 'imp-simple-spec.k' +# spec_module = 'IMP-SPEC' +# claim_id = 'concrete-addition' +# +# claim = single( +# kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) +# ) +# +# proof = APRProof.from_claim(defn=kprove.definition, claim=claim, logs={}) +# prover = APRProver(proof=proof, kcfg_explore=kcfg_explore) +# new_prover = NewAPRProver(prover=prover) +# results = prove_parallel([proof], {proof: new_prover}) +# assert len(list(results)) == 1 +# assert list(results)[0].status == ProofStatus.PASSED From 1d3b9af093bdf68361f923112ccda4ec8222508d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 6 Nov 2023 19:49:29 -0600 Subject: [PATCH 012/116] Add documentation comments, assertions and tests for TreeExploreProver, and strengthen type signatures --- src/pyk/proof/parallel.py | 102 +++++++-------- .../integration/proof/test_parallel_prove.py | 122 ++++++++++++------ 2 files changed, 135 insertions(+), 89 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 18d193381..b9d938085 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,7 +1,8 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from collections.abc import Hashable +from typing import TYPE_CHECKING, Generic, TypeVar if TYPE_CHECKING: from collections.abc import Iterable @@ -10,78 +11,75 @@ P = TypeVar('P', bound='Proof') S = TypeVar('S', bound='ProofStep') -U = TypeVar('U', bound='Any') +U = TypeVar('U', bound='ProofResult') class Prover(ABC, Generic[P, S, U]): + """ + Should contain all data needed to make progress on a `P` 
(proof). + May be specific to a single `P` (proof) or may be able to handle multiple. + """ + @abstractmethod def steps(self, proof: P) -> Iterable[S]: + """ + Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. + If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. + Must not modify `self` or `proof`. + The output of this function must only change with calls to `self.commit()`. + """ ... @abstractmethod def commit(self, proof: P, update: U) -> None: + """ + Should update `proof` according to `update`. + If `steps()` or `commit()` has been called on a proof `proof`, `commit()` may never again be called on `proof`. + Must only be called with an `update` that was returned by `step.execute()` where `step` was returned by `self.steps(proof)`. + Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. + """ ... class Proof(ABC): + """Should represent a computer proof of a single claim""" + @property @abstractmethod def status(self) -> ProofStatus: + """ + ProofStatus.PASSED: the claim has been proven + ProofStatus.FAILED: the claim has not been proven, but the proof cannot advance further. + ProofStatus.PENDING: the claim has not yet been proven, but the proof can advance further. + Must not change, except with calls to `prover.commit(self, update)` for some `prover,update`. + """ ... -class ProofStep(ABC, Generic[U]): +class ProofStep(ABC, Hashable, Generic[U]): + """ + Should be a description of a computation needed to make progress on a `Proof`. + Must be frozen dataclass. + Must be pickable. + Should be small. + """ + @abstractmethod def exec(self) -> U: + """ + Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. + Allowed to be nondeterministic. + Able to be called on any `ProofStep` returned by `prover.steps(proof)`. + """ ... -# @dataclass -# class APRProverTask: -# cterm: CTerm -# node_id: int -# module_name: str -# execute_depth: int -# cut_point_rules: Iterable[str] -# terminal_rules: Iterable[str] -# -# -# class APRProverResult: -# ... -# -# -# class NewAPRProver(Prover[APRProof, APRProverTask, APRProverResult]): -# prover: APRProver -# execute_depth: int -# cut_point_rules: Iterable[str] -# terminal_rules: Iterable[str] -# -# def __init__(self, prover: APRProver) -> None: -# self.prover = prover -# -# def steps(self, proof: APRProof) -> Iterable[APRProverTask]: -# steps = [] -# for node in proof.pending: -# module_name = ( -# self.prover.circularities_module_name -# if self.prover.nonzero_depth(node) -# else self.prover.dependencies_module_name -# ) -# steps.append( -# APRProverTask( -# cterm=node.cterm, -# node_id=node.id, -# module_name=module_name, -# execute_depth=self.execute_depth, -# cut_point_rules=self.cut_point_rules, -# terminal_rules=self.terminal_rules, -# ) -# ) -# return steps -# -# @classmethod -# def advance(cls, step: APRProverTask) -> APRProverResult: -# return APRProverResult() -# -# def commit(self, proof: APRProof, update: APRProverResult) -> None: -# ... 
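
The docstrings above pin the contract down: `steps()` must be a pure function of the proof state, a `ProofStep` must be a small frozen picklable value, and `commit()` must tolerate updates arriving multiple times and in any order. A toy conforming implementation against the module as it stands at this point might look as follows; `CountdownProof`, `CountdownStep`, and `CountdownProver` are illustrative names, not part of the patch:

from __future__ import annotations

from dataclasses import dataclass

from pyk.proof.parallel import Proof, ProofStep, Prover
from pyk.proof.proof import ProofStatus


class CountdownProof(Proof):
    """Toy proof: passes once every number in 0 .. n-1 has been checked."""

    def __init__(self, n: int) -> None:
        self.n = n
        self.checked: set[int] = set()

    @property
    def status(self) -> ProofStatus:
        return ProofStatus.PASSED if len(self.checked) == self.n else ProofStatus.PENDING


@dataclass(frozen=True)
class CountdownStep(ProofStep[int]):   # frozen dataclass: small, hashable, picklable
    i: int

    def __hash__(self) -> int:
        return hash(self.i)

    def exec(self) -> int:
        return self.i                  # stands in for a real, expensive computation


class CountdownProver(Prover[CountdownProof, int]):
    def steps(self, proof: CountdownProof) -> list[CountdownStep]:
        # Pure query: reads proof.checked but never mutates it.
        return [CountdownStep(i) for i in range(proof.n) if i not in proof.checked]

    def commit(self, proof: CountdownProof, update: int) -> None:
        # Set insertion is idempotent, so duplicate or out-of-order commits are harmless.
        proof.checked.add(update)

Because `commit` only adds to a set, re-delivering the same update, which the documented contract explicitly allows, leaves the proof unchanged.
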
+class ProofResult(ABC): + """ + Should be a description of how to make a small update to a `Proof` based on the results of a computation specified by a `ProofStep`. + Must be picklable. + Must be frozen dataclass. + Should be small. + """ + + ... diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index db401c0f1..c50231c45 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,10 +3,13 @@ import sys import time from concurrent.futures import ProcessPoolExecutor, wait +from copy import deepcopy from dataclasses import dataclass from typing import TYPE_CHECKING -from pyk.proof.parallel import Proof, ProofStep, Prover +import pytest + +from pyk.proof.parallel import Proof, ProofResult, ProofStep, Prover from pyk.proof.proof import ProofStatus if TYPE_CHECKING: @@ -54,21 +57,28 @@ def status(self) -> ProofStatus: return ProofStatus.PENDING -@dataclass -class TreeExploreProofStep(ProofStep): +@dataclass(frozen=True) +class TreeExploreProofResult(ProofResult): + node: int + + +@dataclass(frozen=True) +class TreeExploreProofStep(ProofStep[TreeExploreProofResult]): node: int def __hash__(self) -> int: return self.node.__hash__() - def exec(self) -> int: - print(f'Advancing node {self.node}\n', file=sys.stderr) + def exec(self) -> TreeExploreProofResult: +# print(f'Advancing node {self.node}\n', file=sys.stderr) time.sleep(5) - print(f'Done advancing node {self.node}\n', file=sys.stderr) - return self.node +# print(f'Done advancing node {self.node}\n', file=sys.stderr) + return TreeExploreProofResult(self.node) + +class TreeExploreProver(Prover[TreeExploreProof, TreeExploreProofStep, TreeExploreProofResult]): + proofs: dict[TreeExploreProof, TreeExploreProver] = {} -class TreeExploreProver(Prover[TreeExploreProof, TreeExploreProofStep, int]): def __init__(self) -> None: return @@ -76,6 +86,14 @@ def steps(self, proof: TreeExploreProof) -> Iterable[TreeExploreProofStep]: def parents(node_id: int) -> Iterable[int]: return [source for source, targets in proof.edges.items() if node_id in targets] + if proof in TreeExploreProver.proofs: + assert TreeExploreProver.proofs[proof] == self + else: + TreeExploreProver.proofs[proof] = self + + if proof.target in proof.reached: + return [] + nodes = set(range(10)) return [ @@ -84,8 +102,12 @@ def parents(node_id: int) -> Iterable[int]: if node_id not in proof.reached and all(parent in proof.reached for parent in parents(node_id)) ] - def commit(self, proof: TreeExploreProof, update: int) -> None: - proof.reached.add(update) + def commit(self, proof: TreeExploreProof, update: TreeExploreProofResult) -> None: + if proof in TreeExploreProver.proofs: + assert TreeExploreProver.proofs[proof] == self + else: + TreeExploreProver.proofs[proof] = self + proof.reached.add(update.node) def prove_parallel( @@ -93,7 +115,7 @@ def prove_parallel( # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given provers: dict[TreeExploreProof, TreeExploreProver], ) -> Iterable[TreeExploreProof]: - pending: dict[Future[int], TreeExploreProof] = {} + pending: dict[Future[TreeExploreProofResult], TreeExploreProof] = {} explored: set[TreeExploreProofStep] = set() def submit(proof: TreeExploreProof, pool: Executor) -> None: @@ -119,10 +141,12 @@ def submit(proof: TreeExploreProof, pool: Executor) -> None: match proof.status: # terminate on first failure, yield partial results, etc. 
case ProofStatus.FAILED: + assert len(list(prover.steps(proof))) == 0 break case ProofStatus.PENDING: - ... + assert len(list(prover.steps(proof))) > 0 case ProofStatus.PASSED: + assert len(list(prover.steps(proof))) == 0 break submit(proof, pool) @@ -130,34 +154,58 @@ def submit(proof: TreeExploreProof, pool: Executor) -> None: return proofs +def test_multiple_provers_fails() -> None: + prover1 = TreeExploreProver() + prover2 = TreeExploreProver() + proof = TreeExploreProof() + step = list(prover1.steps(proof))[0] + with pytest.raises(AssertionError): + prover2.steps(proof) + with pytest.raises(AssertionError): + prover2.commit(proof, step.exec()) + + +def test_steps_read_only() -> None: + def assert_proof_equals(p1: TreeExploreProof, p2: TreeExploreProof) -> None: + assert p1.edges == p2.edges + assert p1.init == p2.init + assert p1.reached == p2.reached + assert p1.target == p2.target + + prover = TreeExploreProver() + proof = TreeExploreProof() + while True: + initial_proof = deepcopy(proof) + steps = prover.steps(proof) + if len(list(steps)) == 0: + break + final_proof = deepcopy(proof) + assert_proof_equals(initial_proof, final_proof) + for step in steps: + prover.commit(proof, step.exec()) + + +def test_commit_after_finished() -> None: + prover = TreeExploreProver() + proof = TreeExploreProof() + results: list[TreeExploreProofResult] = [] + while True: + steps = prover.steps(proof) + if len(list(steps)) == 0: + break + for step in steps: + result = step.exec() + results.append(result) + prover.commit(proof, result) + prover.commit(proof, result) + for result in results: + prover.commit(proof, result) + + def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof() results = prove_parallel([proof], {proof: prover}) assert len(list(results)) == 1 + assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED - - -# @pytest.fixture(scope='function') -# def proof_dir(tmp_path_factory: TempPathFactory) -> Path: -# return tmp_path_factory.mktemp('proofs') -# -# -# class TestAPRProofParallel(KCFGExploreTest, KProveTest): -# KOMPILE_MAIN_FILE = K_FILES / 'imp.k' -# -# def test_parallel_prove(self, kprove: KProve, kcfg_explore: KCFGExplore) -> None: -# spec_file = K_FILES / 'imp-simple-spec.k' -# spec_module = 'IMP-SPEC' -# claim_id = 'concrete-addition' -# -# claim = single( -# kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) -# ) -# -# proof = APRProof.from_claim(defn=kprove.definition, claim=claim, logs={}) -# prover = APRProver(proof=proof, kcfg_explore=kcfg_explore) -# new_prover = NewAPRProver(prover=prover) -# results = prove_parallel([proof], {proof: new_prover}) -# assert len(list(results)) == 1 -# assert list(results)[0].status == ProofStatus.PASSED From 89527d3e4fe7309ff7a9fd15b9a6710a69866015 Mon Sep 17 00:00:00 2001 From: Noah Watson <107630091+nwatson22@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:50:25 -0600 Subject: [PATCH 013/116] Update src/pyk/proof/parallel.py --- src/pyk/proof/parallel.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index b9d938085..ac73a811f 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -81,5 +81,4 @@ class ProofResult(ABC): Must be frozen dataclass. Should be small. """ - ... 
From a06448667183790e4415ce83e7bd60e2a7016004 Mon Sep 17 00:00:00 2001 From: Noah Watson <107630091+nwatson22@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:51:02 -0600 Subject: [PATCH 014/116] Update src/tests/integration/proof/test_parallel_prove.py --- src/tests/integration/proof/test_parallel_prove.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index c50231c45..e4bbd23ec 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -70,7 +70,6 @@ def __hash__(self) -> int: return self.node.__hash__() def exec(self) -> TreeExploreProofResult: -# print(f'Advancing node {self.node}\n', file=sys.stderr) time.sleep(5) # print(f'Done advancing node {self.node}\n', file=sys.stderr) return TreeExploreProofResult(self.node) From 513286471fffb61c56d2cbc216960daef4401bae Mon Sep 17 00:00:00 2001 From: Noah Watson <107630091+nwatson22@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:51:35 -0600 Subject: [PATCH 015/116] Update src/tests/integration/proof/test_parallel_prove.py --- src/tests/integration/proof/test_parallel_prove.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index e4bbd23ec..aa41a0446 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -70,8 +70,7 @@ def __hash__(self) -> int: return self.node.__hash__() def exec(self) -> TreeExploreProofResult: - time.sleep(5) -# print(f'Done advancing node {self.node}\n', file=sys.stderr) + time.sleep(1) return TreeExploreProofResult(self.node) From 9517856eca6fd7e09ea6b8905bb1869f07c11e03 Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 7 Nov 2023 01:52:46 +0000 Subject: [PATCH 016/116] Set Version: 0.1.492 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 1b101c737..c3f5022ec 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.491 \ No newline at end of file +0.1.492 diff --git a/pyproject.toml b/pyproject.toml index efbfed4a7..3f6d616cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.491" +version = "0.1.492" description = "" authors = [ "Runtime Verification, Inc. ", From 08c0e813b0ff89f405effb90ae9874b016d828b5 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 6 Nov 2023 20:00:59 -0600 Subject: [PATCH 017/116] Remove unused import --- src/pyk/proof/parallel.py | 1 + src/tests/integration/proof/test_parallel_prove.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index ac73a811f..b9d938085 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -81,4 +81,5 @@ class ProofResult(ABC): Must be frozen dataclass. Should be small. """ + ... 
diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index aa41a0446..30e5ad27c 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys import time from concurrent.futures import ProcessPoolExecutor, wait from copy import deepcopy From 2c5b42fcc5d465af5c59abe5975142c3e787dfa6 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 7 Nov 2023 09:25:50 -0600 Subject: [PATCH 018/116] Address comments --- src/pyk/proof/parallel.py | 70 ++++++++-- .../integration/proof/test_parallel_prove.py | 127 ++++++------------ 2 files changed, 96 insertions(+), 101 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index b9d938085..28e6bfaef 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -2,26 +2,35 @@ from abc import ABC, abstractmethod from collections.abc import Hashable -from typing import TYPE_CHECKING, Generic, TypeVar +from concurrent.futures import ProcessPoolExecutor, wait +from typing import TYPE_CHECKING, Any, Generic, TypeVar + +from pyk.proof.proof import ProofStatus if TYPE_CHECKING: from collections.abc import Iterable + from concurrent.futures import Executor, Future - from pyk.proof.proof import ProofStatus P = TypeVar('P', bound='Proof') S = TypeVar('S', bound='ProofStep') -U = TypeVar('U', bound='ProofResult') +U = TypeVar('U', bound='Any') -class Prover(ABC, Generic[P, S, U]): +class Prover(ABC, Generic[P, U]): """ Should contain all data needed to make progress on a `P` (proof). May be specific to a single `P` (proof) or may be able to handle multiple. + + Type parameter requirements: + `U` should be a description of how to make a small update to a `Proof` based on the results of a computation specified by a `ProofStep`. + `U` must be picklable. + `U` must be frozen dataclass. + `U` should be small. """ @abstractmethod - def steps(self, proof: P) -> Iterable[S]: + def steps(self, proof: P) -> Iterable[ProofStep[U]]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. @@ -74,12 +83,45 @@ def exec(self) -> U: ... -class ProofResult(ABC): - """ - Should be a description of how to make a small update to a `Proof` based on the results of a computation specified by a `ProofStep`. - Must be picklable. - Must be frozen dataclass. - Should be small. - """ - - ... +def prove_parallel( + proofs: list[Proof], + # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given + provers: dict[Proof, Prover], +) -> Iterable[Proof]: + pending: dict[Future[Any], Proof] = {} + explored: set[ProofStep] = set() + + def submit(proof: Proof, pool: Executor) -> None: + prover = provers[proof] + for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) 
+ if step in explored: + continue + explored.add(step) + future = pool.submit(step.exec) # <-- schedule steps for execution + pending[future] = proof + + with ProcessPoolExecutor(max_workers=2) as pool: + for proof in proofs: + submit(proof, pool) + + while pending: + future = list(wait(pending).done)[0] + proof = pending[future] + prover = provers[proof] + update = future.result() + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + + match proof.status: + # terminate on first failure, yield partial results, etc. + case ProofStatus.FAILED: + assert len(list(prover.steps(proof))) == 0 + break + case ProofStatus.PENDING: + assert len(list(prover.steps(proof))) > 0 + case ProofStatus.PASSED: + assert len(list(prover.steps(proof))) == 0 + break + + submit(proof, pool) + pending.pop(future) + return proofs diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 30e5ad27c..7960d2531 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,19 +1,17 @@ from __future__ import annotations import time -from concurrent.futures import ProcessPoolExecutor, wait from copy import deepcopy from dataclasses import dataclass from typing import TYPE_CHECKING import pytest -from pyk.proof.parallel import Proof, ProofResult, ProofStep, Prover +from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus if TYPE_CHECKING: from collections.abc import Iterable - from concurrent.futures import Executor, Future class TreeExploreProof(Proof): @@ -22,31 +20,11 @@ class TreeExploreProof(Proof): edges: dict[int, set[int]] reached: set[int] - def __init__(self) -> None: - self.init = 0 + def __init__(self, init: int, target: int, edges: dict[int, set[int]]) -> None: + self.init = init self.reached = set() - self.target = 9 - self.edges = {} - - # 0 - # / \ - # 1 2 - # / \ - # 3 4 - # / \ \ - # 5 6 7 - # / \ - # 8 9 - self.edges[0] = {1, 2} - self.edges[1] = set() - self.edges[2] = {3, 4} - self.edges[3] = {5, 6} - self.edges[4] = {7} - self.edges[5] = set() - self.edges[6] = set() - self.edges[7] = {8, 9} - self.edges[8] = set() - self.edges[9] = set() + self.target = target + self.edges = edges @property def status(self) -> ProofStatus: @@ -57,23 +35,18 @@ def status(self) -> ProofStatus: @dataclass(frozen=True) -class TreeExploreProofResult(ProofResult): - node: int - - -@dataclass(frozen=True) -class TreeExploreProofStep(ProofStep[TreeExploreProofResult]): +class TreeExploreProofStep(ProofStep[int]): node: int def __hash__(self) -> int: return self.node.__hash__() - def exec(self) -> TreeExploreProofResult: + def exec(self) -> int: time.sleep(1) - return TreeExploreProofResult(self.node) + return self.node -class TreeExploreProver(Prover[TreeExploreProof, TreeExploreProofStep, TreeExploreProofResult]): +class TreeExploreProver(Prover[TreeExploreProof, int]): proofs: dict[TreeExploreProof, TreeExploreProver] = {} def __init__(self) -> None: @@ -99,62 +72,42 @@ def parents(node_id: int) -> Iterable[int]: if node_id not in proof.reached and all(parent in proof.reached for parent in parents(node_id)) ] - def commit(self, proof: TreeExploreProof, update: TreeExploreProofResult) -> None: + def commit(self, proof: TreeExploreProof, update: int) -> None: if proof in TreeExploreProver.proofs: assert TreeExploreProver.proofs[proof] == self else: TreeExploreProver.proofs[proof] = self - 
proof.reached.add(update.node) - - -def prove_parallel( - proofs: list[TreeExploreProof], - # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given - provers: dict[TreeExploreProof, TreeExploreProver], -) -> Iterable[TreeExploreProof]: - pending: dict[Future[TreeExploreProofResult], TreeExploreProof] = {} - explored: set[TreeExploreProofStep] = set() - - def submit(proof: TreeExploreProof, pool: Executor) -> None: - prover = provers[proof] - for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) - if step in explored: - continue - explored.add(step) - future = pool.submit(step.exec) # <-- schedule steps for execution - pending[future] = proof - - with ProcessPoolExecutor(max_workers=2) as pool: - for proof in proofs: - submit(proof, pool) - - while pending: - future = list(wait(pending).done)[0] - proof = pending[future] - prover = provers[proof] - update = future.result() - prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) - - match proof.status: - # terminate on first failure, yield partial results, etc. - case ProofStatus.FAILED: - assert len(list(prover.steps(proof))) == 0 - break - case ProofStatus.PENDING: - assert len(list(prover.steps(proof))) > 0 - case ProofStatus.PASSED: - assert len(list(prover.steps(proof))) == 0 - break - - submit(proof, pool) - pending.pop(future) - return proofs + proof.reached.add(update) + + +def simple_tree() -> dict[int, set[int]]: + edges: dict[int, set[int]] = {} + # 0 + # / \ + # 1 2 + # / \ + # 3 4 + # / \ \ + # 5 6 7 + # / \ + # 8 9 + edges[0] = {1, 2} + edges[1] = set() + edges[2] = {3, 4} + edges[3] = {5, 6} + edges[4] = {7} + edges[5] = set() + edges[6] = set() + edges[7] = {8, 9} + edges[8] = set() + edges[9] = set() + return edges def test_multiple_provers_fails() -> None: prover1 = TreeExploreProver() prover2 = TreeExploreProver() - proof = TreeExploreProof() + proof = TreeExploreProof(0, 9, simple_tree()) step = list(prover1.steps(proof))[0] with pytest.raises(AssertionError): prover2.steps(proof) @@ -170,7 +123,7 @@ def assert_proof_equals(p1: TreeExploreProof, p2: TreeExploreProof) -> None: assert p1.target == p2.target prover = TreeExploreProver() - proof = TreeExploreProof() + proof = TreeExploreProof(0, 9, simple_tree()) while True: initial_proof = deepcopy(proof) steps = prover.steps(proof) @@ -184,8 +137,8 @@ def assert_proof_equals(p1: TreeExploreProof, p2: TreeExploreProof) -> None: def test_commit_after_finished() -> None: prover = TreeExploreProver() - proof = TreeExploreProof() - results: list[TreeExploreProofResult] = [] + proof = TreeExploreProof(0, 9, simple_tree()) + results: list[int] = [] while True: steps = prover.steps(proof) if len(list(steps)) == 0: @@ -201,7 +154,7 @@ def test_commit_after_finished() -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() - proof = TreeExploreProof() + proof = TreeExploreProof(0, 9, simple_tree()) results = prove_parallel([proof], {proof: prover}) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 From ff9d0c86a25feab5ae6636af0b13757c0e0fc11e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 7 Nov 2023 09:31:01 -0600 Subject: [PATCH 019/116] Remove unused typevar --- src/pyk/proof/parallel.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 28e6bfaef..ce59eca1a 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -13,7 +13,6 @@ P = 
TypeVar('P', bound='Proof') -S = TypeVar('S', bound='ProofStep') U = TypeVar('U', bound='Any') From c8072a49d0dde5f5c10ffe5e941f090feb917e1c Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 7 Nov 2023 12:42:55 -0600 Subject: [PATCH 020/116] Address comments --- src/pyk/proof/parallel.py | 31 +++++------ .../integration/proof/test_parallel_prove.py | 53 +------------------ 2 files changed, 18 insertions(+), 66 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index ce59eca1a..153e4fed1 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -13,7 +13,7 @@ P = TypeVar('P', bound='Proof') -U = TypeVar('U', bound='Any') +U = TypeVar('U') class Prover(ABC, Generic[P, U]): @@ -83,30 +83,31 @@ def exec(self) -> U: def prove_parallel( - proofs: list[Proof], - # We need a way to map proofs to provers, but for simplicity, I'll assume it as a given - provers: dict[Proof, Prover], + proofs: dict[str, Proof], + provers: dict[str, Prover], ) -> Iterable[Proof]: - pending: dict[Future[Any], Proof] = {} + pending: dict[Future[Any], str] = {} explored: set[ProofStep] = set() - def submit(proof: Proof, pool: Executor) -> None: - prover = provers[proof] + def submit(proof_id: str, pool: Executor) -> None: + proof = proofs[proof_id] + prover = provers[proof_id] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) if step in explored: continue explored.add(step) future = pool.submit(step.exec) # <-- schedule steps for execution - pending[future] = proof + pending[future] = proof_id with ProcessPoolExecutor(max_workers=2) as pool: - for proof in proofs: - submit(proof, pool) + for proof_id in proofs.keys(): + submit(proof_id, pool) while pending: - future = list(wait(pending).done)[0] - proof = pending[future] - prover = provers[proof] + future = wait(pending).done.pop() + proof_id = pending[future] + proof = proofs[proof_id] + prover = provers[proof_id] update = future.result() prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) 
@@ -121,6 +122,6 @@ def submit(proof: Proof, pool: Executor) -> None: assert len(list(prover.steps(proof))) == 0 break - submit(proof, pool) + submit(proof_id, pool) pending.pop(future) - return proofs + return proofs.values() diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 7960d2531..e92e35255 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,7 +1,6 @@ from __future__ import annotations import time -from copy import deepcopy from dataclasses import dataclass from typing import TYPE_CHECKING @@ -81,7 +80,6 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def simple_tree() -> dict[int, set[int]]: - edges: dict[int, set[int]] = {} # 0 # / \ # 1 2 @@ -91,17 +89,7 @@ def simple_tree() -> dict[int, set[int]]: # 5 6 7 # / \ # 8 9 - edges[0] = {1, 2} - edges[1] = set() - edges[2] = {3, 4} - edges[3] = {5, 6} - edges[4] = {7} - edges[5] = set() - edges[6] = set() - edges[7] = {8, 9} - edges[8] = set() - edges[9] = set() - return edges + return {0: {1, 2}, 1: set(), 2: {3, 4}, 3: {5, 6}, 4: {7}, 5: set(), 6: set(), 7: {8, 9}, 8: set(), 9: set()} def test_multiple_provers_fails() -> None: @@ -115,47 +103,10 @@ def test_multiple_provers_fails() -> None: prover2.commit(proof, step.exec()) -def test_steps_read_only() -> None: - def assert_proof_equals(p1: TreeExploreProof, p2: TreeExploreProof) -> None: - assert p1.edges == p2.edges - assert p1.init == p2.init - assert p1.reached == p2.reached - assert p1.target == p2.target - - prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, simple_tree()) - while True: - initial_proof = deepcopy(proof) - steps = prover.steps(proof) - if len(list(steps)) == 0: - break - final_proof = deepcopy(proof) - assert_proof_equals(initial_proof, final_proof) - for step in steps: - prover.commit(proof, step.exec()) - - -def test_commit_after_finished() -> None: - prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, simple_tree()) - results: list[int] = [] - while True: - steps = prover.steps(proof) - if len(list(steps)) == 0: - break - for step in steps: - result = step.exec() - results.append(result) - prover.commit(proof, result) - prover.commit(proof, result) - for result in results: - prover.commit(proof, result) - - def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, simple_tree()) - results = prove_parallel([proof], {proof: prover}) + results = prove_parallel({'proof1': proof}, {'proof1': prover}) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED From fdb4233677215d7e4256d1c193fc30e9f21d767c Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 7 Nov 2023 18:43:14 +0000 Subject: [PATCH 021/116] Set Version: 0.1.493 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index c3f5022ec..a8662cc26 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.492 +0.1.493 diff --git a/pyproject.toml b/pyproject.toml index 3f6d616cc..95f62803b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.492" +version = "0.1.493" description = "" authors = [ "Runtime Verification, Inc. 
", From 7a63164b137433ad9438c9e4b13cdd4bb00fe0e3 Mon Sep 17 00:00:00 2001 From: devops Date: Wed, 8 Nov 2023 16:32:59 +0000 Subject: [PATCH 022/116] Set Version: 0.1.494 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index a8662cc26..b1afbaa3e 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.493 +0.1.494 diff --git a/pyproject.toml b/pyproject.toml index 95f62803b..c0dc91ce1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.493" +version = "0.1.494" description = "" authors = [ "Runtime Verification, Inc. ", From 296e6f8890ee243224c83caa7c3abd3938966749 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 12:54:14 -0600 Subject: [PATCH 023/116] Address comments --- src/pyk/proof/parallel.py | 7 ++-- .../integration/proof/test_parallel_prove.py | 32 +++---------------- 2 files changed, 8 insertions(+), 31 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 153e4fed1..c154037c3 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,7 +1,6 @@ from __future__ import annotations from abc import ABC, abstractmethod -from collections.abc import Hashable from concurrent.futures import ProcessPoolExecutor, wait from typing import TYPE_CHECKING, Any, Generic, TypeVar @@ -64,9 +63,10 @@ def status(self) -> ProofStatus: ... -class ProofStep(ABC, Hashable, Generic[U]): +class ProofStep(ABC, Generic[U]): """ Should be a description of a computation needed to make progress on a `Proof`. + Must be hashable. Must be frozen dataclass. Must be pickable. Should be small. @@ -104,7 +104,8 @@ def submit(proof_id: str, pool: Executor) -> None: submit(proof_id, pool) while pending: - future = wait(pending).done.pop() + done, _ = wait(pending, return_when='FIRST_COMPLETED') + future = done.pop() proof_id = pending[future] proof = proofs[proof_id] prover = provers[proof_id] diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index e92e35255..4587c3955 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,11 +1,10 @@ from __future__ import annotations +import sys import time from dataclasses import dataclass from typing import TYPE_CHECKING -import pytest - from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus @@ -37,17 +36,14 @@ def status(self) -> ProofStatus: class TreeExploreProofStep(ProofStep[int]): node: int - def __hash__(self) -> int: - return self.node.__hash__() - def exec(self) -> int: - time.sleep(1) + print(f'advancing node {self.node}', file=sys.stderr) + time.sleep(self.node % 5) + print(f'done node {self.node}', file=sys.stderr) return self.node class TreeExploreProver(Prover[TreeExploreProof, int]): - proofs: dict[TreeExploreProof, TreeExploreProver] = {} - def __init__(self) -> None: return @@ -55,11 +51,6 @@ def steps(self, proof: TreeExploreProof) -> Iterable[TreeExploreProofStep]: def parents(node_id: int) -> Iterable[int]: return [source for source, targets in proof.edges.items() if node_id in targets] - if proof in TreeExploreProver.proofs: - assert TreeExploreProver.proofs[proof] == self - else: - TreeExploreProver.proofs[proof] = self - if proof.target in proof.reached: return [] @@ -72,10 +63,6 @@ def parents(node_id: 
int) -> Iterable[int]: ] def commit(self, proof: TreeExploreProof, update: int) -> None: - if proof in TreeExploreProver.proofs: - assert TreeExploreProver.proofs[proof] == self - else: - TreeExploreProver.proofs[proof] = self proof.reached.add(update) @@ -92,17 +79,6 @@ def simple_tree() -> dict[int, set[int]]: return {0: {1, 2}, 1: set(), 2: {3, 4}, 3: {5, 6}, 4: {7}, 5: set(), 6: set(), 7: {8, 9}, 8: set(), 9: set()} -def test_multiple_provers_fails() -> None: - prover1 = TreeExploreProver() - prover2 = TreeExploreProver() - proof = TreeExploreProof(0, 9, simple_tree()) - step = list(prover1.steps(proof))[0] - with pytest.raises(AssertionError): - prover2.steps(proof) - with pytest.raises(AssertionError): - prover2.commit(proof, step.exec()) - - def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, simple_tree()) From 7945c7e505a82662c03ee8bf2133dffd1d285f43 Mon Sep 17 00:00:00 2001 From: devops Date: Thu, 9 Nov 2023 18:54:37 +0000 Subject: [PATCH 024/116] Set Version: 0.1.495 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index b1afbaa3e..576d33993 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.494 +0.1.495 diff --git a/pyproject.toml b/pyproject.toml index c0dc91ce1..f628bfac2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.494" +version = "0.1.495" description = "" authors = [ "Runtime Verification, Inc. ", From a95458b3ec152992959c3ccc96769c0d2d5d669d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 13:43:15 -0600 Subject: [PATCH 025/116] Remove log messages --- src/tests/integration/proof/test_parallel_prove.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 4587c3955..fd7e20f4a 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -37,9 +37,7 @@ class TreeExploreProofStep(ProofStep[int]): node: int def exec(self) -> int: - print(f'advancing node {self.node}', file=sys.stderr) - time.sleep(self.node % 5) - print(f'done node {self.node}', file=sys.stderr) + time.sleep(1) return self.node From 7c8c5388302660cbc536d398603124a7d4bb8f1d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 13:44:06 -0600 Subject: [PATCH 026/116] Fix formatting --- src/tests/integration/proof/test_parallel_prove.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index fd7e20f4a..7f75a339c 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys import time from dataclasses import dataclass from typing import TYPE_CHECKING From 17ea4173f7c991e6b3482e84edb6b3c6defb205e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 13:57:50 -0600 Subject: [PATCH 027/116] Add APRProof/Prover implementations of Prover interface --- src/pyk/proof/parallel.py | 5 ++- src/pyk/proof/reachability.py | 76 ++++++++++++++++++++++++++++++++++- 2 files changed, 78 insertions(+), 3 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 153e4fed1..735fd88b6 100644 --- 
a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -3,7 +3,7 @@ from abc import ABC, abstractmethod from collections.abc import Hashable from concurrent.futures import ProcessPoolExecutor, wait -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar from pyk.proof.proof import ProofStatus @@ -85,6 +85,7 @@ def exec(self) -> U: def prove_parallel( proofs: dict[str, Proof], provers: dict[str, Prover], + init: Callable[..., None] = lambda *args: None, ) -> Iterable[Proof]: pending: dict[Future[Any], str] = {} explored: set[ProofStep] = set() @@ -99,7 +100,7 @@ def submit(proof_id: str, pool: Executor) -> None: future = pool.submit(step.exec) # <-- schedule steps for execution pending[future] = proof_id - with ProcessPoolExecutor(max_workers=2) as pool: + with ProcessPoolExecutor(max_workers=2, initializer=init) as pool: for proof_id in proofs.keys(): submit(proof_id, pool) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index f1a7e1b33..15c066a68 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -6,6 +6,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING +import pyk.proof.parallel as parallel from pyk.kore.rpc import LogEntry from ..kast.inner import KInner, KRewrite, KSort, Subst @@ -27,6 +28,7 @@ from ..cterm import CTerm from ..kast.outer import KDefinition, KFlatModuleList from ..kcfg import KCFGExplore + from ..kcfg.explore import ExtendResult from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint @@ -35,7 +37,7 @@ _LOGGER: Final = logging.getLogger(__name__) -class APRProof(Proof, KCFGExploration): +class APRProof(Proof, KCFGExploration, parallel.Proof): """APRProof and APRProver implement all-path reachability logic, as introduced by A. Stefanescu and others in their paper 'All-Path Reachability Logic': https://doi.org/10.23638/LMCS-15(2:5)2019 @@ -1053,3 +1055,75 @@ def lines(self) -> list[str]: f' bounded: {self.bounded}', f'Subproofs: {self.subproofs}', ] + + +@dataclass(frozen=True) +class APRProofResult: + extend_result: ExtendResult + node_id: int + + +class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): + prover: APRProver + + def steps(self, proof: APRProof) -> Iterable[APRProofStep]: + """ + Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. + If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. + Must not modify `self` or `proof`. + The output of this function must only change with calls to `self.commit()`. + """ + steps: list[APRProofStep] = [] + for pending_node in proof.pending: + module_name = ( + self.prover.circularities_module_name + if self.prover.nonzero_depth(pending_node) + else self.prover.dependencies_module_name + ) + steps.append(APRProofStep(cterm=pending_node.cterm, node_id=pending_node.id, module_name=module_name)) + return steps + + def commit(self, proof: APRProof, update: APRProofResult) -> None: + """ + Should update `proof` according to `update`. + If `steps()` or `commit()` has been called on a proof `proof`, `commit()` may never again be called on `proof`. + Must only be called with an `update` that was returned by `step.execute()` where `step` was returned by `self.steps(proof)`. 
+ Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. + """ + ... + + +@dataclass(frozen=True) +class APRProofExtendData: + kcfg_explore: KCFGExplore + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + execute_depth: int + + +data: APRProofExtendData + + +@dataclass(frozen=True, eq=True) +class APRProofStep(parallel.ProofStep[APRProofResult]): + cterm: CTerm + node_id: int + module_name: str + + def __hash__(self) -> int: + return hash((self.cterm, self.node_id)) + + def exec(self) -> APRProofResult: + """ + Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. + Allowed to be nondeterministic. + Able to be called on any `ProofStep` returned by `prover.steps(proof)`. + """ + result = data.kcfg_explore.extend_cterm( + self.cterm, + module_name=self.module_name, + execute_depth=data.execute_depth, + terminal_rules=data.terminal_rules, + cut_point_rules=data.cut_point_rules, + ) + return APRProofResult(result, self.node_id) From 4445d29e55d7383d73b84a2c9f7ae1641980ac92 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 16:49:37 -0600 Subject: [PATCH 028/116] Address comments --- src/pyk/proof/parallel.py | 2 +- .../integration/proof/test_parallel_prove.py | 42 +++++++++++-------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index c154037c3..001f2e081 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -100,7 +100,7 @@ def submit(proof_id: str, pool: Executor) -> None: pending[future] = proof_id with ProcessPoolExecutor(max_workers=2) as pool: - for proof_id in proofs.keys(): + for proof_id in proofs: submit(proof_id, pool) while pending: diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 7f75a339c..d4fc09ec0 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -2,14 +2,10 @@ import time from dataclasses import dataclass -from typing import TYPE_CHECKING from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus -if TYPE_CHECKING: - from collections.abc import Iterable - class TreeExploreProof(Proof): init: int @@ -44,8 +40,8 @@ class TreeExploreProver(Prover[TreeExploreProof, int]): def __init__(self) -> None: return - def steps(self, proof: TreeExploreProof) -> Iterable[TreeExploreProofStep]: - def parents(node_id: int) -> Iterable[int]: + def steps(self, proof: TreeExploreProof) -> list[TreeExploreProofStep]: + def parents(node_id: int) -> list[int]: return [source for source, targets in proof.edges.items() if node_id in targets] if proof.target in proof.reached: @@ -63,22 +59,32 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: proof.reached.add(update) -def simple_tree() -> dict[int, set[int]]: - # 0 - # / \ - # 1 2 - # / \ - # 3 4 - # / \ \ - # 5 6 7 - # / \ - # 8 9 - return {0: {1, 2}, 1: set(), 2: {3, 4}, 3: {5, 6}, 4: {7}, 5: set(), 6: set(), 7: {8, 9}, 8: set(), 9: set()} +# 0 +# / \ +# 1 2 +# / \ +# 3 4 +# / \ \ +# 5 6 7 +# / \ +# 8 9 +SIMPLE_TREE: dict[int, set[int]] = { + 0: {1, 2}, + 1: set(), + 2: {3, 4}, + 3: {5, 6}, + 4: {7}, + 5: set(), + 6: set(), + 7: {8, 9}, + 8: set(), + 9: set(), +} def test_parallel_prove() -> None: 
prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, simple_tree()) + proof = TreeExploreProof(0, 9, SIMPLE_TREE) results = prove_parallel({'proof1': proof}, {'proof1': prover}) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 From 0cda3c414e5ddeaaef11826fb865136c10d32785 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 18:03:55 -0600 Subject: [PATCH 029/116] Fix working with multiple proofs at once --- src/pyk/proof/parallel.py | 11 +++++------ .../integration/proof/test_parallel_prove.py | 18 +++++++++++++++++- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 001f2e081..d60b7ec65 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -85,21 +85,22 @@ def exec(self) -> U: def prove_parallel( proofs: dict[str, Proof], provers: dict[str, Prover], + max_workers: int, ) -> Iterable[Proof]: pending: dict[Future[Any], str] = {} - explored: set[ProofStep] = set() + explored: set[tuple[str, ProofStep]] = set() def submit(proof_id: str, pool: Executor) -> None: proof = proofs[proof_id] prover = provers[proof_id] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) - if step in explored: + if (proof_id, step) in explored: continue - explored.add(step) + explored.add((proof_id, step)) future = pool.submit(step.exec) # <-- schedule steps for execution pending[future] = proof_id - with ProcessPoolExecutor(max_workers=2) as pool: + with ProcessPoolExecutor(max_workers=max_workers) as pool: for proof_id in proofs: submit(proof_id, pool) @@ -116,12 +117,10 @@ def submit(proof_id: str, pool: Executor) -> None: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: assert len(list(prover.steps(proof))) == 0 - break case ProofStatus.PENDING: assert len(list(prover.steps(proof))) > 0 case ProofStatus.PASSED: assert len(list(prover.steps(proof))) == 0 - break submit(proof_id, pool) pending.pop(future) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index d4fc09ec0..85220f6ce 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -85,7 +85,23 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE) - results = prove_parallel({'proof1': proof}, {'proof1': prover}) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED + + +def test_multiple_proofs() -> None: + prover = TreeExploreProver() + proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE) for i in range(3)} + provers_map = {f'proof{i}': prover for i in range(3)} + results = prove_parallel( + proofs, + provers_map, + max_workers=4, + ) + assert len(list(results)) == 3 + for proof in proofs.values(): + assert len(list(prover.steps(proof))) == 0 + for result in results: + assert result.status == ProofStatus.PASSED From a3c210fc4c9e06bc11e63cd3b8fa21e07f0a47b3 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 18:05:52 -0600 Subject: [PATCH 030/116] Add missing requirements for status --- src/pyk/proof/parallel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 
d60b7ec65..2c7401173 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -59,6 +59,8 @@ def status(self) -> ProofStatus: ProofStatus.FAILED: the claim has not been proven, but the proof cannot advance further. ProofStatus.PENDING: the claim has not yet been proven, but the proof can advance further. Must not change, except with calls to `prover.commit(self, update)` for some `prover,update`. + If proof.status() is ProofStatus.PENDING, prover.steps(proof) must be nonempty. + Once proof.status() is ProofStatus.PASSED or ProofStatus.FAILED, it must remain so. """ ... From 366e89e814a265d5518acb71770bbe9827e905eb Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 18:20:45 -0600 Subject: [PATCH 031/116] Improve typing on prove_parallel --- src/pyk/proof/parallel.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 2c7401173..1ed86d846 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -7,7 +7,7 @@ from pyk.proof.proof import ProofStatus if TYPE_CHECKING: - from collections.abc import Iterable + from collections.abc import Iterable, Mapping from concurrent.futures import Executor, Future @@ -85,8 +85,8 @@ def exec(self) -> U: def prove_parallel( - proofs: dict[str, Proof], - provers: dict[str, Prover], + proofs: Mapping[str, Proof], + provers: Mapping[str, Prover], max_workers: int, ) -> Iterable[Proof]: pending: dict[Future[Any], str] = {} From 199e46ff41a6c296f08e9a3c92ea102fb33ff259 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 20:02:39 -0600 Subject: [PATCH 032/116] Add fail_fast and max_iterations, and tests for these --- src/pyk/proof/parallel.py | 16 ++++++- .../integration/proof/test_parallel_prove.py | 43 ++++++++++++++++--- 2 files changed, 53 insertions(+), 6 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 1ed86d846..459b281bc 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -60,6 +60,7 @@ def status(self) -> ProofStatus: ProofStatus.PENDING: the claim has not yet been proven, but the proof can advance further. Must not change, except with calls to `prover.commit(self, update)` for some `prover,update`. If proof.status() is ProofStatus.PENDING, prover.steps(proof) must be nonempty. + If proof.status() is ProofStatus.PASSED, prover.steps(proof) must be empty. Once proof.status() is ProofStatus.PASSED or ProofStatus.FAILED, it must remain so. """ ... @@ -88,9 +89,12 @@ def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], max_workers: int, + fail_fast: bool = False, + max_iterations: int | None = None, ) -> Iterable[Proof]: pending: dict[Future[Any], str] = {} explored: set[tuple[str, ProofStep]] = set() + iterations: dict[str, int] = {} def submit(proof_id: str, pool: Executor) -> None: proof = proofs[proof_id] @@ -105,6 +109,7 @@ def submit(proof_id: str, pool: Executor) -> None: with ProcessPoolExecutor(max_workers=max_workers) as pool: for proof_id in proofs: submit(proof_id, pool) + iterations[proof_id] = 0 while pending: done, _ = wait(pending, return_when='FIRST_COMPLETED') @@ -113,12 +118,21 @@ def submit(proof_id: str, pool: Executor) -> None: proof = proofs[proof_id] prover = provers[proof_id] update = future.result() + + if max_iterations is not None and iterations[proof_id] >= max_iterations: + pending.pop(future) + continue + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) 
+ iterations[proof_id] += 1 + match proof.status: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: - assert len(list(prover.steps(proof))) == 0 + if fail_fast: + pending.pop(future) + continue case ProofStatus.PENDING: assert len(list(prover.steps(proof))) > 0 case ProofStatus.PASSED: diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 85220f6ce..94c03cb03 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,6 +3,8 @@ import time from dataclasses import dataclass +import pytest + from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus @@ -12,16 +14,20 @@ class TreeExploreProof(Proof): target: int edges: dict[int, set[int]] reached: set[int] + failure_nodes: set[int] - def __init__(self, init: int, target: int, edges: dict[int, set[int]]) -> None: + def __init__(self, init: int, target: int, edges: dict[int, set[int]], failure_nodes: set[int]) -> None: self.init = init self.reached = set() self.target = target self.edges = edges + self.failure_nodes = failure_nodes @property def status(self) -> ProofStatus: - if self.target in self.reached: + if len(self.reached.intersection(self.failure_nodes)) > 0: + return ProofStatus.FAILED + elif self.target in self.reached: return ProofStatus.PASSED else: return ProofStatus.PENDING @@ -84,16 +90,43 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, SIMPLE_TREE) + proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED -def test_multiple_proofs() -> None: +def test_parallel_fail() -> None: + prover = TreeExploreProver() + proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) + assert len(list(results)) == 1 + assert len(list(prover.steps(proof))) == 0 + assert list(results)[0].status == ProofStatus.FAILED + + +def test_parallel_fail_fast() -> None: + prover = TreeExploreProver() + proof = TreeExploreProof(0, 9, SIMPLE_TREE, {3}) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, fail_fast=True) + assert len(list(results)) == 1 + assert len(list(prover.steps(proof))) > 0 + assert list(results)[0].status == ProofStatus.FAILED + + +@pytest.mark.parametrize('iterations', [0, 1, 6, 7, 8, 9, 10]) +def test_parallel_max_iterations(iterations: int) -> None: + prover = TreeExploreProver() + proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, max_iterations=iterations) + assert len(list(results)) == 1 + assert len(list(proof.reached)) == iterations + + +def test_parallel_multiple_proofs() -> None: prover = TreeExploreProver() - proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE) for i in range(3)} + proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE, set()) for i in range(3)} provers_map = {f'proof{i}': prover for i in range(3)} results = prove_parallel( proofs, From 0d7e523cf3915bc9056cbb20f4b4f59b6a6ac0f2 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 9 Nov 2023 20:03:38 -0600 Subject: [PATCH 033/116] Remove test cases 
---
 src/tests/integration/proof/test_parallel_prove.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py
index 94c03cb03..689cfe240 100644
--- a/src/tests/integration/proof/test_parallel_prove.py
+++ b/src/tests/integration/proof/test_parallel_prove.py
@@ -115,7 +115,7 @@ def test_parallel_fail_fast() -> None:
     assert list(results)[0].status == ProofStatus.FAILED


-@pytest.mark.parametrize('iterations', [0, 1, 6, 7, 8, 9, 10])
+@pytest.mark.parametrize('iterations', [0, 1, 9, 10])
 def test_parallel_max_iterations(iterations: int) -> None:
     prover = TreeExploreProver()
     proof = TreeExploreProof(0, 9, SIMPLE_TREE, set())

From 5777bc6ad7bacabc88ace09d4cf20f2201351c4e Mon Sep 17 00:00:00 2001
From: Noah Watson
Date: Thu, 9 Nov 2023 20:05:26 -0600
Subject: [PATCH 034/116] Add to AprProofStep exec

---
 src/pyk/proof/parallel.py     | 17 +++++++++++-
 src/pyk/proof/reachability.py | 52 +++++++++++++++++++++++++++++++----
 2 files changed, 63 insertions(+), 6 deletions(-)

diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py
index f52fb2cb1..2161f9adf 100644
--- a/src/pyk/proof/parallel.py
+++ b/src/pyk/proof/parallel.py
@@ -51,6 +51,11 @@ def commit(self, proof: P, update: U) -> None:
 class Proof(ABC):
     """Should represent a computer proof of a single claim"""

+    iterations: int
+
+    def __init__(self) -> None:
+        self.iterations = 0
+
     @property
     @abstractmethod
     def status(self) -> ProofStatus:
@@ -59,6 +64,8 @@ def status(self) -> ProofStatus:
         ProofStatus.FAILED: the claim has not been proven, but the proof cannot advance further.
         ProofStatus.PENDING: the claim has not yet been proven, but the proof can advance further.
         Must not change, except with calls to `prover.commit(self, update)` for some `prover,update`.
+        If proof.status() is ProofStatus.PENDING, prover.steps(proof) must be nonempty.
+        Once proof.status() is ProofStatus.PASSED or ProofStatus.FAILED, it must remain so.
         """
         ...
@@ -86,6 +93,8 @@ def prove_parallel(
     proofs: dict[str, Proof],
     provers: dict[str, Prover],
     init: Callable[..., None] = lambda *args: None,
+    max_iterations: int | None = None,
+    fail_fast: bool = False,
 ) -> Iterable[Proof]:
     pending: dict[Future[Any], str] = {}
     explored: set[ProofStep] = set()
@@ -102,6 +111,7 @@ def submit(proof_id: str, pool: Executor) -> None:

     with ProcessPoolExecutor(max_workers=2, initializer=init) as pool:
         for proof_id in proofs.keys():
+            iterations[proof_id] = 0
             submit(proof_id, pool)

         while pending:
@@ -112,18 +122,23 @@ def submit(proof_id: str, pool: Executor) -> None:
             prover = provers[proof_id]
             update = future.result()
             prover.commit(proof, update)  # <-- update the proof (can be in-memory, access disk with locking, ...)
+            proof.iterations += 1

             match proof.status:
                 # terminate on first failure, yield partial results, etc.
case ProofStatus.FAILED: assert len(list(prover.steps(proof))) == 0 - break + if fail_fast: + break case ProofStatus.PENDING: assert len(list(prover.steps(proof))) > 0 case ProofStatus.PASSED: assert len(list(prover.steps(proof))) == 0 break + if max_iterations and proof.iterations >= max_iterations: + continue + if fail_fast and submit(proof_id, pool) pending.pop(future) return proofs.values() diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 15c066a68..9c962c4f5 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -3,6 +3,7 @@ import graphlib import json import logging +from abc import ABC from dataclasses import dataclass from typing import TYPE_CHECKING @@ -25,7 +26,7 @@ from pathlib import Path from typing import Any, Final, TypeVar - from ..cterm import CTerm + from ..cterm import CSubst, CTerm from ..kast.outer import KDefinition, KFlatModuleList from ..kcfg import KCFGExplore from ..kcfg.explore import ExtendResult @@ -51,6 +52,7 @@ class APRProof(Proof, KCFGExploration, parallel.Proof): logs: dict[int, tuple[LogEntry, ...]] circularity: bool failure_info: APRFailureInfo | None + iterations: int def __init__( self, @@ -76,6 +78,7 @@ def __init__( self.circularity = circularity self.node_refutations = {} self.kcfg.cfg_dir = self.proof_subdir / 'kcfg' if self.proof_subdir else None + self.iterations = 0 if self.proof_dir is not None and self.proof_subdir is not None: ensure_dir_path(self.proof_dir) @@ -1058,14 +1061,29 @@ def lines(self) -> list[str]: @dataclass(frozen=True) -class APRProofResult: +class APRProofResult(ABC): + ... + + +@dataclass(frozen=True) +class APRProofExtendResult(APRProofResult): extend_result: ExtendResult node_id: int +@dataclass(frozen=True) +class APRProofSubsumeResult(APRProofResult): + node_id: int + subsume_node_id: int + csubst: CSubst + + class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): prover: APRProver + def __init__(self, prover: APRProver) -> None: + self.prover = prover + def steps(self, proof: APRProof) -> Iterable[APRProofStep]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. @@ -1074,13 +1092,22 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: The output of this function must only change with calls to `self.commit()`. """ steps: list[APRProofStep] = [] + target_node = proof.kcfg.node(proof.target) for pending_node in proof.pending: module_name = ( self.prover.circularities_module_name if self.prover.nonzero_depth(pending_node) else self.prover.dependencies_module_name ) - steps.append(APRProofStep(cterm=pending_node.cterm, node_id=pending_node.id, module_name=module_name)) + steps.append( + APRProofStep( + cterm=pending_node.cterm, + node_id=pending_node.id, + module_name=module_name, + target_cterm=target_node.cterm, + target_node_id=target_node.id, + ) + ) return steps def commit(self, proof: APRProof, update: APRProofResult) -> None: @@ -1090,7 +1117,15 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: Must only be called with an `update` that was returned by `step.execute()` where `step` was returned by `self.steps(proof)`. 
Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. """ - ... + + # Extend proof as per `update` + if type(update) is APRProofExtendResult: + node = proof.kcfg.node(update.node_id) + self.prover.kcfg_explore.extend_kcfg( + extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs + ) + elif type(update) is APRProofSubsumeResult: + proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) @dataclass(frozen=True) @@ -1109,6 +1144,8 @@ class APRProofStep(parallel.ProofStep[APRProofResult]): cterm: CTerm node_id: int module_name: str + target_cterm: CTerm + target_node_id: int def __hash__(self) -> int: return hash((self.cterm, self.node_id)) @@ -1119,6 +1156,11 @@ def exec(self) -> APRProofResult: Allowed to be nondeterministic. Able to be called on any `ProofStep` returned by `prover.steps(proof)`. """ + + csubst = data.kcfg_explore.cterm_implies(self.cterm, self.target_cterm) + if csubst is not None: + return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) + result = data.kcfg_explore.extend_cterm( self.cterm, module_name=self.module_name, @@ -1126,4 +1168,4 @@ def exec(self) -> APRProofResult: terminal_rules=data.terminal_rules, cut_point_rules=data.cut_point_rules, ) - return APRProofResult(result, self.node_id) + return APRProofExtendResult(result, self.node_id) From e65992890f1b763bb935838974213480ca920141 Mon Sep 17 00:00:00 2001 From: devops Date: Fri, 10 Nov 2023 16:23:25 +0000 Subject: [PATCH 035/116] Set Version: 0.1.497 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 833597fbe..2b78369ba 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.496 +0.1.497 diff --git a/pyproject.toml b/pyproject.toml index 01a2f653e..2dc031974 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.496" +version = "0.1.497" description = "" authors = [ "Runtime Verification, Inc. 
", From c2744a5c30f779ad33958fc9e6fb18c870067dbb Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 10 Nov 2023 12:35:12 -0600 Subject: [PATCH 036/116] Fix possibility of TreeExloreProof to switch status after passing/failing --- src/tests/integration/proof/test_parallel_prove.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 689cfe240..c50f1c4e5 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -15,6 +15,7 @@ class TreeExploreProof(Proof): edges: dict[int, set[int]] reached: set[int] failure_nodes: set[int] + reached_target_before_failing: bool def __init__(self, init: int, target: int, edges: dict[int, set[int]], failure_nodes: set[int]) -> None: self.init = init @@ -22,13 +23,14 @@ def __init__(self, init: int, target: int, edges: dict[int, set[int]], failure_n self.target = target self.edges = edges self.failure_nodes = failure_nodes + self.reached_target_before_failing = False @property def status(self) -> ProofStatus: - if len(self.reached.intersection(self.failure_nodes)) > 0: - return ProofStatus.FAILED - elif self.target in self.reached: + if self.reached_target_before_failing: return ProofStatus.PASSED + elif len(self.reached.intersection(self.failure_nodes)) > 0: + return ProofStatus.FAILED else: return ProofStatus.PENDING @@ -62,6 +64,8 @@ def parents(node_id: int) -> list[int]: ] def commit(self, proof: TreeExploreProof, update: int) -> None: + if proof.status is ProofStatus.PENDING and update == proof.target: + proof.reached_target_before_failing = True proof.reached.add(update) From 4c2219305ca8f307e3e24b54824bd830090df9fd Mon Sep 17 00:00:00 2001 From: devops Date: Mon, 13 Nov 2023 18:48:24 +0000 Subject: [PATCH 037/116] Set Version: 0.1.499 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 1138e2876..5a62dabe5 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.498 \ No newline at end of file +0.1.499 diff --git a/pyproject.toml b/pyproject.toml index 3333479cc..cdea936bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.498" +version = "0.1.499" description = "" authors = [ "Runtime Verification, Inc. 
", From 0f5a6b3d4cb10ba4f25ba50094a58e4d9903fe94 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 13 Nov 2023 18:57:56 -0600 Subject: [PATCH 038/116] Add APRProver Implementations --- src/pyk/proof/parallel.py | 199 ++++++++++++++---- src/pyk/proof/reachability.py | 28 ++- .../integration/proof/test_imp_parallel.py | 75 +++++++ .../integration/proof/test_parallel_prove.py | 93 ++++---- 4 files changed, 302 insertions(+), 93 deletions(-) create mode 100644 src/tests/integration/proof/test_imp_parallel.py diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 728f2b3dc..8aceeed6b 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,14 +1,24 @@ from __future__ import annotations from abc import ABC, abstractmethod -from concurrent.futures import ProcessPoolExecutor, wait -from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar +from dataclasses import dataclass +from multiprocessing import Process, Queue +from typing import TYPE_CHECKING, Generic, TypeVar +from pyk.kcfg.explore import KCFGExplore +from pyk.kore.rpc import KoreClient, TransportType, kore_server from pyk.proof.proof import ProofStatus +from ..ktool.kprove import KoreExecLogFormat + if TYPE_CHECKING: from collections.abc import Iterable, Mapping - from concurrent.futures import Executor, Future + from pathlib import Path + + from pyk.kcfg.semantics import KCFGSemantics + from pyk.ktool.kprint import KPrint + + from ..utils import BugReport P = TypeVar('P', bound='Proof') @@ -66,6 +76,51 @@ def status(self) -> ProofStatus: ... +class ProcessData(ABC): + ... + + +@dataclass(frozen=True) +class APRProofExtendData(ProcessData): + # kcfg_explore: KCFGExplore + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + execute_depth: int + + definition_dir: str | Path + module_name: str + + kprint: KPrint + + llvm_definition_dir: Path | None = None + port: int | None = None + command: str | Iterable[str] | None = None + bug_report: BugReport | None = None + smt_timeout: int | None = None + smt_retry_limit: int | None = None + smt_tactic: str | None = None + haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE + haskell_log_entries: Iterable[str] = () + log_axioms_file: Path | None = None + + timeout: int | None = None + bug_report_id: str | None = None + transport: TransportType = TransportType.SINGLE_SOCKET + dispatch: dict[str, list[tuple[str, int, TransportType]]] | None = None + + kcfg_semantics: KCFGSemantics | None = None + id: str | None = None + trace_rewrites: bool = False + + +@dataclass(frozen=True) +class APRProofExtendData2(ProcessData): + kcfg_explore: KCFGExplore + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + execute_depth: int + + class ProofStep(ABC, Generic[U]): """ Should be a description of a computation needed to make progress on a `Proof`. @@ -76,7 +131,7 @@ class ProofStep(ABC, Generic[U]): """ @abstractmethod - def exec(self) -> U: + def exec(self, data: APRProofExtendData2) -> U: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. 
@@ -91,54 +146,118 @@ def prove_parallel( max_workers: int, fail_fast: bool = False, max_iterations: int | None = None, - init: Callable[..., None] = lambda *args: None, + process_data: ProcessData | None = None, ) -> Iterable[Proof]: - pending: dict[Future[Any], str] = {} explored: set[tuple[str, ProofStep]] = set() iterations: dict[str, int] = {} - def submit(proof_id: str, pool: Executor) -> None: + in_queue: Queue = Queue() + out_queue: Queue = Queue() + + pending_jobs: int = 0 + + def run_process(data: APRProofExtendData) -> None: + with kore_server( + definition_dir=data.definition_dir, + llvm_definition_dir=data.llvm_definition_dir, + module_name=data.module_name, + command=data.command, + bug_report=data.bug_report, + smt_timeout=data.smt_timeout, + smt_retry_limit=data.smt_retry_limit, + smt_tactic=data.smt_tactic, + haskell_log_format=data.haskell_log_format, + haskell_log_entries=data.haskell_log_entries, + log_axioms_file=data.log_axioms_file, + ) as server: + with KoreClient( + 'localhost', server.port, bug_report=data.bug_report, bug_report_id=data.bug_report_id + ) as client: + kcfg_explore = KCFGExplore( + kprint=data.kprint, + kore_client=client, + kcfg_semantics=data.kcfg_semantics, + id=data.id, + trace_rewrites=data.trace_rewrites, + ) + + data2 = APRProofExtendData2( + kcfg_explore=kcfg_explore, + cut_point_rules=data.cut_point_rules, + execute_depth=data.execute_depth, + terminal_rules=data.terminal_rules, + ) + + kcfg_explore.add_dependencies_module( + data.module_name, + data.module_name + '-DEPENDS-MODULE', + [], + priority=1, + ) + kcfg_explore.add_dependencies_module( + data.module_name, + data.module_name + '-CIRCULARITIES-MODULE', + [], + priority=1, + ) + + while True: + dequeued = in_queue.get() + if dequeued == 0: + break + proof_id, proof_step = dequeued + update = proof_step.exec(data2) + out_queue.put((proof_id, update)) + + def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) if (proof_id, step) in explored: continue explored.add((proof_id, step)) - future = pool.submit(step.exec) # <-- schedule steps for execution - pending[future] = proof_id - - with ProcessPoolExecutor(max_workers=max_workers, initializer=init) as pool: - for proof_id in proofs.keys(): - submit(proof_id, pool) - iterations[proof_id] = 0 - - while pending: - done, _ = wait(pending, return_when='FIRST_COMPLETED') - future = done.pop() - proof_id = pending[future] - proof = proofs[proof_id] - prover = provers[proof_id] - update = future.result() - - if max_iterations is not None and iterations[proof_id] >= max_iterations: - pending.pop(future) - continue + in_queue.put((proof_id, step)) + nonlocal pending_jobs + pending_jobs += 1 + + processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] + for process in processes: + process.start() + + for proof_id in proofs.keys(): + submit(proof_id) + iterations[proof_id] = 0 + + while pending_jobs > 0: + proof_id, update = out_queue.get() + pending_jobs -= 1 + + proof = proofs[proof_id] + prover = provers[proof_id] + + if max_iterations is not None and iterations[proof_id] >= max_iterations: + continue + + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + + iterations[proof_id] += 1 + + match proof.status: + # terminate on first failure, yield partial results, etc. 
+ case ProofStatus.FAILED: + if fail_fast: + continue + case ProofStatus.PENDING: + assert len(list(prover.steps(proof))) > 0 + case ProofStatus.PASSED: + assert len(list(prover.steps(proof))) == 0 - prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + submit(proof_id) - iterations[proof_id] += 1 + for _ in range(max_workers): + in_queue.put(0) - match proof.status: - # terminate on first failure, yield partial results, etc. - case ProofStatus.FAILED: - if fail_fast: - pending.pop(future) - continue - case ProofStatus.PENDING: - assert len(list(prover.steps(proof))) > 0 - case ProofStatus.PASSED: - assert len(list(prover.steps(proof))) == 0 + for process in processes: + process.join() - submit(proof_id, pool) - pending.pop(future) return proofs.values() diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 0f95b4e3f..ba67b64fc 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1125,16 +1125,27 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: elif type(update) is APRProofSubsumeResult: proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) + self.prover._check_all_terminals() -@dataclass(frozen=True) -class APRProofExtendData: - kcfg_explore: KCFGExplore - cut_point_rules: Iterable[str] - terminal_rules: Iterable[str] - execute_depth: int + if proof.failed: + self.prover.save_failure_info() + + proof.write_proof_data() -data: APRProofExtendData +# aprproof_data: APRProofExtendData + +# +# @dataclass(frozen=True) +# class APRProofExtendData(parallel.ProcessData): +# kcfg_explore: KCFGExplore +# cut_point_rules: Iterable[str] +# terminal_rules: Iterable[str] +# execute_depth: int +# +# def initializer(self) -> None: +# global aprproof_data +# aprproof_data = self @dataclass(frozen=True, eq=True) @@ -1148,13 +1159,12 @@ class APRProofStep(parallel.ProofStep[APRProofResult]): def __hash__(self) -> int: return hash((self.cterm, self.node_id)) - def exec(self) -> APRProofResult: + def exec(self, data: parallel.APRProofExtendData2) -> APRProofResult: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" - csubst = data.kcfg_explore.cterm_implies(self.cterm, self.target_cterm) if csubst is not None: return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py new file mode 100644 index 000000000..735fedff9 --- /dev/null +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest + +from pyk.proof.parallel import APRProofExtendData, prove_parallel +from pyk.proof.proof import ProofStatus +from pyk.proof.reachability import APRProof, APRProver, ParallelAPRProver +from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest +from pyk.utils import single + +from ..utils import K_FILES +from .test_imp import ImpSemantics + +if TYPE_CHECKING: + from pytest import TempPathFactory + + from pyk.kast.outer import KDefinition + from pyk.kcfg.explore import KCFGExplore + from pyk.kcfg.semantics import KCFGSemantics + from pyk.ktool.kprint import KPrint + from pyk.ktool.kprove import KProve + + +@pytest.fixture(scope='function') +def proof_dir(tmp_path_factory: TempPathFactory) -> Path: + return tmp_path_factory.mktemp('proofs') + + +class TestImpParallelProve(KCFGExploreTest, KProveTest, KPrintTest): + KOMPILE_MAIN_FILE = K_FILES / 'imp-verification.k' + + def semantics(self, definition: KDefinition) -> KCFGSemantics: + return ImpSemantics(definition) + + def test_imp_parallel_prove( + self, kcfg_explore: KCFGExplore, proof_dir: Path, kprove: KProve, kprint: KPrint + ) -> None: + # claim_id = 'addition-1' + claim_id = 'failing-if' + spec_file = K_FILES / 'imp-simple-spec.k' + spec_module = 'IMP-SIMPLE-SPEC' + + claim = single( + kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) + ) + + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) + prover = APRProver( + proof, + kcfg_explore=kcfg_explore, + ) + + process_data = APRProofExtendData( + cut_point_rules=[], + terminal_rules=[], + execute_depth=1000, + definition_dir=kprove.definition_dir, + module_name=kprove.main_module, + kprint=kprint, + ) + + parallel_prover = ParallelAPRProver(prover=prover) + + results = prove_parallel( + proofs={'proof1': proof}, + provers={'proof1': parallel_prover}, + max_workers=2, + process_data=process_data, + ) + + assert len(list(results)) == 1 + assert list(results)[0].status == ProofStatus.FAILED diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index c50f1c4e5..3d69cf632 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,13 +1,16 @@ from __future__ import annotations +import sys import time from dataclasses import dataclass - -import pytest +from typing import TYPE_CHECKING from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus +if TYPE_CHECKING: + import pyk.proof.parallel as parallel + class TreeExploreProof(Proof): init: int @@ -39,8 +42,10 @@ def status(self) -> ProofStatus: class TreeExploreProofStep(ProofStep[int]): node: int - def exec(self) -> int: + def exec(self, data: parallel.APRProofExtendData2) -> int: + print(f'exec {self.node}', file=sys.stderr) time.sleep(1) + print(f'done {self.node}', file=sys.stderr) return self.node @@ -101,44 
+106,44 @@ def test_parallel_prove() -> None: assert list(results)[0].status == ProofStatus.PASSED -def test_parallel_fail() -> None: - prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) - assert len(list(results)) == 1 - assert len(list(prover.steps(proof))) == 0 - assert list(results)[0].status == ProofStatus.FAILED - - -def test_parallel_fail_fast() -> None: - prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, SIMPLE_TREE, {3}) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, fail_fast=True) - assert len(list(results)) == 1 - assert len(list(prover.steps(proof))) > 0 - assert list(results)[0].status == ProofStatus.FAILED - - -@pytest.mark.parametrize('iterations', [0, 1, 9, 10]) -def test_parallel_max_iterations(iterations: int) -> None: - prover = TreeExploreProver() - proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, max_iterations=iterations) - assert len(list(results)) == 1 - assert len(list(proof.reached)) == iterations - - -def test_parallel_multiple_proofs() -> None: - prover = TreeExploreProver() - proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE, set()) for i in range(3)} - provers_map = {f'proof{i}': prover for i in range(3)} - results = prove_parallel( - proofs, - provers_map, - max_workers=4, - ) - assert len(list(results)) == 3 - for proof in proofs.values(): - assert len(list(prover.steps(proof))) == 0 - for result in results: - assert result.status == ProofStatus.PASSED +# def test_parallel_fail() -> None: +# prover = TreeExploreProver() +# proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) +# results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) +# assert len(list(results)) == 1 +# assert len(list(prover.steps(proof))) == 0 +# assert list(results)[0].status == ProofStatus.FAILED +# +# +# def test_parallel_fail_fast() -> None: +# prover = TreeExploreProver() +# proof = TreeExploreProof(0, 9, SIMPLE_TREE, {3}) +# results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, fail_fast=True) +# assert len(list(results)) == 1 +# assert len(list(prover.steps(proof))) > 0 +# assert list(results)[0].status == ProofStatus.FAILED +# +# +# @pytest.mark.parametrize('iterations', [0, 1, 9, 10]) +# def test_parallel_max_iterations(iterations: int) -> None: +# prover = TreeExploreProver() +# proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) +# results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, max_iterations=iterations) +# assert len(list(results)) == 1 +# assert len(list(proof.reached)) == iterations +# +# +# def test_parallel_multiple_proofs() -> None: +# prover = TreeExploreProver() +# proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE, set()) for i in range(3)} +# provers_map = {f'proof{i}': prover for i in range(3)} +# results = prove_parallel( +# proofs, +# provers_map, +# max_workers=4, +# ) +# assert len(list(results)) == 3 +# for proof in proofs.values(): +# assert len(list(prover.steps(proof))) == 0 +# for result in results: +# assert result.status == ProofStatus.PASSED From 1950f60a8948c73ce4e421bb0bdb91257da2bb45 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 13 Nov 2023 20:23:11 -0600 Subject: [PATCH 039/116] Create interface for ProcessData --- src/pyk/proof/parallel.py | 131 +++------------- src/pyk/proof/reachability.py | 146 +++++++++++++++--- 
.../integration/proof/test_imp_parallel.py | 4 +- .../integration/proof/test_parallel_prove.py | 19 ++- 4 files changed, 162 insertions(+), 138 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 8aceeed6b..06e0ef495 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,31 +1,21 @@ from __future__ import annotations from abc import ABC, abstractmethod -from dataclasses import dataclass from multiprocessing import Process, Queue from typing import TYPE_CHECKING, Generic, TypeVar -from pyk.kcfg.explore import KCFGExplore -from pyk.kore.rpc import KoreClient, TransportType, kore_server from pyk.proof.proof import ProofStatus -from ..ktool.kprove import KoreExecLogFormat - if TYPE_CHECKING: from collections.abc import Iterable, Mapping - from pathlib import Path - - from pyk.kcfg.semantics import KCFGSemantics - from pyk.ktool.kprint import KPrint - - from ..utils import BugReport P = TypeVar('P', bound='Proof') U = TypeVar('U') +D = TypeVar('D', bound='ProcessData') -class Prover(ABC, Generic[P, U]): +class Prover(ABC, Generic[P, U, D]): """ Should contain all data needed to make progress on a `P` (proof). May be specific to a single `P` (proof) or may be able to handle multiple. @@ -38,7 +28,7 @@ class Prover(ABC, Generic[P, U]): """ @abstractmethod - def steps(self, proof: P) -> Iterable[ProofStep[U]]: + def steps(self, proof: P) -> Iterable[ProofStep[U, D]]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. @@ -77,51 +67,16 @@ def status(self) -> ProofStatus: class ProcessData(ABC): - ... - - -@dataclass(frozen=True) -class APRProofExtendData(ProcessData): - # kcfg_explore: KCFGExplore - cut_point_rules: Iterable[str] - terminal_rules: Iterable[str] - execute_depth: int - - definition_dir: str | Path - module_name: str - - kprint: KPrint - - llvm_definition_dir: Path | None = None - port: int | None = None - command: str | Iterable[str] | None = None - bug_report: BugReport | None = None - smt_timeout: int | None = None - smt_retry_limit: int | None = None - smt_tactic: str | None = None - haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE - haskell_log_entries: Iterable[str] = () - log_axioms_file: Path | None = None - - timeout: int | None = None - bug_report_id: str | None = None - transport: TransportType = TransportType.SINGLE_SOCKET - dispatch: dict[str, list[tuple[str, int, TransportType]]] | None = None - - kcfg_semantics: KCFGSemantics | None = None - id: str | None = None - trace_rewrites: bool = False - + @abstractmethod + def init(self) -> None: + ... -@dataclass(frozen=True) -class APRProofExtendData2(ProcessData): - kcfg_explore: KCFGExplore - cut_point_rules: Iterable[str] - terminal_rules: Iterable[str] - execute_depth: int + @abstractmethod + def cleanup(self) -> None: + ... -class ProofStep(ABC, Generic[U]): +class ProofStep(ABC, Generic[U, D]): """ Should be a description of a computation needed to make progress on a `Proof`. Must be hashable. 
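To make the ProcessData contract introduced above concrete, here is a minimal sketch; it is not part of the patch, every name in it is invented, and it relies only on the standard library. The intended lifecycle is: init() acquires a per-process resource once when a worker starts, each hashable ProofStep-like job uses that resource in exec(), and cleanup() releases it when the worker shuts down.

# Illustrative sketch, not part of the patch series. Hypothetical stand-ins
# for ProcessData and ProofStep[U, D] from the interface above.
from __future__ import annotations

import sqlite3
from dataclasses import dataclass


class ScratchDbData:
    # Plays the role of a ProcessData: one resource per worker process.
    conn: sqlite3.Connection

    def init(self) -> None:
        # Acquire the resource once, when the worker process starts.
        self.conn = sqlite3.connect(':memory:')
        self.conn.execute('CREATE TABLE squares (n INTEGER, sq INTEGER)')

    def cleanup(self) -> None:
        # Release it after the worker dequeues the stop sentinel.
        self.conn.close()


@dataclass(frozen=True)
class SquareStep:
    # Plays the role of a ProofStep[int, ScratchDbData]: a hashable, picklable job description.
    n: int

    def exec(self, data: ScratchDbData) -> int:
        data.conn.execute('INSERT INTO squares VALUES (?, ?)', (self.n, self.n * self.n))
        return self.n * self.n


if __name__ == '__main__':
    data = ScratchDbData()
    data.init()
    try:
        print([SquareStep(n).exec(data) for n in range(4)])  # [0, 1, 4, 9]
    finally:
        data.cleanup()
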
@@ -131,7 +86,7 @@ class ProofStep(ABC, Generic[U]): """ @abstractmethod - def exec(self, data: APRProofExtendData2) -> U: + def exec(self, data: D) -> U: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. @@ -156,58 +111,18 @@ def prove_parallel( pending_jobs: int = 0 - def run_process(data: APRProofExtendData) -> None: - with kore_server( - definition_dir=data.definition_dir, - llvm_definition_dir=data.llvm_definition_dir, - module_name=data.module_name, - command=data.command, - bug_report=data.bug_report, - smt_timeout=data.smt_timeout, - smt_retry_limit=data.smt_retry_limit, - smt_tactic=data.smt_tactic, - haskell_log_format=data.haskell_log_format, - haskell_log_entries=data.haskell_log_entries, - log_axioms_file=data.log_axioms_file, - ) as server: - with KoreClient( - 'localhost', server.port, bug_report=data.bug_report, bug_report_id=data.bug_report_id - ) as client: - kcfg_explore = KCFGExplore( - kprint=data.kprint, - kore_client=client, - kcfg_semantics=data.kcfg_semantics, - id=data.id, - trace_rewrites=data.trace_rewrites, - ) - - data2 = APRProofExtendData2( - kcfg_explore=kcfg_explore, - cut_point_rules=data.cut_point_rules, - execute_depth=data.execute_depth, - terminal_rules=data.terminal_rules, - ) - - kcfg_explore.add_dependencies_module( - data.module_name, - data.module_name + '-DEPENDS-MODULE', - [], - priority=1, - ) - kcfg_explore.add_dependencies_module( - data.module_name, - data.module_name + '-CIRCULARITIES-MODULE', - [], - priority=1, - ) - - while True: - dequeued = in_queue.get() - if dequeued == 0: - break - proof_id, proof_step = dequeued - update = proof_step.exec(data2) - out_queue.put((proof_id, update)) + def run_process(data: ProcessData) -> None: + data.init() + + while True: + dequeued = in_queue.get() + if dequeued == 0: + break + proof_id, proof_step = dequeued + update = proof_step.exec(data) + out_queue.put((proof_id, update)) + + data.cleanup() def submit(proof_id: str) -> None: proof = proofs[proof_id] diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index ba67b64fc..42147d105 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -8,12 +8,13 @@ from typing import TYPE_CHECKING import pyk.proof.parallel as parallel -from pyk.kore.rpc import LogEntry +from pyk.kore.rpc import KoreClient, LogEntry, kore_server +from pyk.ktool.kprove import KoreExecLogFormat from ..kast.inner import KInner, KRewrite, KSort, Subst from ..kast.manip import flatten_label, ml_pred_to_bool from ..kast.outer import KClaim -from ..kcfg import KCFG +from ..kcfg import KCFG, KCFGExplore from ..kcfg.exploration import KCFGExploration from ..prelude.kbool import BOOL, TRUE from ..prelude.ml import mlAnd, mlEquals, mlTop @@ -26,12 +27,15 @@ from pathlib import Path from typing import Any, Final, TypeVar + from pyk.kcfg.semantics import KCFGSemantics + from pyk.kore.rpc import KoreServer + from ..cterm import CSubst, CTerm from ..kast.outer import KDefinition, KFlatModuleList - from ..kcfg import KCFGExplore from ..kcfg.explore import ExtendResult from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint + from ..utils import BugReport T = TypeVar('T', bound='Proof') @@ -1076,7 +1080,122 @@ class APRProofSubsumeResult(APRProofResult): csubst: CSubst -class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): +class APRProofExtendData(parallel.ProcessData): + cut_point_rules: Iterable[str] + 
terminal_rules: Iterable[str] + execute_depth: int + definition_dir: str | Path + module_name: str + kprint: KPrint + llvm_definition_dir: Path | None + command: str | Iterable[str] | None + bug_report: BugReport | None + smt_timeout: int | None + smt_retry_limit: int | None + smt_tactic: str | None + haskell_log_format: KoreExecLogFormat + haskell_log_entries: Iterable[str] + log_axioms_file: Path | None + bug_report_id: str | None + kcfg_semantics: KCFGSemantics | None + id: str | None + trace_rewrites: bool + server: KoreServer + client: KoreClient + + kcfg_explore: KCFGExplore + # cut_point_rules: Iterable[str] + # terminal_rules: Iterable[str] + # execute_depth: int + + def __init__( + self, + cut_point_rules: Iterable[str], + terminal_rules: Iterable[str], + execute_depth: int, + definition_dir: str | Path, + module_name: str, + kprint: KPrint, + llvm_definition_dir: Path | None = None, + command: str | Iterable[str] | None = None, + bug_report: BugReport | None = None, + smt_timeout: int | None = None, + smt_retry_limit: int | None = None, + smt_tactic: str | None = None, + haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, + haskell_log_entries: Iterable[str] = (), + log_axioms_file: Path | None = None, + bug_report_id: str | None = None, + id: str | None = None, + trace_rewrites: bool = False, + kcfg_semantics: KCFGSemantics | None = None, + ) -> None: + self.cut_point_rules = cut_point_rules + self.terminal_rules = terminal_rules + self.execute_depth = execute_depth + self.definition_dir = definition_dir + self.module_name = module_name + self.kprint = kprint + self.llvm_definition_dir = llvm_definition_dir + self.command = command + self.bug_report = bug_report + self.smt_timeout = smt_timeout + self.smt_retry_limit = smt_retry_limit + self.smt_tactic = smt_tactic + self.haskell_log_format = haskell_log_format + self.haskell_log_entries = haskell_log_entries + self.log_axioms_file = log_axioms_file + self.bug_report_id = bug_report_id + self.id = id + self.trace_rewrites = trace_rewrites + self.kcfg_semantics = kcfg_semantics + + + def init(self) -> None: + self.server = kore_server( + definition_dir=self.definition_dir, + llvm_definition_dir=self.llvm_definition_dir, + module_name=self.module_name, + command=self.command, + bug_report=self.bug_report, + smt_timeout=self.smt_timeout, + smt_retry_limit=self.smt_retry_limit, + smt_tactic=self.smt_tactic, + haskell_log_format=self.haskell_log_format, + haskell_log_entries=self.haskell_log_entries, + log_axioms_file=self.log_axioms_file, + ) + self.client = KoreClient( + 'localhost', self.server.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id + ) + + self.kcfg_explore = KCFGExplore( + kprint=self.kprint, + kore_client=self.client, + kcfg_semantics=self.kcfg_semantics, + id=self.id, + trace_rewrites=self.trace_rewrites, + ) + + self.kcfg_explore.add_dependencies_module( + self.module_name, + self.module_name + '-DEPENDS-MODULE', + [], + priority=1, + ) + self.kcfg_explore.add_dependencies_module( + self.module_name, + self.module_name + '-CIRCULARITIES-MODULE', + [], + priority=1, + ) + + def cleanup(self) -> None: + self.client.close() + self.server.close() + + +class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofExtendData]): prover: APRProver def __init__(self, prover: APRProver) -> None: @@ -1133,23 +1252,8 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: proof.write_proof_data() -# aprproof_data: APRProofExtendData - -# -# @dataclass(frozen=True) -# 
class APRProofExtendData(parallel.ProcessData): -# kcfg_explore: KCFGExplore -# cut_point_rules: Iterable[str] -# terminal_rules: Iterable[str] -# execute_depth: int -# -# def initializer(self) -> None: -# global aprproof_data -# aprproof_data = self - - @dataclass(frozen=True, eq=True) -class APRProofStep(parallel.ProofStep[APRProofResult]): +class APRProofStep(parallel.ProofStep[APRProofResult, APRProofExtendData]): cterm: CTerm node_id: int module_name: str @@ -1159,7 +1263,7 @@ class APRProofStep(parallel.ProofStep[APRProofResult]): def __hash__(self) -> int: return hash((self.cterm, self.node_id)) - def exec(self, data: parallel.APRProofExtendData2) -> APRProofResult: + def exec(self, data: APRProofExtendData) -> APRProofResult: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 735fedff9..dbd958a1d 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -5,9 +5,9 @@ import pytest -from pyk.proof.parallel import APRProofExtendData, prove_parallel +from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus -from pyk.proof.reachability import APRProof, APRProver, ParallelAPRProver +from pyk.proof.reachability import APRProof, APRProofExtendData, APRProver, ParallelAPRProver from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest from pyk.utils import single diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 3d69cf632..fbacd6c99 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -3,14 +3,11 @@ import sys import time from dataclasses import dataclass -from typing import TYPE_CHECKING +import pyk.proof.parallel as parallel from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus -if TYPE_CHECKING: - import pyk.proof.parallel as parallel - class TreeExploreProof(Proof): init: int @@ -38,18 +35,26 @@ def status(self) -> ProofStatus: return ProofStatus.PENDING +class TreeExploreProofData(parallel.ProcessData): + def init(self) -> None: + ... + + def cleanup(self) -> None: + ... 
+ + @dataclass(frozen=True) -class TreeExploreProofStep(ProofStep[int]): +class TreeExploreProofStep(ProofStep[int, TreeExploreProofData]): node: int - def exec(self, data: parallel.APRProofExtendData2) -> int: + def exec(self, data: TreeExploreProofData) -> int: print(f'exec {self.node}', file=sys.stderr) time.sleep(1) print(f'done {self.node}', file=sys.stderr) return self.node -class TreeExploreProver(Prover[TreeExploreProof, int]): +class TreeExploreProver(Prover[TreeExploreProof, int, TreeExploreProofData]): def __init__(self) -> None: return From 243623435ef96b4b7c7fdedbf5e489bcc7e95630 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 14 Nov 2023 17:13:27 -0600 Subject: [PATCH 040/116] Switch to using 1 server per prover and sharing between threads, init client and kcfg_explore in exec() --- src/pyk/proof/parallel.py | 19 +- src/pyk/proof/reachability.py | 190 +++++++++--------- .../integration/proof/test_imp_parallel.py | 27 ++- .../integration/proof/test_parallel_prove.py | 15 +- 4 files changed, 116 insertions(+), 135 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 06e0ef495..5a5c38128 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -15,7 +15,7 @@ D = TypeVar('D', bound='ProcessData') -class Prover(ABC, Generic[P, U, D]): +class Prover(ABC, Generic[P, U]): """ Should contain all data needed to make progress on a `P` (proof). May be specific to a single `P` (proof) or may be able to handle multiple. @@ -28,7 +28,7 @@ class Prover(ABC, Generic[P, U, D]): """ @abstractmethod - def steps(self, proof: P) -> Iterable[ProofStep[U, D]]: + def steps(self, proof: P) -> Iterable[ProofStep[U]]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. @@ -76,7 +76,7 @@ def cleanup(self) -> None: ... -class ProofStep(ABC, Generic[U, D]): +class ProofStep(ABC, Generic[U]): """ Should be a description of a computation needed to make progress on a `Proof`. Must be hashable. @@ -86,7 +86,7 @@ class ProofStep(ABC, Generic[U, D]): """ @abstractmethod - def exec(self, data: D) -> U: + def exec(self) -> U: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. 
@@ -101,7 +101,6 @@ def prove_parallel( max_workers: int, fail_fast: bool = False, max_iterations: int | None = None, - process_data: ProcessData | None = None, ) -> Iterable[Proof]: explored: set[tuple[str, ProofStep]] = set() iterations: dict[str, int] = {} @@ -111,19 +110,15 @@ def prove_parallel( pending_jobs: int = 0 - def run_process(data: ProcessData) -> None: - data.init() - + def run_process() -> None: while True: dequeued = in_queue.get() if dequeued == 0: break proof_id, proof_step = dequeued - update = proof_step.exec(data) + update = proof_step.exec() out_queue.put((proof_id, update)) - data.cleanup() - def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] @@ -135,7 +130,7 @@ def submit(proof_id: str) -> None: nonlocal pending_jobs pending_jobs += 1 - processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] + processes = [Process(target=run_process) for _ in range(max_workers)] for process in processes: process.start() diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 42147d105..034b1492e 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -8,8 +8,7 @@ from typing import TYPE_CHECKING import pyk.proof.parallel as parallel -from pyk.kore.rpc import KoreClient, LogEntry, kore_server -from pyk.ktool.kprove import KoreExecLogFormat +from pyk.kore.rpc import KoreClient, KoreExecLogFormat, LogEntry, kore_server from ..kast.inner import KInner, KRewrite, KSort, Subst from ..kast.manip import flatten_label, ml_pred_to_bool @@ -29,13 +28,13 @@ from pyk.kcfg.semantics import KCFGSemantics from pyk.kore.rpc import KoreServer + from pyk.utils import BugReport from ..cterm import CSubst, CTerm from ..kast.outer import KDefinition, KFlatModuleList from ..kcfg.explore import ExtendResult from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint - from ..utils import BugReport T = TypeVar('T', bound='Proof') @@ -1080,42 +1079,36 @@ class APRProofSubsumeResult(APRProofResult): csubst: CSubst -class APRProofExtendData(parallel.ProcessData): +class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): + prover: APRProver + server: KoreServer + kcfg_explore: KCFGExplore + + execute_depth: int | None cut_point_rules: Iterable[str] terminal_rules: Iterable[str] - execute_depth: int - definition_dir: str | Path - module_name: str + kprint: KPrint - llvm_definition_dir: Path | None - command: str | Iterable[str] | None - bug_report: BugReport | None - smt_timeout: int | None - smt_retry_limit: int | None - smt_tactic: str | None - haskell_log_format: KoreExecLogFormat - haskell_log_entries: Iterable[str] - log_axioms_file: Path | None - bug_report_id: str | None kcfg_semantics: KCFGSemantics | None id: str | None trace_rewrites: bool - server: KoreServer - client: KoreClient - kcfg_explore: KCFGExplore - # cut_point_rules: Iterable[str] - # terminal_rules: Iterable[str] - # execute_depth: int + bug_report: BugReport | None + bug_report_id: str | None def __init__( self, - cut_point_rules: Iterable[str], - terminal_rules: Iterable[str], - execute_depth: int, - definition_dir: str | Path, + proof: APRProof, module_name: str, + definition_dir: str | Path, + execute_depth: int | None, kprint: KPrint, + kcfg_semantics: KCFGSemantics | None, + id: str | None, + trace_rewrites: bool, + cut_point_rules: Iterable[str], + terminal_rules: Iterable[str], + bug_report_id: str | None, llvm_definition_dir: Path | None = None, command: str | Iterable[str] | None = None, 
bug_report: BugReport | None = None, @@ -1125,50 +1118,35 @@ def __init__( haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, haskell_log_entries: Iterable[str] = (), log_axioms_file: Path | None = None, - bug_report_id: str | None = None, - id: str | None = None, - trace_rewrites: bool = False, - kcfg_semantics: KCFGSemantics | None = None, ) -> None: + self.execute_depth = execute_depth self.cut_point_rules = cut_point_rules self.terminal_rules = terminal_rules - self.execute_depth = execute_depth - self.definition_dir = definition_dir - self.module_name = module_name self.kprint = kprint - self.llvm_definition_dir = llvm_definition_dir - self.command = command - self.bug_report = bug_report - self.smt_timeout = smt_timeout - self.smt_retry_limit = smt_retry_limit - self.smt_tactic = smt_tactic - self.haskell_log_format = haskell_log_format - self.haskell_log_entries = haskell_log_entries - self.log_axioms_file = log_axioms_file - self.bug_report_id = bug_report_id + self.kcfg_semantics = kcfg_semantics self.id = id self.trace_rewrites = trace_rewrites - self.kcfg_semantics = kcfg_semantics - - - def init(self) -> None: + self.bug_report = bug_report + self.bug_report_id = bug_report_id self.server = kore_server( - definition_dir=self.definition_dir, - llvm_definition_dir=self.llvm_definition_dir, - module_name=self.module_name, - command=self.command, - bug_report=self.bug_report, - smt_timeout=self.smt_timeout, - smt_retry_limit=self.smt_retry_limit, - smt_tactic=self.smt_tactic, - haskell_log_format=self.haskell_log_format, - haskell_log_entries=self.haskell_log_entries, - log_axioms_file=self.log_axioms_file, + definition_dir=definition_dir, + llvm_definition_dir=llvm_definition_dir, + module_name=module_name, + command=command, + bug_report=bug_report, + smt_timeout=smt_timeout, + smt_retry_limit=smt_retry_limit, + smt_tactic=smt_tactic, + haskell_log_format=haskell_log_format, + haskell_log_entries=haskell_log_entries, + log_axioms_file=log_axioms_file, ) self.client = KoreClient( - 'localhost', self.server.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id + host='localhost', + port=self.server.port, + bug_report=self.bug_report, + bug_report_id=self.bug_report_id, ) - self.kcfg_explore = KCFGExplore( kprint=self.kprint, kore_client=self.client, @@ -1176,31 +1154,12 @@ def init(self) -> None: id=self.id, trace_rewrites=self.trace_rewrites, ) + self.prover = APRProver(proof=proof, kcfg_explore=self.kcfg_explore) - self.kcfg_explore.add_dependencies_module( - self.module_name, - self.module_name + '-DEPENDS-MODULE', - [], - priority=1, - ) - self.kcfg_explore.add_dependencies_module( - self.module_name, - self.module_name + '-CIRCULARITIES-MODULE', - [], - priority=1, - ) - - def cleanup(self) -> None: + def shutdown(self) -> None: self.client.close() self.server.close() - -class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofExtendData]): - prover: APRProver - - def __init__(self, prover: APRProver) -> None: - self.prover = prover - def steps(self, proof: APRProof) -> Iterable[APRProofStep]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. 
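The toy example below sketches the design this patch moves to: the prover owns one long-lived server, while each step carries only the picklable port and configuration and opens its own short-lived client inside exec(). It is illustrative only and not part of the patch; a TCP echo server stands in for the kore server, every name is invented, and only the standard library is used.

# Illustrative sketch, not part of the patch series: a hypothetical
# port-carrying step that opens its own client connection per exec() call.
from __future__ import annotations

import socket
import socketserver
import threading
from dataclasses import dataclass


class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self) -> None:
        # Toy stand-in for the real server: upper-case whatever the client sends.
        self.request.sendall(self.request.recv(1024).upper())


@dataclass(frozen=True)
class EchoStep:
    # Hypothetical analogue of a step that holds only picklable fields, including the port.
    port: int
    payload: str

    def exec(self) -> str:
        # Each call opens its own connection, so the step can run in any worker process.
        with socket.create_connection(('localhost', self.port)) as sock:
            sock.sendall(self.payload.encode())
            return sock.recv(1024).decode()


if __name__ == '__main__':
    server = socketserver.TCPServer(('localhost', 0), EchoHandler)
    threading.Thread(target=server.serve_forever, daemon=True).start()
    try:
        step = EchoStep(port=server.server_address[1], payload='node-3')
        print(step.exec())  # NODE-3
    finally:
        server.shutdown()
        server.server_close()
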
@@ -1223,6 +1182,16 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: module_name=module_name, target_cterm=target_node.cterm, target_node_id=target_node.id, + port=self.server.port, + execute_depth=self.execute_depth, + terminal_rules=self.terminal_rules, + cut_point_rules=self.cut_point_rules, + kprint=self.kprint, + kcfg_semantics=self.kcfg_semantics, + id=self.id, + trace_rewrites=self.trace_rewrites, + bug_report=self.bug_report, + bug_report_id=self.bug_report_id, ) ) return steps @@ -1238,7 +1207,7 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: # Extend proof as per `update` if type(update) is APRProofExtendResult: node = proof.kcfg.node(update.node_id) - self.prover.kcfg_explore.extend_kcfg( + self.kcfg_explore.extend_kcfg( extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs ) elif type(update) is APRProofSubsumeResult: @@ -1253,31 +1222,58 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: @dataclass(frozen=True, eq=True) -class APRProofStep(parallel.ProofStep[APRProofResult, APRProofExtendData]): +class APRProofStep(parallel.ProofStep[APRProofResult]): cterm: CTerm node_id: int module_name: str target_cterm: CTerm target_node_id: int + port: int + execute_depth: int | None + cut_point_rules: Iterable[str] + terminal_rules: Iterable[str] + + bug_report: BugReport | None + bug_report_id: str | None + + kprint: KPrint + kcfg_semantics: KCFGSemantics | None + id: str | None + trace_rewrites: bool def __hash__(self) -> int: return hash((self.cterm, self.node_id)) - def exec(self, data: APRProofExtendData) -> APRProofResult: + def exec(self) -> APRProofResult: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" - csubst = data.kcfg_explore.cterm_implies(self.cterm, self.target_cterm) - if csubst is not None: - return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) - - result = data.kcfg_explore.extend_cterm( - self.cterm, - module_name=self.module_name, - execute_depth=data.execute_depth, - terminal_rules=data.terminal_rules, - cut_point_rules=data.cut_point_rules, - ) - return APRProofExtendResult(result, self.node_id) + + with KoreClient( + host='localhost', + port=self.port, + bug_report=self.bug_report, + bug_report_id=self.bug_report_id, + ) as client: + kcfg_explore = KCFGExplore( + kprint=self.kprint, + kore_client=client, + kcfg_semantics=self.kcfg_semantics, + id=self.id, + trace_rewrites=self.trace_rewrites, + ) + + csubst = kcfg_explore.cterm_implies(self.cterm, self.target_cterm) + if csubst is not None: + return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) + + result = kcfg_explore.extend_cterm( + self.cterm, + module_name=self.module_name, + execute_depth=self.execute_depth, + terminal_rules=self.terminal_rules, + cut_point_rules=self.cut_point_rules, + ) + return APRProofExtendResult(result, self.node_id) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index dbd958a1d..6bf162c31 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -7,7 +7,7 @@ from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus -from pyk.proof.reachability import APRProof, APRProofExtendData, APRProver, ParallelAPRProver +from pyk.proof.reachability import APRProof, ParallelAPRProver from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest from pyk.utils import single @@ -39,7 +39,7 @@ def test_imp_parallel_prove( self, kcfg_explore: KCFGExplore, proof_dir: Path, kprove: KProve, kprint: KPrint ) -> None: # claim_id = 'addition-1' - claim_id = 'failing-if' + claim_id = 'fail-branch' spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' @@ -48,27 +48,26 @@ def test_imp_parallel_prove( ) proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) - process_data = APRProofExtendData( - cut_point_rules=[], - terminal_rules=[], - execute_depth=1000, - definition_dir=kprove.definition_dir, + parallel_prover = ParallelAPRProver( + proof=proof, module_name=kprove.main_module, + definition_dir=kprove.definition_dir, + execute_depth=1000, kprint=kprint, + kcfg_semantics=self.semantics(kprove.definition), + id=claim_id, + trace_rewrites=False, + cut_point_rules=(), + terminal_rules=(), + bug_report=None, + bug_report_id=None, ) - parallel_prover = ParallelAPRProver(prover=prover) - results = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, max_workers=2, - process_data=process_data, ) assert len(list(results)) == 1 diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index fbacd6c99..19db4d5b1 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -4,7 +4,6 @@ import time from dataclasses import dataclass -import pyk.proof.parallel as parallel from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus @@ -35,26 +34,18 @@ def status(self) 
-> ProofStatus: return ProofStatus.PENDING -class TreeExploreProofData(parallel.ProcessData): - def init(self) -> None: - ... - - def cleanup(self) -> None: - ... - - @dataclass(frozen=True) -class TreeExploreProofStep(ProofStep[int, TreeExploreProofData]): +class TreeExploreProofStep(ProofStep[int]): node: int - def exec(self, data: TreeExploreProofData) -> int: + def exec(self) -> int: print(f'exec {self.node}', file=sys.stderr) time.sleep(1) print(f'done {self.node}', file=sys.stderr) return self.node -class TreeExploreProver(Prover[TreeExploreProof, int, TreeExploreProofData]): +class TreeExploreProver(Prover[TreeExploreProof, int]): def __init__(self) -> None: return From a3db6eec48c8d077a07ecd87c60b86b8d198a606 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 16 Nov 2023 14:37:49 -0600 Subject: [PATCH 041/116] Parameterize imp parallel tests, add test to catch check_terminal order issue --- .../integration/proof/test_imp_parallel.py | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 6bf162c31..db36ab212 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -23,6 +23,11 @@ from pyk.ktool.kprint import KPrint from pyk.ktool.kprove import KProve +PARALLEL_PROVE_TEST_DATA = ( + ('imp-simple-addition-1', 'addition-1', ProofStatus.PASSED), + ('imp-simple-sum-10', 'sum-10', ProofStatus.PASSED), +) + @pytest.fixture(scope='function') def proof_dir(tmp_path_factory: TempPathFactory) -> Path: @@ -35,11 +40,22 @@ class TestImpParallelProve(KCFGExploreTest, KProveTest, KPrintTest): def semantics(self, definition: KDefinition) -> KCFGSemantics: return ImpSemantics(definition) + @pytest.mark.parametrize( + 'test_id,claim_id,expected_status', + PARALLEL_PROVE_TEST_DATA, + ids=[test_id for test_id, *_ in PARALLEL_PROVE_TEST_DATA], + ) def test_imp_parallel_prove( - self, kcfg_explore: KCFGExplore, proof_dir: Path, kprove: KProve, kprint: KPrint + self, + test_id: str, + claim_id: str, + expected_status: ProofStatus, + kcfg_explore: KCFGExplore, + proof_dir: Path, + kprove: KProve, + kprint: KPrint, ) -> None: # claim_id = 'addition-1' - claim_id = 'fail-branch' spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' @@ -71,4 +87,4 @@ def test_imp_parallel_prove( ) assert len(list(results)) == 1 - assert list(results)[0].status == ProofStatus.FAILED + assert list(results)[0].status == expected_status From 5b86dfb7b3e846b17ba6e1f9fbd70bbb2c342a5e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 16 Nov 2023 15:32:30 -0600 Subject: [PATCH 042/116] Fix check_terminal bug --- src/pyk/proof/reachability.py | 12 +++++++----- src/tests/integration/proof/test_imp_parallel.py | 8 ++++---- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 034b1492e..6926aa0a5 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1207,14 +1207,16 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: # Extend proof as per `update` if type(update) is APRProofExtendResult: node = proof.kcfg.node(update.node_id) - self.kcfg_explore.extend_kcfg( - extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs - ) + + if self.kcfg_explore.kcfg_semantics.is_terminal(node.cterm): + proof._terminal.add(node.id) + else: + self.kcfg_explore.extend_kcfg( + 
extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs + ) elif type(update) is APRProofSubsumeResult: proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) - self.prover._check_all_terminals() - if proof.failed: self.prover.save_failure_info() diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index db36ab212..d5bd33f8f 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -24,8 +24,9 @@ from pyk.ktool.kprove import KProve PARALLEL_PROVE_TEST_DATA = ( - ('imp-simple-addition-1', 'addition-1', ProofStatus.PASSED), - ('imp-simple-sum-10', 'sum-10', ProofStatus.PASSED), + ('addition-1', ProofStatus.PASSED), + ('sum-10', ProofStatus.PASSED), + ('failing-if', ProofStatus.FAILED), ) @@ -41,13 +42,12 @@ def semantics(self, definition: KDefinition) -> KCFGSemantics: return ImpSemantics(definition) @pytest.mark.parametrize( - 'test_id,claim_id,expected_status', + 'claim_id,expected_status', PARALLEL_PROVE_TEST_DATA, ids=[test_id for test_id, *_ in PARALLEL_PROVE_TEST_DATA], ) def test_imp_parallel_prove( self, - test_id: str, claim_id: str, expected_status: ProofStatus, kcfg_explore: KCFGExplore, From 923a8d7cdd4a97a6ce6af35025615df16f6edd0d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 16 Nov 2023 19:06:33 -0600 Subject: [PATCH 043/116] Remove parameters fromm prove_parallel and make shutdown() into destructor --- src/pyk/proof/parallel.py | 12 +----------- src/pyk/proof/reachability.py | 2 +- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 5a5c38128..b8ff6c077 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -99,11 +99,8 @@ def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], max_workers: int, - fail_fast: bool = False, - max_iterations: int | None = None, ) -> Iterable[Proof]: explored: set[tuple[str, ProofStep]] = set() - iterations: dict[str, int] = {} in_queue: Queue = Queue() out_queue: Queue = Queue() @@ -136,7 +133,6 @@ def submit(proof_id: str) -> None: for proof_id in proofs.keys(): submit(proof_id) - iterations[proof_id] = 0 while pending_jobs > 0: proof_id, update = out_queue.get() @@ -145,18 +141,12 @@ def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] - if max_iterations is not None and iterations[proof_id] >= max_iterations: - continue - prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) - iterations[proof_id] += 1 - match proof.status: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: - if fail_fast: - continue + ... 
case ProofStatus.PENDING: assert len(list(prover.steps(proof))) > 0 case ProofStatus.PASSED: diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 6926aa0a5..72e7e6839 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1156,7 +1156,7 @@ def __init__( ) self.prover = APRProver(proof=proof, kcfg_explore=self.kcfg_explore) - def shutdown(self) -> None: + def __del__(self) -> None: self.client.close() self.server.close() From a682b220a831a7d8813cf1dbc0e40f9ef837cc65 Mon Sep 17 00:00:00 2001 From: devops Date: Fri, 17 Nov 2023 01:12:46 +0000 Subject: [PATCH 044/116] Set Version: 0.1.505 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 9605b83b4..67876d1e5 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.504 +0.1.505 diff --git a/pyproject.toml b/pyproject.toml index dade360b6..97e6c8406 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.504" +version = "0.1.505" description = "" authors = [ "Runtime Verification, Inc. ", From 1f46262eac5fc3a13f7c23e0fcac0cdfdee6aa34 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 16 Nov 2023 19:27:33 -0600 Subject: [PATCH 045/116] Add ParallelAPRBMCProver --- src/pyk/proof/reachability.py | 62 +++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 72e7e6839..e6c920cb7 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1223,6 +1223,68 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: proof.write_proof_data() +class ParallelAPRBMCProver(ParallelAPRProver): + def __init__( + self, + proof: APRBMCProof, + module_name: str, + definition_dir: str | Path, + execute_depth: int | None, + kprint: KPrint, + kcfg_semantics: KCFGSemantics | None, + id: str | None, + trace_rewrites: bool, + cut_point_rules: Iterable[str], + terminal_rules: Iterable[str], + bug_report_id: str | None, + llvm_definition_dir: Path | None = None, + command: str | Iterable[str] | None = None, + bug_report: BugReport | None = None, + smt_timeout: int | None = None, + smt_retry_limit: int | None = None, + smt_tactic: str | None = None, + haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, + haskell_log_entries: Iterable[str] = (), + log_axioms_file: Path | None = None, + ) -> None: + self.execute_depth = execute_depth + self.cut_point_rules = cut_point_rules + self.terminal_rules = terminal_rules + self.kprint = kprint + self.kcfg_semantics = kcfg_semantics + self.id = id + self.trace_rewrites = trace_rewrites + self.bug_report = bug_report + self.bug_report_id = bug_report_id + self.server = kore_server( + definition_dir=definition_dir, + llvm_definition_dir=llvm_definition_dir, + module_name=module_name, + command=command, + bug_report=bug_report, + smt_timeout=smt_timeout, + smt_retry_limit=smt_retry_limit, + smt_tactic=smt_tactic, + haskell_log_format=haskell_log_format, + haskell_log_entries=haskell_log_entries, + log_axioms_file=log_axioms_file, + ) + self.client = KoreClient( + host='localhost', + port=self.server.port, + bug_report=self.bug_report, + bug_report_id=self.bug_report_id, + ) + self.kcfg_explore = KCFGExplore( + kprint=self.kprint, + kore_client=self.client, + kcfg_semantics=self.kcfg_semantics, + id=self.id, + 
trace_rewrites=self.trace_rewrites, + ) + self.prover = APRBMCProver(proof=proof, kcfg_explore=self.kcfg_explore) + + @dataclass(frozen=True, eq=True) class APRProofStep(parallel.ProofStep[APRProofResult]): cterm: CTerm From e029479a9e695b6c1b684151114e986e95f35a46 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 17 Nov 2023 13:18:55 -0600 Subject: [PATCH 046/116] Display profiling information --- src/pyk/proof/parallel.py | 41 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index bb5c1c866..ef3099b29 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,5 +1,6 @@ from __future__ import annotations +import time from abc import ABC, abstractmethod from concurrent.futures import CancelledError, ProcessPoolExecutor, wait from typing import TYPE_CHECKING, Any, Generic, TypeVar @@ -85,6 +86,13 @@ def exec(self) -> U: ... +def run_job(step: ProofStep) -> Any: + init_process_time = time.time_ns() + result = step.exec() + process_time = time.time_ns() - init_process_time + return (result, process_time) + + def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], @@ -93,14 +101,25 @@ def prove_parallel( pending: dict[Future[Any], str] = {} explored: set[tuple[str, ProofStep]] = set() + total_commit_time = 0 + total_steps_time = 0 + total_process_time = 0 + total_time = 0 + + total_init_time = time.time_ns() + def submit(proof_id: str, pool: Executor) -> None: proof = proofs[proof_id] prover = provers[proof_id] - for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) + steps_init_time = time.time_ns() + steps = prover.steps(proof) + nonlocal total_steps_time + total_steps_time += time.time_ns() - steps_init_time + for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) if (proof_id, step) in explored: continue explored.add((proof_id, step)) - future = pool.submit(step.exec) # <-- schedule steps for execution + future = pool.submit(run_job, step) # <-- schedule steps for execution pending[future] = proof_id with ProcessPoolExecutor(max_workers=max_workers) as pool: @@ -115,7 +134,7 @@ def submit(proof_id: str, pool: Executor) -> None: proof = proofs[proof_id] prover = provers[proof_id] try: - update = future.result() + update, process_time = future.result() except CancelledError as err: raise RuntimeError(f'Task was cancelled for proof {proof_id}') from err except TimeoutError as err: @@ -125,19 +144,35 @@ def submit(proof_id: str, pool: Executor) -> None: except Exception as err: raise RuntimeError('Exception was raised in ProofStep.exec() for proof {proof_id}.') from err + total_process_time += process_time + + commit_init_time = time.time_ns() prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + total_commit_time += time.time_ns() - commit_init_time match proof.status: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: ... case ProofStatus.PENDING: + steps_init_time = time.time_ns() if not list(prover.steps(proof)): raise ValueError('Prover violated expectation. status is pending with no further steps.') + total_steps_time += time.time_ns() - steps_init_time case ProofStatus.PASSED: + steps_init_time = time.time_ns() if list(prover.steps(proof)): raise ValueError('Prover violated expectation. 
status is passed with further steps.') + total_steps_time += time.time_ns() - steps_init_time submit(proof_id, pool) pending.pop(future) + + total_time = time.time_ns() - total_init_time + + print(f'total time: {total_time / 1000000000}') + print(f'steps time: {total_steps_time / 1000000000}') + print(f'commit time: {total_commit_time / 1000000000}') + print(f'process time: {total_process_time / 1000000000}') + return proofs.values() From 5b82535592e3539f2673ffdb2eaea9b608848322 Mon Sep 17 00:00:00 2001 From: devops Date: Fri, 17 Nov 2023 19:19:17 +0000 Subject: [PATCH 047/116] Set Version: 0.1.506 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 67876d1e5..7b54ca6be 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.505 +0.1.506 diff --git a/pyproject.toml b/pyproject.toml index 97e6c8406..9def3d493 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.505" +version = "0.1.506" description = "" authors = [ "Runtime Verification, Inc. ", From b7cd7e18475b1a2851963b188acf18fb89f2146d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 17 Nov 2023 20:08:52 -0600 Subject: [PATCH 048/116] Reduce subsumption checking --- src/pyk/proof/reachability.py | 39 +++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index e6c920cb7..5bd6a8c99 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1076,7 +1076,7 @@ class APRProofExtendResult(APRProofResult): class APRProofSubsumeResult(APRProofResult): node_id: int subsume_node_id: int - csubst: CSubst + csubst: CSubst | None class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): @@ -1155,6 +1155,7 @@ def __init__( trace_rewrites=self.trace_rewrites, ) self.prover = APRProver(proof=proof, kcfg_explore=self.kcfg_explore) + self.prover._check_all_terminals() def __del__(self) -> None: self.client.close() @@ -1175,6 +1176,7 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: if self.prover.nonzero_depth(pending_node) else self.prover.dependencies_module_name ) + steps.append( APRProofStep( cterm=pending_node.cterm, @@ -1192,6 +1194,8 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: trace_rewrites=self.trace_rewrites, bug_report=self.bug_report, bug_report_id=self.bug_report_id, + is_terminal=(self.kcfg_explore.kcfg_semantics.is_terminal(pending_node.cterm)), + target_is_terminal=(proof.target not in proof._terminal), ) ) return steps @@ -1204,18 +1208,28 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. 
""" + self.prover._check_all_terminals() + # Extend proof as per `update` if type(update) is APRProofExtendResult: node = proof.kcfg.node(update.node_id) - if self.kcfg_explore.kcfg_semantics.is_terminal(node.cterm): - proof._terminal.add(node.id) - else: - self.kcfg_explore.extend_kcfg( - extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs - ) +# if self.kcfg_explore.kcfg_semantics.is_terminal(node.cterm): +# proof._terminal.add(node.id) +# else: +# self.kcfg_explore.extend_kcfg( +# extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs +# ) + self.kcfg_explore.extend_kcfg( + extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs + ) elif type(update) is APRProofSubsumeResult: - proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) + if update.csubst is None: + proof._terminal.add(update.node_id) + else: + proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) + +# self.prover._check_all_terminals() if proof.failed: self.prover.save_failure_info() @@ -1296,6 +1310,8 @@ class APRProofStep(parallel.ProofStep[APRProofResult]): execute_depth: int | None cut_point_rules: Iterable[str] terminal_rules: Iterable[str] + is_terminal: bool + target_is_terminal: bool bug_report: BugReport | None bug_report_id: str | None @@ -1329,9 +1345,10 @@ def exec(self) -> APRProofResult: trace_rewrites=self.trace_rewrites, ) - csubst = kcfg_explore.cterm_implies(self.cterm, self.target_cterm) - if csubst is not None: - return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) + if self.is_terminal or self.target_is_terminal: + csubst = kcfg_explore.cterm_implies(self.cterm, self.target_cterm) + if csubst is not None or self.is_terminal: + return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) result = kcfg_explore.extend_cterm( self.cterm, From fcd2d14520ae62d37cdaa76283679b186c74e067 Mon Sep 17 00:00:00 2001 From: devops Date: Sat, 18 Nov 2023 02:09:14 +0000 Subject: [PATCH 049/116] Set Version: 0.1.507 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 7b54ca6be..2efcdc1fd 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.506 +0.1.507 diff --git a/pyproject.toml b/pyproject.toml index 9def3d493..c819144c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.506" +version = "0.1.507" description = "" authors = [ "Runtime Verification, Inc. 
", From 426486dc7ff66b11ff7708c697267e4a0d606931 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 17 Nov 2023 20:12:56 -0600 Subject: [PATCH 050/116] Fix formatting --- src/pyk/proof/reachability.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 5bd6a8c99..214a30a6c 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1213,13 +1213,6 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: # Extend proof as per `update` if type(update) is APRProofExtendResult: node = proof.kcfg.node(update.node_id) - -# if self.kcfg_explore.kcfg_semantics.is_terminal(node.cterm): -# proof._terminal.add(node.id) -# else: -# self.kcfg_explore.extend_kcfg( -# extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs -# ) self.kcfg_explore.extend_kcfg( extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs ) @@ -1229,8 +1222,6 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: else: proof.kcfg.create_cover(update.node_id, proof.target, csubst=update.csubst) -# self.prover._check_all_terminals() - if proof.failed: self.prover.save_failure_info() @@ -1348,7 +1339,9 @@ def exec(self) -> APRProofResult: if self.is_terminal or self.target_is_terminal: csubst = kcfg_explore.cterm_implies(self.cterm, self.target_cterm) if csubst is not None or self.is_terminal: - return APRProofSubsumeResult(node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst) + return APRProofSubsumeResult( + node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst + ) result = kcfg_explore.extend_cterm( self.cterm, From e85a70c905aae9c97f022cb47306412c95a6e214 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 20 Nov 2023 13:35:16 -0600 Subject: [PATCH 051/116] Profile cterm_implies and extend_cterm in ParallelAPRProver --- src/pyk/proof/reachability.py | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 214a30a6c..21ee64e3d 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -3,6 +3,7 @@ import graphlib import json import logging +import time from abc import ABC from dataclasses import dataclass from typing import TYPE_CHECKING @@ -1063,7 +1064,8 @@ def lines(self) -> list[str]: @dataclass(frozen=True) class APRProofResult(ABC): - ... 
+ cterm_implies_time: int + extend_cterm_time: int @dataclass(frozen=True) @@ -1096,6 +1098,9 @@ class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): bug_report: BugReport | None bug_report_id: str | None + total_cterm_implies_time: int + total_cterm_extend_time: int + def __init__( self, proof: APRProof, @@ -1128,6 +1133,8 @@ def __init__( self.trace_rewrites = trace_rewrites self.bug_report = bug_report self.bug_report_id = bug_report_id + self.total_cterm_extend_time = 0 + self.total_cterm_implies_time = 0 self.server = kore_server( definition_dir=definition_dir, llvm_definition_dir=llvm_definition_dir, @@ -1210,6 +1217,9 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: self.prover._check_all_terminals() + self.total_cterm_extend_time += update.extend_cterm_time + self.total_cterm_implies_time += update.cterm_implies_time + # Extend proof as per `update` if type(update) is APRProofExtendResult: node = proof.kcfg.node(update.node_id) @@ -1336,13 +1346,23 @@ def exec(self) -> APRProofResult: trace_rewrites=self.trace_rewrites, ) + cterm_implies_time = 0 + extend_cterm_time = 0 + if self.is_terminal or self.target_is_terminal: + init_cterm_implies_time = time.time_ns() csubst = kcfg_explore.cterm_implies(self.cterm, self.target_cterm) + cterm_implies_time = time.time_ns() - init_cterm_implies_time if csubst is not None or self.is_terminal: return APRProofSubsumeResult( - node_id=self.node_id, subsume_node_id=self.target_node_id, csubst=csubst + node_id=self.node_id, + subsume_node_id=self.target_node_id, + csubst=csubst, + cterm_implies_time=cterm_implies_time, + extend_cterm_time=extend_cterm_time, ) + init_extend_cterm_time = time.time_ns() result = kcfg_explore.extend_cterm( self.cterm, module_name=self.module_name, @@ -1350,4 +1370,10 @@ def exec(self) -> APRProofResult: terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, ) - return APRProofExtendResult(result, self.node_id) + extend_cterm_time = init_extend_cterm_time - time.time_ns() + return APRProofExtendResult( + extend_result=result, + node_id=self.node_id, + cterm_implies_time=cterm_implies_time, + extend_cterm_time=extend_cterm_time, + ) From 5de5299ea94dcbac366f93d852dd0319a3c5f56e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 20 Nov 2023 16:40:43 -0600 Subject: [PATCH 052/116] Use Process and Queue to keep data between workers on the same process --- src/pyk/proof/parallel.py | 137 ++++++++++-------- src/pyk/proof/reachability.py | 22 +-- .../integration/proof/test_imp_parallel.py | 5 +- .../integration/proof/test_parallel_prove.py | 21 ++- 4 files changed, 105 insertions(+), 80 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index ef3099b29..b66a9a488 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -2,21 +2,23 @@ import time from abc import ABC, abstractmethod -from concurrent.futures import CancelledError, ProcessPoolExecutor, wait -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from multiprocessing import Process, Queue + +# from concurrent.futures import CancelledError, ProcessPoolExecutor, wait +from typing import TYPE_CHECKING, Generic, TypeVar from pyk.proof.proof import ProofStatus if TYPE_CHECKING: from collections.abc import Iterable, Mapping - from concurrent.futures import Executor, Future P = TypeVar('P', bound='Proof') U = TypeVar('U') +D = TypeVar('D', bound='ProcessData') -class Prover(ABC, Generic[P, U]): +class Prover(ABC, Generic[P, U, D]): """ Should contain all data needed 
to make progress on a `P` (proof). May be specific to a single `P` (proof) or may be able to handle multiple. @@ -29,7 +31,7 @@ class Prover(ABC, Generic[P, U]): """ @abstractmethod - def steps(self, proof: P) -> Iterable[ProofStep[U]]: + def steps(self, proof: P) -> Iterable[ProofStep[U, D]]: """ Return a list of `ProofStep[U]` which represents all the computation jobs as defined by `ProofStep`, which have not yet been computed and committed, and are available given the current state of `proof`. Note that this is a requirement which is not enforced by the type system. If `steps()` or `commit()` has been called on a proof `proof`, `steps()` may never again be called on `proof`. @@ -67,7 +69,11 @@ def status(self) -> ProofStatus: ... -class ProofStep(ABC, Generic[U]): +class ProcessData(ABC): + ... + + +class ProofStep(ABC, Generic[U, D]): """ Should be a description of a computation needed to make progress on a `Proof`. Must be hashable. @@ -77,7 +83,7 @@ class ProofStep(ABC, Generic[U]): """ @abstractmethod - def exec(self) -> U: + def exec(self, data: D) -> U: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. @@ -86,21 +92,19 @@ def exec(self) -> U: ... -def run_job(step: ProofStep) -> Any: - init_process_time = time.time_ns() - result = step.exec() - process_time = time.time_ns() - init_process_time - return (result, process_time) - - def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], max_workers: int, + process_data: ProcessData, ) -> Iterable[Proof]: - pending: dict[Future[Any], str] = {} explored: set[tuple[str, ProofStep]] = set() + in_queue: Queue = Queue() + out_queue: Queue = Queue() + + pending_jobs: int = 0 + total_commit_time = 0 total_steps_time = 0 total_process_time = 0 @@ -108,7 +112,16 @@ def prove_parallel( total_init_time = time.time_ns() - def submit(proof_id: str, pool: Executor) -> None: + def run_process(data: ProcessData) -> None: + while True: + dequeued = in_queue.get() + if dequeued == 0: + break + proof_id, proof_step = dequeued + update = proof_step.exec(data) + out_queue.put((proof_id, update)) + + def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] steps_init_time = time.time_ns() @@ -119,54 +132,50 @@ def submit(proof_id: str, pool: Executor) -> None: if (proof_id, step) in explored: continue explored.add((proof_id, step)) - future = pool.submit(run_job, step) # <-- schedule steps for execution - pending[future] = proof_id - - with ProcessPoolExecutor(max_workers=max_workers) as pool: - for proof_id in proofs: - submit(proof_id, pool) - - while pending: - done, _ = wait(pending, return_when='FIRST_COMPLETED') - future = done.pop() - - proof_id = pending[future] - proof = proofs[proof_id] - prover = provers[proof_id] - try: - update, process_time = future.result() - except CancelledError as err: - raise RuntimeError(f'Task was cancelled for proof {proof_id}') from err - except TimeoutError as err: - raise RuntimeError( - f"Future for proof {proof_id} was not finished executing and timed out. This shouldn't happen since this future was already waited on." - ) from err - except Exception as err: - raise RuntimeError('Exception was raised in ProofStep.exec() for proof {proof_id}.') from err - - total_process_time += process_time - - commit_init_time = time.time_ns() - prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) 
- total_commit_time += time.time_ns() - commit_init_time - - match proof.status: - # terminate on first failure, yield partial results, etc. - case ProofStatus.FAILED: - ... - case ProofStatus.PENDING: - steps_init_time = time.time_ns() - if not list(prover.steps(proof)): - raise ValueError('Prover violated expectation. status is pending with no further steps.') - total_steps_time += time.time_ns() - steps_init_time - case ProofStatus.PASSED: - steps_init_time = time.time_ns() - if list(prover.steps(proof)): - raise ValueError('Prover violated expectation. status is passed with further steps.') - total_steps_time += time.time_ns() - steps_init_time - - submit(proof_id, pool) - pending.pop(future) + in_queue.put((proof_id, step)) + nonlocal pending_jobs + pending_jobs += 1 + + processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] + for process in processes: + process.start() + + for proof_id in proofs.keys(): + submit(proof_id) + + while pending_jobs > 0: + proof_id, update = out_queue.get() + pending_jobs -= 1 + + proof = proofs[proof_id] + prover = provers[proof_id] + + commit_init_time = time.time_ns() + prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + total_commit_time += time.time_ns() - commit_init_time + + match proof.status: + # terminate on first failure, yield partial results, etc. + case ProofStatus.FAILED: + ... + case ProofStatus.PENDING: + steps_init_time = time.time_ns() + if not list(prover.steps(proof)): + raise ValueError('Prover violated expectation. status is pending with no further steps.') + total_steps_time += time.time_ns() - steps_init_time + case ProofStatus.PASSED: + steps_init_time = time.time_ns() + if list(prover.steps(proof)): + raise ValueError('Prover violated expectation. 
status is passed with further steps.') + total_steps_time += time.time_ns() - steps_init_time + + submit(proof_id) + + for _ in range(max_workers): + in_queue.put(0) + + for process in processes: + process.join() total_time = time.time_ns() - total_init_time diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 21ee64e3d..7340fe739 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1081,7 +1081,13 @@ class APRProofSubsumeResult(APRProofResult): csubst: CSubst | None -class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult]): +@dataclass(frozen=True) +class APRProofProcessData(parallel.ProcessData): + kprint: KPrint + kcfg_semantics: KCFGSemantics | None + + +class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProcessData]): prover: APRProver server: KoreServer kcfg_explore: KCFGExplore @@ -1195,8 +1201,8 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: execute_depth=self.execute_depth, terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, - kprint=self.kprint, - kcfg_semantics=self.kcfg_semantics, + # kprint=self.kprint, + # kcfg_semantics=self.kcfg_semantics, id=self.id, trace_rewrites=self.trace_rewrites, bug_report=self.bug_report, @@ -1301,7 +1307,7 @@ def __init__( @dataclass(frozen=True, eq=True) -class APRProofStep(parallel.ProofStep[APRProofResult]): +class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): cterm: CTerm node_id: int module_name: str @@ -1317,15 +1323,13 @@ class APRProofStep(parallel.ProofStep[APRProofResult]): bug_report: BugReport | None bug_report_id: str | None - kprint: KPrint - kcfg_semantics: KCFGSemantics | None id: str | None trace_rewrites: bool def __hash__(self) -> int: return hash((self.cterm, self.node_id)) - def exec(self) -> APRProofResult: + def exec(self, data: APRProofProcessData) -> APRProofResult: """ Should perform some nontrivial computation given by `self`, which can be done independently of other calls to `exec()`. Allowed to be nondeterministic. 
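Illustrative aside (not part of the patch): the parallel.py hunks earlier in this commit replace the ProcessPoolExecutor with long-lived worker processes that read (proof_id, step) pairs from an input Queue, call step.exec(data), push (proof_id, update) onto an output Queue, and stop when they dequeue the 0 sentinel. A minimal, self-contained sketch of that worker/queue pattern follows; the names worker, run_jobs and SENTINEL are illustrative, not pyk APIs.

# Sketch only: mirrors the Process/Queue scheme adopted by prove_parallel in this
# commit, with a trivial payload standing in for ProofStep.exec(data).
from __future__ import annotations

from multiprocessing import Process, Queue

SENTINEL = 0  # same convention as the patch: a 0 on the input queue stops a worker


def worker(in_queue: Queue, out_queue: Queue) -> None:
    while True:
        item = in_queue.get()
        if item == SENTINEL:
            break
        job_id, payload = item
        out_queue.put((job_id, payload * 2))  # stand-in for ProofStep.exec(data)


def run_jobs(jobs: dict[str, int], max_workers: int = 2) -> dict[str, int]:
    in_queue: Queue = Queue()
    out_queue: Queue = Queue()
    workers = [Process(target=worker, args=(in_queue, out_queue)) for _ in range(max_workers)]
    for proc in workers:
        proc.start()
    for job_id, payload in jobs.items():
        in_queue.put((job_id, payload))
    results: dict[str, int] = {}
    for _ in range(len(jobs)):
        job_id, result = out_queue.get()
        results[job_id] = result
    for _ in workers:
        in_queue.put(SENTINEL)
    for proc in workers:
        proc.join()
    return results


if __name__ == '__main__':
    print(run_jobs({'proof1': 1, 'proof2': 2}))

Unlike a ProcessPoolExecutor, the workers here outlive individual jobs, which is what lets per-process state (the D parameter of Prover[P, U, D]) be reused across steps.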
@@ -1339,9 +1343,9 @@ def exec(self) -> APRProofResult: bug_report_id=self.bug_report_id, ) as client: kcfg_explore = KCFGExplore( - kprint=self.kprint, + kprint=data.kprint, kore_client=client, - kcfg_semantics=self.kcfg_semantics, + kcfg_semantics=data.kcfg_semantics, id=self.id, trace_rewrites=self.trace_rewrites, ) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index d5bd33f8f..3b20c643b 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -7,7 +7,7 @@ from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus -from pyk.proof.reachability import APRProof, ParallelAPRProver +from pyk.proof.reachability import APRProof, APRProofProcessData, ParallelAPRProver from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest from pyk.utils import single @@ -63,6 +63,8 @@ def test_imp_parallel_prove( kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) ) + process_data = APRProofProcessData(kcfg_semantics=self.semantics(kprove.definition), kprint=kprint) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) parallel_prover = ParallelAPRProver( @@ -84,6 +86,7 @@ def test_imp_parallel_prove( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, max_workers=2, + process_data=process_data, ) assert len(list(results)) == 1 diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 0dfcbe13c..749a8284a 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -4,7 +4,7 @@ import time from dataclasses import dataclass -from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel +from pyk.proof.parallel import ProcessData, Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus @@ -34,18 +34,22 @@ def status(self) -> ProofStatus: return ProofStatus.PENDING +class TreeExploreProofProcessData(ProcessData): + ... 
+ + @dataclass(frozen=True) -class TreeExploreProofStep(ProofStep[int]): +class TreeExploreProofStep(ProofStep[int, TreeExploreProofProcessData]): node: int - def exec(self) -> int: + def exec(self, data: TreeExploreProofProcessData) -> int: print(f'exec {self.node}', file=sys.stderr) time.sleep(1) print(f'done {self.node}', file=sys.stderr) return self.node -class TreeExploreProver(Prover[TreeExploreProof, int]): +class TreeExploreProver(Prover[TreeExploreProof, int, TreeExploreProofProcessData]): def __init__(self) -> None: return @@ -96,7 +100,9 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) + results = prove_parallel( + {'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=TreeExploreProofProcessData() + ) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED @@ -105,7 +111,9 @@ def test_parallel_prove() -> None: def test_parallel_fail() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2) + results = prove_parallel( + {'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=TreeExploreProofProcessData() + ) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.FAILED @@ -119,6 +127,7 @@ def test_parallel_multiple_proofs() -> None: proofs, provers_map, max_workers=4, + process_data=TreeExploreProofProcessData(), ) assert len(list(results)) == 3 for proof in proofs.values(): From caec0713e1e639dddfd19b7529792bf76df1f26b Mon Sep 17 00:00:00 2001 From: devops Date: Mon, 20 Nov 2023 22:41:00 +0000 Subject: [PATCH 053/116] Set Version: 0.1.508 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 2efcdc1fd..e2f99c774 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.507 +0.1.508 diff --git a/pyproject.toml b/pyproject.toml index c819144c6..4ebe9c060 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.507" +version = "0.1.508" description = "" authors = [ "Runtime Verification, Inc. 
", From 53431a59bd6d7df98ce00da21ad992fb3150850e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 20 Nov 2023 16:52:57 -0600 Subject: [PATCH 054/116] Fix time calculation --- src/pyk/proof/reachability.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 7340fe739..0889f5b9e 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1374,7 +1374,7 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, ) - extend_cterm_time = init_extend_cterm_time - time.time_ns() + extend_cterm_time = time.time_ns() - init_extend_cterm_time return APRProofExtendResult( extend_result=result, node_id=self.node_id, From b86f9d8de29a57460c9b802429409633f98eac71 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 20 Nov 2023 20:25:39 -0600 Subject: [PATCH 055/116] Dont consider terminal nodes failling --- src/pyk/proof/reachability.py | 4 ++-- src/tests/integration/proof/test_imp_parallel.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 0889f5b9e..1e93409c3 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -122,10 +122,10 @@ def is_target(self, node_id: NodeIdLike) -> bool: def is_failing(self, node_id: NodeIdLike) -> bool: return ( self.kcfg.is_leaf(node_id) - and not self.is_explorable(node_id) + and not self.kcfg.is_vacuous(node_id) + and (self.is_terminal(node_id) or self.kcfg.is_stuck(node_id)) and not self.is_target(node_id) and not self.is_refuted(node_id) - and not self.kcfg.is_vacuous(node_id) ) def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 3b20c643b..38cf6d8b1 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -63,17 +63,16 @@ def test_imp_parallel_prove( kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) ) - process_data = APRProofProcessData(kcfg_semantics=self.semantics(kprove.definition), kprint=kprint) - proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) + semantics = self.semantics(kprove.definition) parallel_prover = ParallelAPRProver( proof=proof, module_name=kprove.main_module, definition_dir=kprove.definition_dir, execute_depth=1000, kprint=kprint, - kcfg_semantics=self.semantics(kprove.definition), + kcfg_semantics=semantics, id=claim_id, trace_rewrites=False, cut_point_rules=(), @@ -82,6 +81,8 @@ def test_imp_parallel_prove( bug_report_id=None, ) + process_data = APRProofProcessData(kprint=kprint, kcfg_semantics=semantics) + results = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, From d9bf236d93f755720c7e0f8856000399409bcf5e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 20 Nov 2023 21:31:34 -0600 Subject: [PATCH 056/116] make node failing different from node terminal. 
a node is failing once it has been checked for subsumption and failed --- src/pyk/proof/reachability.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 1e93409c3..2f5d5ddb9 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -56,6 +56,7 @@ class APRProof(Proof, KCFGExploration, parallel.Proof): logs: dict[int, tuple[LogEntry, ...]] circularity: bool failure_info: APRFailureInfo | None + checked_for_subsumption: set[int] def __init__( self, @@ -65,6 +66,7 @@ def __init__( init: NodeIdLike, target: NodeIdLike, logs: dict[int, tuple[LogEntry, ...]], + checked_for_subsumption: set[int] | None = None, proof_dir: Path | None = None, node_refutations: dict[int, str] | None = None, subproof_ids: Iterable[str] = (), @@ -74,6 +76,7 @@ def __init__( Proof.__init__(self, id, proof_dir=proof_dir, subproof_ids=subproof_ids, admitted=admitted) KCFGExploration.__init__(self, kcfg, terminal) + self.checked_for_subsumption = checked_for_subsumption if checked_for_subsumption is not None else set() self.failure_info = None self.init = kcfg._resolve(init) self.target = kcfg._resolve(target) @@ -122,10 +125,10 @@ def is_target(self, node_id: NodeIdLike) -> bool: def is_failing(self, node_id: NodeIdLike) -> bool: return ( self.kcfg.is_leaf(node_id) - and not self.kcfg.is_vacuous(node_id) - and (self.is_terminal(node_id) or self.kcfg.is_stuck(node_id)) + and ((node_id in self.checked_for_subsumption) or self.kcfg.is_stuck(node_id)) and not self.is_target(node_id) and not self.is_refuted(node_id) + and not self.kcfg.is_vacuous(node_id) ) def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: @@ -168,6 +171,7 @@ def from_dict(cls: type[APRProof], dct: Mapping[str, Any], proof_dir: Path | Non admitted = dct.get('admitted', False) subproof_ids = dct['subproof_ids'] if 'subproof_ids' in dct else [] node_refutations: dict[int, str] = {} + checked_for_subsumption = dct['_checked_for_subsumption'] if 'node_refutation' in dct: node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in dct['node_refutations']} if 'logs' in dct: @@ -187,6 +191,7 @@ def from_dict(cls: type[APRProof], dct: Mapping[str, Any], proof_dir: Path | Non proof_dir=proof_dir, subproof_ids=subproof_ids, node_refutations=node_refutations, + checked_for_subsumption=checked_for_subsumption, ) @staticmethod @@ -324,6 +329,7 @@ def dict(self) -> dict[str, Any]: dct['target'] = self.target dct['node_refutations'] = {node_id: proof.id for (node_id, proof) in self.node_refutations.items()} dct['circularity'] = self.circularity + dct['checked_for_subsumption'] = list(self.checked_for_subsumption) logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} dct['logs'] = logs return dct @@ -367,6 +373,7 @@ def read_proof_data(proof_dir: Path, id: str) -> APRProof: terminal = proof_dict['terminal'] logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in proof_dict['logs'].items()} subproof_ids = proof_dict['subproof_ids'] + checked_for_subsumption = proof_dict['checked_for_subsumption'] node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in proof_dict['node_refutations']} return APRProof( @@ -381,6 +388,7 @@ def read_proof_data(proof_dir: Path, id: str) -> APRProof: proof_dir=proof_dir, subproof_ids=subproof_ids, node_refutations=node_refutations, + checked_for_subsumption=checked_for_subsumption, ) def write_proof_data(self) -> 
None: @@ -403,6 +411,7 @@ def write_proof_data(self) -> None: self.kcfg._resolve(node_id): proof.id for (node_id, proof) in self.node_refutations.items() } dct['circularity'] = self.circularity + dct['checked_for_subsumption'] = list(self.checked_for_subsumption) logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} dct['logs'] = logs @@ -429,6 +438,7 @@ def __init__( bounded: Iterable[int] | None = None, proof_dir: Path | None = None, subproof_ids: Iterable[str] = (), + checked_for_subsumption: set[int] | None = None, node_refutations: dict[int, str] | None = None, circularity: bool = False, admitted: bool = False, @@ -445,6 +455,7 @@ def __init__( node_refutations=node_refutations, circularity=circularity, admitted=admitted, + checked_for_subsumption=checked_for_subsumption, ) self.bmc_depth = bmc_depth self._bounded = set(bounded) if bounded is not None else set() @@ -466,6 +477,7 @@ def read_proof_data(proof_dir: Path, id: str) -> APRBMCProof: node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in proof_dict['node_refutations']} bounded = proof_dict['bounded'] bmc_depth = int(proof_dict['bmc_depth']) + checked_for_subsumption = {kcfg._resolve(node_id) for node_id in proof_dict['checked_for_subsumption']} return APRBMCProof( id=id, @@ -481,6 +493,7 @@ def read_proof_data(proof_dir: Path, id: str) -> APRBMCProof: proof_dir=proof_dir, subproof_ids=subproof_ids, node_refutations=node_refutations, + checked_for_subsumption=checked_for_subsumption, ) def write_proof_data(self) -> None: @@ -507,6 +520,7 @@ def write_proof_data(self) -> None: dct['terminal'] = sorted(self._terminal) dct['bounded'] = sorted(self._bounded) dct['bmc_depth'] = self.bmc_depth + dct['checked_for_subsumption'] = list(self.checked_for_subsumption) proof_json.write_text(json.dumps(dct)) _LOGGER.info(f'Wrote proof data for {self.id}: {proof_json}') @@ -551,6 +565,7 @@ def from_dict(cls: type[APRBMCProof], dct: Mapping[str, Any], proof_dir: Path | logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in dct['logs'].items()} else: logs = {} + checked_for_subsumption = {kcfg._resolve(node_id) for node_id in dct['checked_for_subsumption']} return APRBMCProof( id, @@ -566,6 +581,7 @@ def from_dict(cls: type[APRBMCProof], dct: Mapping[str, Any], proof_dir: Path | subproof_ids=subproof_ids, node_refutations=node_refutations, admitted=admitted, + checked_for_subsumption=checked_for_subsumption, ) @property @@ -701,6 +717,7 @@ def _check_subsume(self, node: KCFG.Node) -> bool: f'Checking subsumption into target state {self.proof.id}: {shorten_hashes((node.id, self.proof.target))}' ) csubst = self.kcfg_explore.cterm_implies(node.cterm, self.proof.kcfg.node(self.proof.target).cterm) + self.proof.checked_for_subsumption.add(node.id) if csubst is not None: self.proof.kcfg.create_cover(node.id, self.proof.target, csubst=csubst) _LOGGER.info(f'Subsumed into target node {self.proof.id}: {shorten_hashes((node.id, self.proof.target))}') @@ -1233,6 +1250,7 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: extend_result=update.extend_result, kcfg=proof.kcfg, node=node, logs=proof.logs ) elif type(update) is APRProofSubsumeResult: + proof.checked_for_subsumption.add(update.node_id) if update.csubst is None: proof._terminal.add(update.node_id) else: From 9da8fa904cd628371a2c3311484603358eda6407 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 21 Nov 2023 14:16:50 -0600 Subject: [PATCH 057/116] Use 1 server per prover per thread --- 
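Note (illustrative sketch, not pyk code): this commit makes each worker process keep one KoreServer per proof id, created lazily on first use inside APRProofStep.exec and cached on the mutable APRProofProcessData carried by that process; a later commit in this series adds a cleanup hook that closes the cached servers. A minimal sketch of that lazy per-process cache, with stand-in names ServerLike and ProcessCache that are not pyk APIs:

# Sketch only: lazy per-proof resource cache, one instance per worker process.
from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class ServerLike:
    """Stand-in for an expensive external resource such as a KoreServer."""

    proof_id: str
    closed: bool = False

    def close(self) -> None:
        self.closed = True


@dataclass
class ProcessCache:
    servers: dict[str, ServerLike] = field(default_factory=dict)

    def get_server(self, proof_id: str) -> ServerLike:
        # Start the resource only the first time this process sees `proof_id`.
        if proof_id not in self.servers:
            self.servers[proof_id] = ServerLike(proof_id)
        return self.servers[proof_id]

    def cleanup(self) -> None:
        # Mirrors the ProcessData.cleanup() hook added later in this series.
        for server in self.servers.values():
            server.close()


if __name__ == '__main__':
    cache = ProcessCache()
    first = cache.get_server('proof1')
    assert cache.get_server('proof1') is first  # reused within the same process
    cache.cleanup()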
src/pyk/proof/reachability.py | 95 +++++++++++++++++-- .../integration/proof/test_imp_parallel.py | 7 +- 2 files changed, 95 insertions(+), 7 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 2f5d5ddb9..f9412a24f 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1098,11 +1098,52 @@ class APRProofSubsumeResult(APRProofResult): csubst: CSubst | None -@dataclass(frozen=True) class APRProofProcessData(parallel.ProcessData): kprint: KPrint kcfg_semantics: KCFGSemantics | None + definition_dir: str | Path + llvm_definition_dir: Path | None + module_name: str + command: str | Iterable[str] | None + smt_timeout: int | None + smt_retry_limit: int | None + smt_tactic: str | None + haskell_log_format: KoreExecLogFormat + haskell_log_entries: Iterable[str] + log_axioms_file: Path | None + + kore_servers: dict[str, KoreServer] + + def __init__( + self, + kprint: KPrint, + kcfg_semantics: KCFGSemantics, + definition_dir: str | Path, + module_name: str, + llvm_definition_dir: Path | None = None, + command: str | Iterable[str] | None = None, + smt_timeout: int | None = None, + smt_retry_limit: int | None = None, + smt_tactic: str | None = None, + haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, + haskell_log_entries: Iterable[str] = (), + log_axioms_file: Path | None = None, + ) -> None: + self.kprint = kprint + self.kcfg_semantics = kcfg_semantics + self.kore_servers = {} + self.definition_dir = definition_dir + self.llvm_definition_dir = llvm_definition_dir + self.module_name = module_name + self.command = command + self.smt_timeout = smt_timeout + self.smt_retry_limit = smt_retry_limit + self.smt_tactic = smt_tactic + self.haskell_log_format = haskell_log_format + self.haskell_log_entries = haskell_log_entries + self.log_axioms_file = log_axioms_file + class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProcessData]): prover: APRProver @@ -1209,23 +1250,23 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: steps.append( APRProofStep( + proof_id=proof.id, cterm=pending_node.cterm, node_id=pending_node.id, module_name=module_name, target_cterm=target_node.cterm, target_node_id=target_node.id, - port=self.server.port, + # port=self.server.port, execute_depth=self.execute_depth, terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, - # kprint=self.kprint, - # kcfg_semantics=self.kcfg_semantics, id=self.id, trace_rewrites=self.trace_rewrites, bug_report=self.bug_report, bug_report_id=self.bug_report_id, is_terminal=(self.kcfg_explore.kcfg_semantics.is_terminal(pending_node.cterm)), target_is_terminal=(proof.target not in proof._terminal), + main_module_name=self.prover.main_module_name, ) ) return steps @@ -1326,18 +1367,21 @@ def __init__( @dataclass(frozen=True, eq=True) class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): + proof_id: str cterm: CTerm node_id: int module_name: str target_cterm: CTerm target_node_id: int - port: int + # port: int execute_depth: int | None cut_point_rules: Iterable[str] terminal_rules: Iterable[str] is_terminal: bool target_is_terminal: bool + main_module_name: str + bug_report: BugReport | None bug_report_id: str | None @@ -1354,9 +1398,28 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" + init_kcfg_explore = False + + if data.kore_servers.get(self.proof_id) is None: + init_kcfg_explore = True + data.kore_servers[self.proof_id] = kore_server( + definition_dir=data.definition_dir, + llvm_definition_dir=data.llvm_definition_dir, + module_name=data.module_name, + command=data.command, + bug_report=self.bug_report, + smt_timeout=data.smt_timeout, + smt_retry_limit=data.smt_retry_limit, + smt_tactic=data.smt_tactic, + haskell_log_format=data.haskell_log_format, + haskell_log_entries=data.haskell_log_entries, + log_axioms_file=data.log_axioms_file, + ) + server = data.kore_servers[self.proof_id] + with KoreClient( host='localhost', - port=self.port, + port=server.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id, ) as client: @@ -1368,6 +1431,26 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: trace_rewrites=self.trace_rewrites, ) + if init_kcfg_explore: + # dependencies_as_claims: list[KClaim] = [d.as_claim(self.kcfg_explore.kprint) for d in apr_subproofs] + + dependencies_module_name = self.main_module_name + '-DEPENDS-MODULE' + kcfg_explore.add_dependencies_module( + self.main_module_name, + dependencies_module_name, + # dependencies_as_claims, + [], + priority=1, + ) + circularities_module_name = self.main_module_name + '-CIRCULARITIES-MODULE' + kcfg_explore.add_dependencies_module( + self.main_module_name, + circularities_module_name, + # dependencies_as_claims + ([proof.as_claim(self.kcfg_explore.kprint)] if proof.circularity else []), + [], + priority=1, + ) + cterm_implies_time = 0 extend_cterm_time = 0 diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 38cf6d8b1..b9f0461be 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -81,7 +81,12 @@ def test_imp_parallel_prove( bug_report_id=None, ) - process_data = APRProofProcessData(kprint=kprint, kcfg_semantics=semantics) + process_data = APRProofProcessData( + kprint=kprint, + kcfg_semantics=semantics, + definition_dir=kprove.definition_dir, + module_name=kprove.main_module, + ) results = prove_parallel( proofs={'proof1': proof}, From 5c105ca661cb18e9da3d33c10f05a82dff4fcf40 Mon Sep 17 00:00:00 2001 From: devops Date: Wed, 22 Nov 2023 21:05:26 +0000 Subject: [PATCH 058/116] Set Version: 0.1.511 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 0e630fe4e..758ace539 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.510 +0.1.511 diff --git a/pyproject.toml b/pyproject.toml index 822460cbb..0b017d7d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.510" +version = "0.1.511" description = "" authors = [ "Runtime Verification, Inc. ", From 81958c5f27948f2f3d6b0bacf02227e8048bd28a Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 22 Nov 2023 15:49:54 -0600 Subject: [PATCH 059/116] Add cleanup method to ProcessData --- src/pyk/proof/parallel.py | 5 ++++- src/pyk/proof/reachability.py | 4 ++++ src/tests/integration/proof/test_parallel_prove.py | 3 ++- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index b66a9a488..603400d4e 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -70,7 +70,9 @@ def status(self) -> ProofStatus: class ProcessData(ABC): - ... 
+ @abstractmethod + def cleanup(self) -> None: + ... class ProofStep(ABC, Generic[U, D]): @@ -120,6 +122,7 @@ def run_process(data: ProcessData) -> None: proof_id, proof_step = dequeued update = proof_step.exec(data) out_queue.put((proof_id, update)) + data.cleanup() def submit(proof_id: str) -> None: proof = proofs[proof_id] diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index f9412a24f..e14869e31 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1144,6 +1144,10 @@ def __init__( self.haskell_log_entries = haskell_log_entries self.log_axioms_file = log_axioms_file + def cleanup(self) -> None: + for server in self.kore_servers.values(): + server.close() + class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProcessData]): prover: APRProver diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 749a8284a..c4d011372 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -35,7 +35,8 @@ def status(self) -> ProofStatus: class TreeExploreProofProcessData(ProcessData): - ... + def cleanup(self) -> None: + pass @dataclass(frozen=True) From 04bfed1f4dbe0df52cb5d3169a0fccac27665603 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 22 Nov 2023 16:21:53 -0600 Subject: [PATCH 060/116] Add checked attribute to printer --- src/pyk/proof/show.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pyk/proof/show.py b/src/pyk/proof/show.py index 20747610e..ca7d7a769 100644 --- a/src/pyk/proof/show.py +++ b/src/pyk/proof/show.py @@ -40,6 +40,8 @@ def node_attrs(self, kcfg: KCFG, node: KCFG.Node) -> list[str]: attrs.append('terminal') if 'stuck' in attrs: attrs.remove('stuck') + if node.id in self.proof.checked_for_subsumption: + attrs.append('checked') return attrs From 83b87c636d802afe31b7bb176fe28ea0d0d6c4cf Mon Sep 17 00:00:00 2001 From: devops Date: Sun, 3 Dec 2023 13:02:04 +0000 Subject: [PATCH 061/116] Set Version: 0.1.512 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 43efcec7e..b763bfd13 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.526 +0.1.512 diff --git a/pyproject.toml b/pyproject.toml index bf3f10359..75b6ce35a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.526" +version = "0.1.512" description = "" authors = [ "Runtime Verification, Inc. ", From 4354d195a9be576339e3fb2d43ed76b07fb9700a Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 12 Dec 2023 19:47:53 +0000 Subject: [PATCH 062/116] Set Version: 0.1.512 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 3481ae90d..b763bfd13 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.544 +0.1.512 diff --git a/pyproject.toml b/pyproject.toml index 45ee5aa0b..75b6ce35a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.544" +version = "0.1.512" description = "" authors = [ "Runtime Verification, Inc. 
", From a5bd37f065b8bc63a226a96db0d9723032707ba1 Mon Sep 17 00:00:00 2001 From: Jan Tusil Date: Tue, 12 Dec 2023 21:02:03 +0100 Subject: [PATCH 063/116] fix a typo --- src/pyk/proof/reachability.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index e14869e31..a1049a615 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -171,7 +171,7 @@ def from_dict(cls: type[APRProof], dct: Mapping[str, Any], proof_dir: Path | Non admitted = dct.get('admitted', False) subproof_ids = dct['subproof_ids'] if 'subproof_ids' in dct else [] node_refutations: dict[int, str] = {} - checked_for_subsumption = dct['_checked_for_subsumption'] + checked_for_subsumption = dct['checked_for_subsumption'] if 'node_refutation' in dct: node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in dct['node_refutations']} if 'logs' in dct: From c9772987612f4b970f342e0c4db09f90bbb4631d Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 12 Dec 2023 20:32:55 +0000 Subject: [PATCH 064/116] Set Version: 0.1.544 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index b763bfd13..3481ae90d 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.512 +0.1.544 diff --git a/pyproject.toml b/pyproject.toml index 75b6ce35a..45ee5aa0b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.512" +version = "0.1.544" description = "" authors = [ "Runtime Verification, Inc. ", From fdfd7523729e0eea1d912d4b3a9b4c8b232a9620 Mon Sep 17 00:00:00 2001 From: Jan Tusil Date: Mon, 18 Dec 2023 11:00:09 +0100 Subject: [PATCH 065/116] Update noah/apr-proof-parallel with support of dependencies between proof obligations (#780) Co-authored-by: devops --- package/version | 2 +- pyproject.toml | 2 +- src/pyk/proof/reachability.py | 40 ++++++++++++++----- .../integration/proof/test_imp_parallel.py | 30 ++++++++++---- 4 files changed, 54 insertions(+), 20 deletions(-) diff --git a/package/version b/package/version index 3481ae90d..a9913ae60 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.544 +0.1.545 diff --git a/pyproject.toml b/pyproject.toml index 45ee5aa0b..56a749101 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.544" +version = "0.1.545" description = "" authors = [ "Runtime Verification, Inc. 
", diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index a1049a615..7d260a7ea 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1252,6 +1252,14 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: else self.prover.dependencies_module_name ) + subproofs: list[Proof] = ( + [Proof.read_proof_data(proof.proof_dir, i) for i in proof.subproof_ids] + if proof.proof_dir is not None + else [] + ) + + apr_subproofs: list[APRProof] = [pf for pf in subproofs if isinstance(pf, APRProof)] + steps.append( APRProofStep( proof_id=proof.id, @@ -1271,6 +1279,10 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: is_terminal=(self.kcfg_explore.kcfg_semantics.is_terminal(pending_node.cterm)), target_is_terminal=(proof.target not in proof._terminal), main_module_name=self.prover.main_module_name, + dependencies_as_claims=[d.as_claim(self.kprint) for d in apr_subproofs], + self_proof_as_claim=proof.as_claim(self.kprint), + circularity=proof.circularity, + depth_is_nonzero=self.prover.nonzero_depth(pending_node), ) ) return steps @@ -1392,6 +1404,19 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): id: str | None trace_rewrites: bool + dependencies_as_claims: list[KClaim] + self_proof_as_claim: KClaim + circularity: bool + depth_is_nonzero: bool + + @property + def circularities_module_name(self) -> str: + return self.main_module_name + '-CIRCULARITIES-MODULE' + + @property + def dependencies_module_name(self) -> str: + return self.main_module_name + '-DEPENDS-MODULE' + def __hash__(self) -> int: return hash((self.cterm, self.node_id)) @@ -1436,22 +1461,16 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: ) if init_kcfg_explore: - # dependencies_as_claims: list[KClaim] = [d.as_claim(self.kcfg_explore.kprint) for d in apr_subproofs] - - dependencies_module_name = self.main_module_name + '-DEPENDS-MODULE' kcfg_explore.add_dependencies_module( self.main_module_name, - dependencies_module_name, - # dependencies_as_claims, - [], + self.dependencies_module_name, + self.dependencies_as_claims, priority=1, ) - circularities_module_name = self.main_module_name + '-CIRCULARITIES-MODULE' kcfg_explore.add_dependencies_module( self.main_module_name, - circularities_module_name, - # dependencies_as_claims + ([proof.as_claim(self.kcfg_explore.kprint)] if proof.circularity else []), - [], + self.circularities_module_name, + self.dependencies_as_claims + ([self.self_proof_as_claim] if self.circularity else []), priority=1, ) @@ -1471,6 +1490,7 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: extend_cterm_time=extend_cterm_time, ) + self.circularities_module_name if self.depth_is_nonzero else self.dependencies_module_name init_extend_cterm_time = time.time_ns() result = kcfg_explore.extend_cterm( self.cterm, diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index b9f0461be..2c3202fcd 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -24,9 +24,12 @@ from pyk.ktool.kprove import KProve PARALLEL_PROVE_TEST_DATA = ( - ('addition-1', ProofStatus.PASSED), - ('sum-10', ProofStatus.PASSED), - ('failing-if', ProofStatus.FAILED), + ('addition-1', ProofStatus.PASSED, False), + ('sum-10', ProofStatus.PASSED, False), + ('dep-fail-1', ProofStatus.PASSED, True), + ('sum-N', ProofStatus.PASSED, True), + ('sum-loop', ProofStatus.PASSED, False), + ('failing-if', 
ProofStatus.FAILED, False), ) @@ -42,7 +45,7 @@ def semantics(self, definition: KDefinition) -> KCFGSemantics: return ImpSemantics(definition) @pytest.mark.parametrize( - 'claim_id,expected_status', + 'claim_id,expected_status,admit_deps', PARALLEL_PROVE_TEST_DATA, ids=[test_id for test_id, *_ in PARALLEL_PROVE_TEST_DATA], ) @@ -50,20 +53,31 @@ def test_imp_parallel_prove( self, claim_id: str, expected_status: ProofStatus, + admit_deps: bool, kcfg_explore: KCFGExplore, - proof_dir: Path, kprove: KProve, kprint: KPrint, + proof_dir: Path, ) -> None: # claim_id = 'addition-1' spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' - claim = single( - kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) + spec_modules = kprove.get_claim_modules(Path(spec_file), spec_module_name=spec_module) + spec_label = f'{spec_module}.{claim_id}' + proofs = APRProof.from_spec_modules( + kprove.definition, + spec_modules, + spec_labels=[spec_label], + logs={}, + proof_dir=proof_dir, ) + proof = single([p for p in proofs if p.id == spec_label]) - proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) + if admit_deps: + for subproof in proof.subproofs: + subproof.admit() + subproof.write_proof_data() semantics = self.semantics(kprove.definition) parallel_prover = ParallelAPRProver( From cc271a4d53393d0f9e7b19dd8b6540d670bb5116 Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 19 Dec 2023 01:12:54 +0000 Subject: [PATCH 066/116] Set Version: 0.1.556 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 8a7a26b47..dc5a84487 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.555 +0.1.556 diff --git a/pyproject.toml b/pyproject.toml index acfc04124..7cff62e45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.555" +version = "0.1.556" description = "" authors = [ "Runtime Verification, Inc. ", From ef08872cca39c7ea636ee48afccb6a756f412b4a Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 19 Dec 2023 09:31:49 +0000 Subject: [PATCH 067/116] Set Version: 0.1.558 --- package/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/version b/package/version index a1d86ef24..09bc4f7cc 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.558 \ No newline at end of file +0.1.558 From 18f370a92e3d946e376be4849c45227851de887d Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 19 Dec 2023 15:02:40 +0000 Subject: [PATCH 068/116] Set Version: 0.1.559 --- package/version | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/version b/package/version index 09bc4f7cc..c037e80c7 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.558 +0.1.559 diff --git a/pyproject.toml b/pyproject.toml index 1f4c7061c..0f8ec8b46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.558" +version = "0.1.559" description = "" authors = [ "Runtime Verification, Inc. 
", From 02836d371a213d4e46fa48770b96cb38316e1003 Mon Sep 17 00:00:00 2001 From: devops Date: Thu, 25 Jan 2024 01:26:24 +0000 Subject: [PATCH 069/116] Set Version: 0.1.596 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2c97e854a..67eb72434 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.595' -release = '0.1.595' +version = '0.1.596' +release = '0.1.596' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index e2aa8fee8..48d068c9e 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.595 +0.1.596 diff --git a/pyproject.toml b/pyproject.toml index d488ea72c..a1e005608 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.595" +version = "0.1.596" description = "" authors = [ "Runtime Verification, Inc. ", From daa74a53020be8c4233de71255084a072a83be76 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 24 Jan 2024 21:58:30 -0600 Subject: [PATCH 070/116] Update module adding --- src/pyk/kore/rpc.py | 2 + src/pyk/proof/reachability.py | 58 ++++++++++++------- .../integration/proof/test_imp_parallel.py | 10 ++++ 3 files changed, 48 insertions(+), 22 deletions(-) diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index ddf809a2d..7658a2f09 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -888,6 +888,7 @@ def _request(self, method: str, **params: Any) -> dict[str, Any]: try: return self._client.request(method, **params) except JsonRpcError as err: + print(params, file=sys.stderr) raise self._error(err) from err def _error(self, err: JsonRpcError) -> KoreClientError: @@ -1009,6 +1010,7 @@ def get_model(self, pattern: Pattern, module_name: str | None = None) -> GetMode return GetModelResult.from_dict(result) def add_module(self, module: Module, *, name_as_id: bool | None = None) -> str: + print(f'adding module {module.text}', file=sys.stderr) params = filter_none( { 'module': module.text, diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 200de63c1..3b8e52603 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1275,22 +1275,24 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: else self.prover.dependencies_module_name ) - # subproofs: list[Proof] = ( - # [Proof.read_proof_data(proof.proof_dir, i) for i in proof.subproof_ids] - # if proof.proof_dir is not None - # else [] - # ) + subproofs: list[Proof] = ( + [Proof.read_proof_data(proof.proof_dir, i) for i in proof.subproof_ids] + if proof.proof_dir is not None + else [] + ) - # apr_subproofs: list[APRProof] = [pf for pf in subproofs if isinstance(pf, APRProof)] + apr_subproofs: list[APRProof] = [pf for pf in subproofs if isinstance(pf, APRProof)] steps.append( APRProofStep( proof_id=proof.id, cterm=pending_node.cterm, node_id=pending_node.id, - module_name=module_name, + dependencies_module_name=self.prover.dependencies_module_name, + circularities_module_name=self.prover.circularities_module_name, target_cterm=target_node.cterm, target_node_id=target_node.id, + use_module_name=module_name, # port=self.server.port, execute_depth=self.execute_depth, 
terminal_rules=self.terminal_rules, @@ -1302,8 +1304,8 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: is_terminal=(self.kcfg_explore.kcfg_semantics.is_terminal(pending_node.cterm)), target_is_terminal=(proof.target not in proof._terminal), main_module_name=self.prover.main_module_name, - # dependencies_as_claims=[d.as_rule() for d in apr_subproofs], - # self_proof_as_claim=proof.as_rule(), + dependencies_as_rules=[d.as_rule() for d in apr_subproofs], + self_proof_as_rule=proof.as_rule(), circularity=proof.circularity, depth_is_nonzero=self.prover.nonzero_depth(pending_node), ) @@ -1412,7 +1414,9 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): proof_id: str cterm: CTerm node_id: int - module_name: str + dependencies_module_name: str + circularities_module_name: str + use_module_name: str target_cterm: CTerm target_node_id: int # port: int @@ -1430,18 +1434,18 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): id: str | None trace_rewrites: bool - # dependencies_as_claims: list[KClaim] - # self_proof_as_claim: KClaim + dependencies_as_rules: list[KRule] + self_proof_as_rule: KRule circularity: bool depth_is_nonzero: bool - @property - def circularities_module_name(self) -> str: - return self.main_module_name + '-CIRCULARITIES-MODULE' - - @property - def dependencies_module_name(self) -> str: - return self.main_module_name + '-DEPENDS-MODULE' + # @property + # def circularities_module_name(self) -> str: + # return self.circularities_module_name + # + # @property + # def dependencies_module_name(self) -> str: + # return self.dependencies_module_name def __hash__(self) -> int: return hash((self.cterm, self.node_id)) @@ -1453,10 +1457,10 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" - # init_kcfg_explore = False + init_kcfg_explore = False if data.kore_servers.get(self.proof_id) is None: - # init_kcfg_explore = True + init_kcfg_explore = True data.kore_servers[self.proof_id] = kore_server( definition_dir=data.definition_dir, llvm_definition_dir=data.llvm_definition_dir, @@ -1502,6 +1506,16 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: # self.dependencies_as_claims + ([self.self_proof_as_claim] if self.circularity else []), # priority=1, # ) + def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) -> None: + _module = KFlatModule(module_name, sentences, [KImport(import_name)]) + _kore_module = kflatmodule_to_kore( + kcfg_explore.kprint.definition, kcfg_explore.kprint.kompiled_kore, _module + ) + kcfg_explore._kore_client.add_module(_kore_module, name_as_id=True) + + if init_kcfg_explore: + _inject_module(self.dependencies_module_name, self.main_module_name, self.dependencies_as_rules) + _inject_module(self.circularities_module_name, self.main_module_name, [self.self_proof_as_rule]) cterm_implies_time = 0 extend_cterm_time = 0 @@ -1523,7 +1537,7 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: init_extend_cterm_time = time.time_ns() result = kcfg_explore.extend_cterm( self.cterm, - module_name=self.module_name, + module_name=self.use_module_name, execute_depth=self.execute_depth, terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 2c3202fcd..e02886d0e 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -1,5 +1,6 @@ from __future__ import annotations +import sys from pathlib import Path from typing import TYPE_CHECKING @@ -60,11 +61,14 @@ def test_imp_parallel_prove( proof_dir: Path, ) -> None: # claim_id = 'addition-1' + print('a', file=sys.stderr) spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' + print('b', file=sys.stderr) spec_modules = kprove.get_claim_modules(Path(spec_file), spec_module_name=spec_module) spec_label = f'{spec_module}.{claim_id}' + print('c', file=sys.stderr) proofs = APRProof.from_spec_modules( kprove.definition, spec_modules, @@ -72,14 +76,17 @@ def test_imp_parallel_prove( logs={}, proof_dir=proof_dir, ) + print('d', file=sys.stderr) proof = single([p for p in proofs if p.id == spec_label]) if admit_deps: for subproof in proof.subproofs: subproof.admit() subproof.write_proof_data() + print('e', file=sys.stderr) semantics = self.semantics(kprove.definition) + print('f', file=sys.stderr) parallel_prover = ParallelAPRProver( proof=proof, module_name=kprove.main_module, @@ -94,6 +101,7 @@ def test_imp_parallel_prove( bug_report=None, bug_report_id=None, ) + print('g', file=sys.stderr) process_data = APRProofProcessData( kprint=kprint, @@ -101,6 +109,7 @@ def test_imp_parallel_prove( definition_dir=kprove.definition_dir, module_name=kprove.main_module, ) + print('h', file=sys.stderr) results = prove_parallel( proofs={'proof1': proof}, @@ -108,6 +117,7 @@ def test_imp_parallel_prove( max_workers=2, process_data=process_data, ) + print('i', file=sys.stderr) assert len(list(results)) == 1 assert list(results)[0].status == expected_status From cccc706d41cf7891feee7e3dc1ccce7992c22807 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 25 Jan 2024 22:13:12 -0600 Subject: [PATCH 071/116] Use single server --- poetry.lock | 516 +++++++++--------- src/pyk/kore/rpc.py 
| 12 +- src/pyk/proof/parallel.py | 17 +- src/pyk/proof/reachability.py | 112 ++-- .../integration/proof/test_imp_parallel.py | 9 - 5 files changed, 325 insertions(+), 341 deletions(-) diff --git a/poetry.lock b/poetry.lock index 760b48c07..eada75fd9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -13,21 +13,22 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "autoflake" @@ -60,29 +61,33 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "black" -version = "23.10.1" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + 
{file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -96,7 +101,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -287,63 +292,63 @@ cron = ["capturer (>=2.4)"] [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - 
{file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = 
"coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - 
{file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = 
"coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -365,13 +370,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -393,13 +398,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.13.0" +version = "3.13.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.0-py3-none-any.whl", hash = "sha256:a552f4fde758f4eab33191e9548f671970f8b06d436d31388c9aa1e5861a710f"}, - {file = "filelock-3.13.0.tar.gz", hash = "sha256:63c6052c82a1a24c873a549fbd39a26982e8f35a3016da231ead11a5be9dad44"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] @@ -409,29 +414,29 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "6.1.0" +version = "7.0.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flake8-bugbear" -version = "23.9.16" +version = "24.1.17" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-bugbear-23.9.16.tar.gz", hash = "sha256:90cf04b19ca02a682feb5aac67cae8de742af70538590509941ab10ae8351f71"}, - {file = "flake8_bugbear-23.9.16-py3-none-any.whl", hash = "sha256:b182cf96ea8f7a8595b2f87321d7d9b28728f4d9c3318012d896543d19742cb5"}, + {file = "flake8-bugbear-24.1.17.tar.gz", hash = "sha256:bcb388a4f3b516258749b1e690ee394c082eff742f44595e3754cf5c7781c2c7"}, + {file = "flake8_bugbear-24.1.17-py3-none-any.whl", hash = "sha256:46cc840ddaed26507cd0ada530d1526418b717ee76c9b5dfdbd238b5eab34139"}, ] [package.dependencies] @@ -470,13 +475,13 @@ flake8 = "*" [[package]] name = "flake8-type-checking" -version = "2.5.1" +version = "2.8.0" description = "A flake8 plugin for managing type-checking imports & forward references" optional = false python-versions = ">=3.8" files = [ - {file = "flake8_type_checking-2.5.1-py3-none-any.whl", hash = "sha256:1cd5cd9731f34921b33640751455643ca1cf7ee4a347a45cd94d3af328a3dd64"}, - {file = "flake8_type_checking-2.5.1.tar.gz", hash = "sha256:bfc51dd6e09a26662ab19191f44102f0606377ec0271a0e764ae993346a206d6"}, + {file = "flake8_type_checking-2.8.0-py3-none-any.whl", hash = "sha256:a6f9ded325f0c9845f073609c557bf481882adc4d18571a39b137ef2d284dc85"}, + {file = "flake8_type_checking-2.8.0.tar.gz", hash = "sha256:07d949b686f39eb0cb828a394aa29d48bd1ca0df92d552d9794d17b22c309cd7"}, ] [package.dependencies] @@ -537,20 +542,20 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = 
"sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -567,20 +572,17 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" @@ -647,71 +649,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = 
"MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -757,38 +759,38 @@ files = [ [[package]] name = "mypy" -version = "1.6.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, - {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, - {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, - {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, - {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, - {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, - {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, - {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, - {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, - {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, - {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, - {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, - {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, - {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, - {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, - {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, - {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, - {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, - {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = 
"mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -799,6 +801,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -825,13 +828,13 @@ files = [ [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -850,13 +853,13 @@ flake8 = ">=5.0.0" [[package]] name = "platformdirs" -version = "3.11.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -865,13 +868,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -931,28 +934,29 @@ files = [ [[package]] name = "pyflakes" -version = "3.1.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyperclip" @@ -977,13 +981,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1034,13 +1038,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.3.1" +version = "3.5.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, - {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, + {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, + {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, ] [package.dependencies] @@ -1089,13 +1093,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.6.0" +version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [package.dependencies] @@ -1152,56 +1156,50 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.7" +version = "1.0.8" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = 
"sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.5" +version = "1.0.6" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.4" +version = "2.0.5" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -1220,38 +1218,34 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.6" +version = "1.0.7" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.9" +version = "1.1.10" description = "sphinxcontrib-serializinghtml is a sphinx 
extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] @@ -1298,24 +1292,24 @@ files = [ [[package]] name = "types-psutil" -version = "5.9.5.17" +version = "5.9.5.20240106" description = "Typing stubs for psutil" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-psutil-5.9.5.17.tar.gz", hash = "sha256:f7d8769812d72a4b513d7ec9eb5580fe2f6013fc270394a603cb6534811f3e4d"}, - {file = "types_psutil-5.9.5.17-py3-none-any.whl", hash = "sha256:2161d166256084acf629d30aaf6bda8bee726ae1fea530559650281056b491fc"}, + {file = "types-psutil-5.9.5.20240106.tar.gz", hash = "sha256:60b233fb613b41fe859526103dbda0b4812d7a16c5f791119ec7016fbe1c8128"}, + {file = "types_psutil-5.9.5.20240106-py3-none-any.whl", hash = "sha256:fea169a85b1bb9d9edd0b063a93ad950e37d574290b1bf11ef5e46c9c5d82326"}, ] [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -1350,13 +1344,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcwidth" -version = "0.2.8" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 7658a2f09..ecc6a5e5e 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -1,5 +1,6 @@ from __future__ import annotations +import traceback import http.client import json import logging @@ -1108,6 +1109,7 @@ def __enter__(self) -> KoreServer: return self def __exit__(self, *args: Any) -> None: + print(f'closing server {self.pid}', 
file=sys.stderr) self.close() def start(self) -> None: @@ -1123,6 +1125,7 @@ def start(self) -> None: assert port == self._port self._info = KoreServerInfo(pid=pid, host=host, port=port) _LOGGER.info(f'KoreServer started: {self.host}:{self.port}, pid={self.pid}') + traceback.print_stack() def close(self) -> None: _LOGGER.info(f'Stopping KoreServer: {self.host}:{self.port}, pid={self.pid}') @@ -1314,6 +1317,7 @@ def kore_server( 'haskell_log_entries': haskell_log_entries, 'bug_report': bug_report, } + server = None if llvm_definition_dir: booster_args: BoosterServerArgs = { 'llvm_kompiled_dir': llvm_definition_dir, @@ -1322,5 +1326,9 @@ def kore_server( 'no_post_exec_simplify': no_post_exec_simplify, **kore_args, } - return BoosterServer(booster_args) - return KoreServer(kore_args) + server = BoosterServer(booster_args) + server = KoreServer(kore_args) + + print(f'starting server {server.pid}', file=sys.stderr) + + return server diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 603400d4e..0a721c08d 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,6 +1,7 @@ from __future__ import annotations import time +import sys from abc import ABC, abstractmethod from multiprocessing import Process, Queue @@ -70,9 +71,12 @@ def status(self) -> ProofStatus: class ProcessData(ABC): - @abstractmethod - def cleanup(self) -> None: - ... + ... + + +# @abstractmethod +# def cleanup(self) -> None: +# ... class ProofStep(ABC, Generic[U, D]): @@ -113,6 +117,7 @@ def prove_parallel( total_time = 0 total_init_time = time.time_ns() + print('d', file=sys.stderr) def run_process(data: ProcessData) -> None: while True: @@ -122,7 +127,8 @@ def run_process(data: ProcessData) -> None: proof_id, proof_step = dequeued update = proof_step.exec(data) out_queue.put((proof_id, update)) - data.cleanup() + + # data.cleanup() def submit(proof_id: str) -> None: proof = proofs[proof_id] @@ -142,12 +148,15 @@ def submit(proof_id: str) -> None: processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] for process in processes: process.start() + print('e', file=sys.stderr) for proof_id in proofs.keys(): submit(proof_id) + print('f', file=sys.stderr) while pending_jobs > 0: proof_id, update = out_queue.get() + print('g', file=sys.stderr) pending_jobs -= 1 proof = proofs[proof_id] diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 3b8e52603..c03c6d9c0 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1,6 +1,7 @@ from __future__ import annotations import graphlib +import sys import json import logging import re @@ -173,7 +174,7 @@ def from_dict(cls: type[APRProof], dct: Mapping[str, Any], proof_dir: Path | Non admitted = dct.get('admitted', False) subproof_ids = dct['subproof_ids'] if 'subproof_ids' in dct else [] node_refutations: dict[int, str] = {} - checked_for_subsumption = dct['checked_for_subsumption'] + checked_for_subsumption = set(dct['checked_for_subsumption']) if 'node_refutation' in dct: node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in dct['node_refutations']} if 'logs' in dct: @@ -688,7 +689,7 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - dependencies_as_rules: list[KRule] = [d.as_rule(priority=20) for d in apr_subproofs] circularity_rule = proof.as_rule(priority=20) - module_name = re.sub(r'[%().:,]+', '-', self.proof.id.upper()) + module_name = re.sub(r'[%().:,_]+', '-', self.proof.id.upper()) 
self.dependencies_module_name = module_name + '-DEPENDS-MODULE' self.circularities_module_name = module_name + '-CIRCULARITIES-MODULE' _inject_module(self.dependencies_module_name, self.main_module_name, dependencies_as_rules) @@ -1133,7 +1134,7 @@ class APRProofProcessData(parallel.ProcessData): haskell_log_entries: Iterable[str] log_axioms_file: Path | None - kore_servers: dict[str, KoreServer] + # kore_servers: dict[str, KoreServer] def __init__( self, @@ -1152,7 +1153,7 @@ def __init__( ) -> None: self.kprint = kprint self.kcfg_semantics = kcfg_semantics - self.kore_servers = {} + # self.kore_servers = {} self.definition_dir = definition_dir self.llvm_definition_dir = llvm_definition_dir self.module_name = module_name @@ -1164,15 +1165,17 @@ def __init__( self.haskell_log_entries = haskell_log_entries self.log_axioms_file = log_axioms_file - def cleanup(self) -> None: - for server in self.kore_servers.values(): - server.close() + +# def cleanup(self) -> None: +# for server in self.kore_servers.values(): +# server.close() class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProcessData]): prover: APRProver - server: KoreServer kcfg_explore: KCFGExplore + port: int + client: KoreClient execute_depth: int | None cut_point_rules: Iterable[str] @@ -1197,6 +1200,7 @@ def __init__( execute_depth: int | None, kprint: KPrint, kcfg_semantics: KCFGSemantics | None, + port: int, id: str | None, trace_rewrites: bool, cut_point_rules: Iterable[str], @@ -1219,29 +1223,14 @@ def __init__( self.kcfg_semantics = kcfg_semantics self.id = id self.trace_rewrites = trace_rewrites + self.port = port self.bug_report = bug_report self.bug_report_id = bug_report_id self.total_cterm_extend_time = 0 self.total_cterm_implies_time = 0 - self.server = kore_server( - definition_dir=definition_dir, - llvm_definition_dir=llvm_definition_dir, - module_name=module_name, - command=command, - bug_report=bug_report, - smt_timeout=smt_timeout, - smt_retry_limit=smt_retry_limit, - smt_tactic=smt_tactic, - haskell_log_format=haskell_log_format, - haskell_log_entries=haskell_log_entries, - log_axioms_file=log_axioms_file, - fallback_on=None, - interim_simplification=None, - no_post_exec_simplify=None, - ) self.client = KoreClient( host='localhost', - port=self.server.port, + port=self.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id, ) @@ -1257,7 +1246,6 @@ def __init__( def __del__(self) -> None: self.client.close() - self.server.close() def steps(self, proof: APRProof) -> Iterable[APRProofStep]: """ @@ -1293,7 +1281,7 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: target_cterm=target_node.cterm, target_node_id=target_node.id, use_module_name=module_name, - # port=self.server.port, + port=self.port, execute_depth=self.execute_depth, terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, @@ -1353,6 +1341,7 @@ def __init__( execute_depth: int | None, kprint: KPrint, kcfg_semantics: KCFGSemantics | None, + port: int, id: str | None, trace_rewrites: bool, cut_point_rules: Iterable[str], @@ -1377,25 +1366,9 @@ def __init__( self.trace_rewrites = trace_rewrites self.bug_report = bug_report self.bug_report_id = bug_report_id - self.server = kore_server( - definition_dir=definition_dir, - llvm_definition_dir=llvm_definition_dir, - module_name=module_name, - command=command, - bug_report=bug_report, - smt_timeout=smt_timeout, - smt_retry_limit=smt_retry_limit, - smt_tactic=smt_tactic, - haskell_log_format=haskell_log_format, - 
haskell_log_entries=haskell_log_entries, - log_axioms_file=log_axioms_file, - fallback_on=None, - interim_simplification=None, - no_post_exec_simplify=None, - ) self.client = KoreClient( host='localhost', - port=self.server.port, + port=self.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id, ) @@ -1419,7 +1392,7 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): use_module_name: str target_cterm: CTerm target_node_id: int - # port: int + port: int execute_depth: int | None cut_point_rules: Iterable[str] terminal_rules: Iterable[str] @@ -1457,31 +1430,35 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: Able to be called on any `ProofStep` returned by `prover.steps(proof)`. """ + print('ba', file=sys.stderr) + init_kcfg_explore = False - if data.kore_servers.get(self.proof_id) is None: - init_kcfg_explore = True - data.kore_servers[self.proof_id] = kore_server( - definition_dir=data.definition_dir, - llvm_definition_dir=data.llvm_definition_dir, - module_name=data.module_name, - command=data.command, - bug_report=self.bug_report, - smt_timeout=data.smt_timeout, - smt_retry_limit=data.smt_retry_limit, - smt_tactic=data.smt_tactic, - haskell_log_format=data.haskell_log_format, - haskell_log_entries=data.haskell_log_entries, - log_axioms_file=data.log_axioms_file, - fallback_on=None, - interim_simplification=None, - no_post_exec_simplify=None, - ) - server = data.kore_servers[self.proof_id] + # if data.kore_servers.get(self.proof_id) is None: + # init_kcfg_explore = True + # data.kore_servers[self.proof_id] = kore_server( + # definition_dir=data.definition_dir, + # llvm_definition_dir=data.llvm_definition_dir, + # module_name=data.module_name, + # command=data.command, + # bug_report=self.bug_report, + # smt_timeout=data.smt_timeout, + # smt_retry_limit=data.smt_retry_limit, + # smt_tactic=data.smt_tactic, + # haskell_log_format=data.haskell_log_format, + # haskell_log_entries=data.haskell_log_entries, + # log_axioms_file=data.log_axioms_file, + # fallback_on=None, + # interim_simplification=None, + # no_post_exec_simplify=None, + # ) + # server = data.kore_servers[self.proof_id] + + print('bb', file=sys.stderr) with KoreClient( host='localhost', - port=server.port, + port=self.port, bug_report=self.bug_report, bug_report_id=self.bug_report_id, ) as client: @@ -1512,10 +1489,12 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - kcfg_explore.kprint.definition, kcfg_explore.kprint.kompiled_kore, _module ) kcfg_explore._kore_client.add_module(_kore_module, name_as_id=True) + print('bc', file=sys.stderr) if init_kcfg_explore: _inject_module(self.dependencies_module_name, self.main_module_name, self.dependencies_as_rules) _inject_module(self.circularities_module_name, self.main_module_name, [self.self_proof_as_rule]) + print('bd', file=sys.stderr) cterm_implies_time = 0 extend_cterm_time = 0 @@ -1533,6 +1512,8 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - extend_cterm_time=extend_cterm_time, ) + print('be', file=sys.stderr) + self.circularities_module_name if self.depth_is_nonzero else self.dependencies_module_name init_extend_cterm_time = time.time_ns() result = kcfg_explore.extend_cterm( @@ -1542,6 +1523,7 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, ) + print('bf', file=sys.stderr) extend_cterm_time = time.time_ns() - init_extend_cterm_time return 
APRProofExtendResult( extend_result=result, diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index e02886d0e..df3f9a8ce 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -61,14 +61,11 @@ def test_imp_parallel_prove( proof_dir: Path, ) -> None: # claim_id = 'addition-1' - print('a', file=sys.stderr) spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' - print('b', file=sys.stderr) spec_modules = kprove.get_claim_modules(Path(spec_file), spec_module_name=spec_module) spec_label = f'{spec_module}.{claim_id}' - print('c', file=sys.stderr) proofs = APRProof.from_spec_modules( kprove.definition, spec_modules, @@ -76,17 +73,14 @@ def test_imp_parallel_prove( logs={}, proof_dir=proof_dir, ) - print('d', file=sys.stderr) proof = single([p for p in proofs if p.id == spec_label]) if admit_deps: for subproof in proof.subproofs: subproof.admit() subproof.write_proof_data() - print('e', file=sys.stderr) semantics = self.semantics(kprove.definition) - print('f', file=sys.stderr) parallel_prover = ParallelAPRProver( proof=proof, module_name=kprove.main_module, @@ -101,7 +95,6 @@ def test_imp_parallel_prove( bug_report=None, bug_report_id=None, ) - print('g', file=sys.stderr) process_data = APRProofProcessData( kprint=kprint, @@ -109,7 +102,6 @@ def test_imp_parallel_prove( definition_dir=kprove.definition_dir, module_name=kprove.main_module, ) - print('h', file=sys.stderr) results = prove_parallel( proofs={'proof1': proof}, @@ -117,7 +109,6 @@ def test_imp_parallel_prove( max_workers=2, process_data=process_data, ) - print('i', file=sys.stderr) assert len(list(results)) == 1 assert list(results)[0].status == expected_status From 3d6968094a133aeebb71cc0dafbb6cd53ba19e0d Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 26 Jan 2024 15:03:57 -0600 Subject: [PATCH 072/116] Fix too many servers being started --- src/pyk/kore/rpc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index ecc6a5e5e..a614fe8eb 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -1327,7 +1327,8 @@ def kore_server( **kore_args, } server = BoosterServer(booster_args) - server = KoreServer(kore_args) + else: + server = KoreServer(kore_args) print(f'starting server {server.pid}', file=sys.stderr) From 6d95c128ac2c7ed71334a0b04ee7d8b81755c9b0 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Tue, 30 Jan 2024 12:55:09 -0600 Subject: [PATCH 073/116] Work on debugging parallel prover crash --- src/pyk/kast/outer.py | 7 ++++--- src/pyk/kore/rpc.py | 5 ++++- src/pyk/proof/reachability.py | 11 +---------- src/tests/integration/k-files/imp-simple-spec.k | 16 ++++++++++++++++ src/tests/integration/proof/test_imp_parallel.py | 14 ++++++++++++-- 5 files changed, 37 insertions(+), 16 deletions(-) diff --git a/src/pyk/kast/outer.py b/src/pyk/kast/outer.py index a6d349fc2..fc1ae6cb2 100644 --- a/src/pyk/kast/outer.py +++ b/src/pyk/kast/outer.py @@ -1083,9 +1083,10 @@ def sentence_by_unique_id(self) -> dict[str, KSentence]: for module in self.all_modules: for unique_id, sent in module.sentence_by_unique_id.items(): if unique_id in unique_id_map and sent != unique_id_map[unique_id]: - _LOGGER.warning( - f'Same UNIQUE_ID found for two different sentences: {(sent, unique_id_map[unique_id])}' - ) + ... 
+# _LOGGER.warning( +# f'Same UNIQUE_ID found for two different sentences: {(sent, unique_id_map[unique_id])}' +# ) else: unique_id_map[unique_id] = sent return unique_id_map diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index a614fe8eb..423303cb1 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -309,6 +309,7 @@ def request(self, method: str, **params: Any) -> dict[str, Any]: _LOGGER.debug(f'Sending request to {server_addr}: {req}') resp = self._transport.request(req) if not resp: + print(payload) raise RuntimeError('Empty response received') _LOGGER.debug(f'Received response from {server_addr}: {resp}') @@ -1125,7 +1126,6 @@ def start(self) -> None: assert port == self._port self._info = KoreServerInfo(pid=pid, host=host, port=port) _LOGGER.info(f'KoreServer started: {self.host}:{self.port}, pid={self.pid}') - traceback.print_stack() def close(self) -> None: _LOGGER.info(f'Stopping KoreServer: {self.host}:{self.port}, pid={self.pid}') @@ -1274,6 +1274,9 @@ def _extra_args(self) -> list[str]: res += ['--interim-simplification', str(self._interim_simplification)] if self._no_post_exec_simplify: res += ['--no-post-exec-simplify'] + if self._no_post_exec_simplify: + res += ['--no-post-exec-simplify'] +# res += ['--log-level', 'debug'] return res def _populate_bug_report(self, bug_report: BugReport) -> None: diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index c03c6d9c0..ed5f9c997 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -562,7 +562,7 @@ def from_dict(cls: type[APRBMCProof], dct: Mapping[str, Any], proof_dir: Path | logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in dct['logs'].items()} else: logs = {} - checked_for_subsumption = {kcfg._resolve(node_id) for node_id in dct['checked_for_subsumption']} + checked_for_subsumption = list({kcfg._resolve(node_id) for node_id in dct['checked_for_subsumption']}) return APRBMCProof( id, @@ -1430,8 +1430,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" - print('ba', file=sys.stderr) - init_kcfg_explore = False # if data.kore_servers.get(self.proof_id) is None: @@ -1454,8 +1452,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: # ) # server = data.kore_servers[self.proof_id] - print('bb', file=sys.stderr) - with KoreClient( host='localhost', port=self.port, @@ -1489,12 +1485,10 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - kcfg_explore.kprint.definition, kcfg_explore.kprint.kompiled_kore, _module ) kcfg_explore._kore_client.add_module(_kore_module, name_as_id=True) - print('bc', file=sys.stderr) if init_kcfg_explore: _inject_module(self.dependencies_module_name, self.main_module_name, self.dependencies_as_rules) _inject_module(self.circularities_module_name, self.main_module_name, [self.self_proof_as_rule]) - print('bd', file=sys.stderr) cterm_implies_time = 0 extend_cterm_time = 0 @@ -1512,8 +1506,6 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - extend_cterm_time=extend_cterm_time, ) - print('be', file=sys.stderr) - self.circularities_module_name if self.depth_is_nonzero else self.dependencies_module_name init_extend_cterm_time = time.time_ns() result = kcfg_explore.extend_cterm( @@ -1523,7 +1515,6 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) - terminal_rules=self.terminal_rules, cut_point_rules=self.cut_point_rules, ) - print('bf', file=sys.stderr) extend_cterm_time = time.time_ns() - init_extend_cterm_time return APRProofExtendResult( extend_result=result, diff --git a/src/tests/integration/k-files/imp-simple-spec.k b/src/tests/integration/k-files/imp-simple-spec.k index de91067ea..a2def585d 100644 --- a/src/tests/integration/k-files/imp-simple-spec.k +++ b/src/tests/integration/k-files/imp-simple-spec.k @@ -95,6 +95,22 @@ module IMP-SIMPLE-SPEC requires 0 <=Int S andBool 0 <=Int K ensures 0 + if(_B:Bool) { + $n = 1; + } + else { + $n = 2; + } + $s = 10; + while (0 <= $s) { + $s = $s + -1; + } + => . + ... + + claim [failing-if]: if(_B:Bool) { $n = 1 ; } else { $n = 2 ; } => . ... 
$n |-> (0 => 1) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index df3f9a8ce..9b7a9508d 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -31,6 +31,7 @@ ('sum-N', ProofStatus.PASSED, True), ('sum-loop', ProofStatus.PASSED, False), ('failing-if', ProofStatus.FAILED, False), + ('long-branches', ProofStatus.PASSED, False), ) @@ -59,6 +60,7 @@ def test_imp_parallel_prove( kprove: KProve, kprint: KPrint, proof_dir: Path, + _kore_server: KoreServer, ) -> None: # claim_id = 'addition-1' spec_file = K_FILES / 'imp-simple-spec.k' @@ -85,7 +87,7 @@ def test_imp_parallel_prove( proof=proof, module_name=kprove.main_module, definition_dir=kprove.definition_dir, - execute_depth=1000, + execute_depth=100, kprint=kprint, kcfg_semantics=semantics, id=claim_id, @@ -94,6 +96,7 @@ def test_imp_parallel_prove( terminal_rules=(), bug_report=None, bug_report_id=None, + port=_kore_server.port, ) process_data = APRProofProcessData( @@ -106,9 +109,16 @@ def test_imp_parallel_prove( results = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, - max_workers=2, + max_workers=1, process_data=process_data, ) + kcfg_show = KCFGShow( + kcfg_explore.kprint, node_printer=APRBMCProofNodePrinter(results[0], kcfg_explore.kprint, full_printer=True) + ) + cfg_lines = kcfg_show.show(proof.kcfg) + print('\n'.join(cfg_lines)) + assert 1 == 2 + assert len(list(results)) == 1 assert list(results)[0].status == expected_status From 2378e88baf0eb62047b22593d7a65f655e987404 Mon Sep 17 00:00:00 2001 From: devops Date: Tue, 30 Jan 2024 18:56:49 +0000 Subject: [PATCH 074/116] Set Version: 0.1.605 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index af00f291e..184efe7c6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.604' -release = '0.1.604' +version = '0.1.605' +release = '0.1.605' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 83e6a1406..2706885e1 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.604 +0.1.605 diff --git a/pyproject.toml b/pyproject.toml index 388111bba..27c969144 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.604" +version = "0.1.605" description = "" authors = [ "Runtime Verification, Inc. 
", From be877a374d58a18db850fc65fa0a809b71e53c48 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 2 Feb 2024 20:47:11 -0600 Subject: [PATCH 075/116] Clean up --- src/pyk/kore/rpc.py | 1 - src/pyk/proof/parallel.py | 2 ++ src/pyk/proof/reachability.py | 6 +----- src/tests/integration/proof/test_imp_parallel.py | 10 +--------- 4 files changed, 4 insertions(+), 15 deletions(-) diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 8acf03793..1eeffb6af 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -1,6 +1,5 @@ from __future__ import annotations -import traceback import http.client import json import logging diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 0a721c08d..db5269572 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -155,7 +155,9 @@ def submit(proof_id: str) -> None: print('f', file=sys.stderr) while pending_jobs > 0: + wait_init_time = time.time_ns() proof_id, update = out_queue.get() + total_process_time += time.time_ns() - wait_init_time print('g', file=sys.stderr) pending_jobs -= 1 diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 94da35877..776cf0847 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1,7 +1,6 @@ from __future__ import annotations import graphlib -import sys import json import logging import re @@ -11,7 +10,7 @@ from typing import TYPE_CHECKING import pyk.proof.parallel as parallel -from pyk.kore.rpc import KoreClient, KoreExecLogFormat, LogEntry, kore_server +from pyk.kore.rpc import KoreClient, KoreExecLogFormat, LogEntry from ..kast.inner import KInner, Subst from ..kast.manip import flatten_label, ml_pred_to_bool @@ -31,14 +30,11 @@ from typing import Any, Final, TypeVar from pyk.kcfg.semantics import KCFGSemantics - from pyk.kore.rpc import KoreServer from pyk.utils import BugReport from ..cterm import CSubst, CTerm - from ..kast.outer import KClaim, KDefinition, KFlatModuleList from ..kcfg.explore import ExtendResult from ..kast.outer import KClaim, KDefinition, KFlatModuleList, KRuleLike - from ..kcfg import KCFGExplore from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 9b7a9508d..a9db3ebcf 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys from pathlib import Path from typing import TYPE_CHECKING @@ -21,6 +20,7 @@ from pyk.kast.outer import KDefinition from pyk.kcfg.explore import KCFGExplore from pyk.kcfg.semantics import KCFGSemantics + from pyk.kore.rpc import KoreServer from pyk.ktool.kprint import KPrint from pyk.ktool.kprove import KProve @@ -62,7 +62,6 @@ def test_imp_parallel_prove( proof_dir: Path, _kore_server: KoreServer, ) -> None: - # claim_id = 'addition-1' spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' @@ -113,12 +112,5 @@ def test_imp_parallel_prove( process_data=process_data, ) - kcfg_show = KCFGShow( - kcfg_explore.kprint, node_printer=APRBMCProofNodePrinter(results[0], kcfg_explore.kprint, full_printer=True) - ) - cfg_lines = kcfg_show.show(proof.kcfg) - print('\n'.join(cfg_lines)) - assert 1 == 2 - assert len(list(results)) == 1 assert list(results)[0].status == expected_status From 6e7dde5c65b831d10570a87b49ee6ba8eb7d892b Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 2 Feb 2024 20:49:28 -0600 Subject: 
[PATCH 076/116] Clean up --- src/pyk/kast/outer.py | 6 +++--- src/pyk/kore/rpc.py | 15 ++------------- 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/src/pyk/kast/outer.py b/src/pyk/kast/outer.py index 23bbe6bbd..cc535ab13 100644 --- a/src/pyk/kast/outer.py +++ b/src/pyk/kast/outer.py @@ -1089,9 +1089,9 @@ def sentence_by_unique_id(self) -> dict[str, KSentence]: for unique_id, sent in module.sentence_by_unique_id.items(): if unique_id in unique_id_map and sent != unique_id_map[unique_id]: ... -# _LOGGER.warning( -# f'Same UNIQUE_ID found for two different sentences: {(sent, unique_id_map[unique_id])}' -# ) + _LOGGER.warning( + f'Same UNIQUE_ID found for two different sentences: {(sent, unique_id_map[unique_id])}' + ) else: unique_id_map[unique_id] = sent return unique_id_map diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 1eeffb6af..04ce4cccc 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -309,7 +309,6 @@ def request(self, method: str, **params: Any) -> dict[str, Any]: _LOGGER.debug(f'Sending request to {server_addr}: {req}') resp = self._transport.request(req) if not resp: - print(payload) raise RuntimeError('Empty response received') _LOGGER.debug(f'Received response from {server_addr}: {resp}') @@ -890,7 +889,6 @@ def _request(self, method: str, **params: Any) -> dict[str, Any]: try: return self._client.request(method, **params) except JsonRpcError as err: - print(params, file=sys.stderr) raise self._error(err) from err def _error(self, err: JsonRpcError) -> KoreClientError: @@ -1012,7 +1010,6 @@ def get_model(self, pattern: Pattern, module_name: str | None = None) -> GetMode return GetModelResult.from_dict(result) def add_module(self, module: Module, *, name_as_id: bool | None = None) -> str: - print(f'adding module {module.text}', file=sys.stderr) params = filter_none( { 'module': module.text, @@ -1110,7 +1107,6 @@ def __enter__(self) -> KoreServer: return self def __exit__(self, *args: Any) -> None: - print(f'closing server {self.pid}', file=sys.stderr) self.close() def start(self) -> None: @@ -1274,9 +1270,6 @@ def _extra_args(self) -> list[str]: res += ['--interim-simplification', str(self._interim_simplification)] if self._no_post_exec_simplify: res += ['--no-post-exec-simplify'] - if self._no_post_exec_simplify: - res += ['--no-post-exec-simplify'] -# res += ['--log-level', 'debug'] return res def _populate_bug_report(self, bug_report: BugReport) -> None: @@ -1320,7 +1313,6 @@ def kore_server( 'haskell_log_entries': haskell_log_entries, 'bug_report': bug_report, } - server = None if llvm_definition_dir: booster_args: BoosterServerArgs = { 'llvm_kompiled_dir': llvm_definition_dir, @@ -1329,10 +1321,7 @@ def kore_server( 'no_post_exec_simplify': no_post_exec_simplify, **kore_args, } - server = BoosterServer(booster_args) + return BoosterServer(booster_args) else: - server = KoreServer(kore_args) - - print(f'starting server {server.pid}', file=sys.stderr) + return KoreServer(kore_args) - return server From f53b0a9905d9dcc4111f42f00caa2f609e398469 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 2 Feb 2024 20:49:46 -0600 Subject: [PATCH 077/116] Clean up --- src/pyk/kore/rpc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 04ce4cccc..4d44142a6 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -1322,6 +1322,5 @@ def kore_server( **kore_args, } return BoosterServer(booster_args) - else: - return KoreServer(kore_args) + return KoreServer(kore_args) From 
73078626a63831338304c0ae1c7ed012274ff522 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 2 Feb 2024 20:50:51 -0600 Subject: [PATCH 078/116] Clean up --- src/pyk/kast/outer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pyk/kast/outer.py b/src/pyk/kast/outer.py index cc535ab13..286cc1426 100644 --- a/src/pyk/kast/outer.py +++ b/src/pyk/kast/outer.py @@ -1088,7 +1088,6 @@ def sentence_by_unique_id(self) -> dict[str, KSentence]: for module in self.all_modules: for unique_id, sent in module.sentence_by_unique_id.items(): if unique_id in unique_id_map and sent != unique_id_map[unique_id]: - ... _LOGGER.warning( f'Same UNIQUE_ID found for two different sentences: {(sent, unique_id_map[unique_id])}' ) From 31dcc4641b347ffdfd72604baaa74ab10b42a78f Mon Sep 17 00:00:00 2001 From: devops Date: Sat, 3 Feb 2024 03:10:41 +0000 Subject: [PATCH 079/116] Set Version: 0.1.613 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 134529b35..f4b234d37 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.612' -release = '0.1.612' +version = '0.1.613' +release = '0.1.613' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 4165da619..a628887fb 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.612 +0.1.613 diff --git a/pyproject.toml b/pyproject.toml index 432ff416f..b955aaffc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.612" +version = "0.1.613" description = "" authors = [ "Runtime Verification, Inc. ", From 597d28064c6a94d1b9db39aaaca5ae1ed29df5e2 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 5 Feb 2024 15:31:11 -0600 Subject: [PATCH 080/116] Fix formatting --- poetry.lock | 325 ++++++++++++++++--------------- src/pyk/kast/inner.py | 42 ++-- src/pyk/kast/kast.py | 6 +- src/pyk/kast/markdown.py | 3 +- src/pyk/kast/outer.py | 3 +- src/pyk/kast/outer_syntax.py | 18 +- src/pyk/kbuild/project.py | 3 +- src/pyk/kcfg/explore.py | 9 +- src/pyk/kcfg/kcfg.py | 6 +- src/pyk/kcfg/semantics.py | 12 +- src/pyk/kdist/_kdist.py | 2 +- src/pyk/kdist/api.py | 3 +- src/pyk/kllvm/convert.py | 10 +- src/pyk/kore/match.py | 39 ++-- src/pyk/kore/rpc.py | 25 +-- src/pyk/kore/syntax.py | 39 ++-- src/pyk/krepl/repl.py | 9 +- src/pyk/ktool/kompile.py | 6 +- src/pyk/proof/parallel.py | 5 +- src/pyk/proof/proof.py | 9 +- src/pyk/proof/reachability.py | 4 +- src/pyk/testing/_kompiler.py | 3 +- src/pyk/utils.py | 17 +- src/tests/unit/kast/test_kast.py | 2 +- src/tests/unit/test_proof.py | 12 +- 25 files changed, 264 insertions(+), 348 deletions(-) diff --git a/poetry.lock b/poetry.lock index eada75fd9..f6c25cd98 100644 --- a/poetry.lock +++ b/poetry.lock @@ -61,33 +61,33 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "black" -version = "23.12.1" +version = "24.1.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = 
"black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -107,13 +107,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -292,63 +292,63 @@ cron = ["capturer (>=2.4)"] [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - 
{file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -475,13 +475,13 @@ flake8 = "*" [[package]] name = "flake8-type-checking" -version = "2.8.0" +version = "2.9.0" description = "A flake8 plugin for managing type-checking imports & forward references" optional = false python-versions = ">=3.8" files = [ - {file = "flake8_type_checking-2.8.0-py3-none-any.whl", hash = "sha256:a6f9ded325f0c9845f073609c557bf481882adc4d18571a39b137ef2d284dc85"}, - {file = "flake8_type_checking-2.8.0.tar.gz", hash = 
"sha256:07d949b686f39eb0cb828a394aa29d48bd1ca0df92d552d9794d17b22c309cd7"}, + {file = "flake8_type_checking-2.9.0-py3-none-any.whl", hash = "sha256:b63e1745f6e7deee1403d7e0150a5bca378315e9fe4d4cdaa7b71338034dbcc3"}, + {file = "flake8_type_checking-2.9.0.tar.gz", hash = "sha256:6fcc0e8a63f6a87b5b26b776388c21907e66c4efbd15dcc1bcbd96fe884da93d"}, ] [package.dependencies] @@ -603,13 +603,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "linkify-it-py" -version = "2.0.2" +version = "2.0.3" description = "Links recognition library with FULL unicode support." optional = false python-versions = ">=3.7" files = [ - {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, - {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, + {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, + {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, ] [package.dependencies] @@ -649,71 +649,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = 
"MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = 
"MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = 
"MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -853,18 +853,18 @@ flake8 = ">=5.0.0" [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" @@ -981,13 +981,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.4" +version = "8.0.0" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, ] [package.dependencies] @@ -995,7 +995,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" +pluggy = ">=1.3.0,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] @@ -1292,13 +1292,13 @@ files = [ [[package]] name = "types-psutil" -version = "5.9.5.20240106" +version = "5.9.5.20240205" description = "Typing stubs for psutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-psutil-5.9.5.20240106.tar.gz", hash = "sha256:60b233fb613b41fe859526103dbda0b4812d7a16c5f791119ec7016fbe1c8128"}, - {file = "types_psutil-5.9.5.20240106-py3-none-any.whl", hash = "sha256:fea169a85b1bb9d9edd0b063a93ad950e37d574290b1bf11ef5e46c9c5d82326"}, + {file = "types-psutil-5.9.5.20240205.tar.gz", hash = "sha256:51df36a361aa597bf483dcc5b58f2ab7aa87452a36d2da97c90994d6a81ef743"}, + {file = "types_psutil-5.9.5.20240205-py3-none-any.whl", hash = "sha256:3ec9bd8b95a64fe1269241d3ffb74b94a45df2d0391da1402423cd33f29745ca"}, ] [[package]] @@ -1328,17 +1328,18 @@ test = ["coverage", "pytest", "pytest-cov"] [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] diff --git a/src/pyk/kast/inner.py b/src/pyk/kast/inner.py index 46198ba9c..18dd4153a 100644 --- a/src/pyk/kast/inner.py +++ b/src/pyk/kast/inner.py @@ -57,12 +57,10 @@ class KLabel(KAst): params: tuple[KSort, ...] @overload - def __init__(self, name: str, params: Iterable[str | KSort]): - ... + def __init__(self, name: str, params: Iterable[str | KSort]): ... @overload - def __init__(self, name: str, *params: str | KSort): - ... + def __init__(self, name: str, *params: str | KSort): ... # TODO Is it possible to extract a decorator? def __init__(self, name: str, *args: Any, **kwargs: Any): @@ -95,12 +93,10 @@ def __iter__(self) -> Iterator[str | KSort]: return chain([self.name], self.params) @overload - def __call__(self, args: Iterable[KInner]) -> KApply: - ... + def __call__(self, args: Iterable[KInner]) -> KApply: ... @overload - def __call__(self, *args: KInner) -> KApply: - ... + def __call__(self, *args: KInner) -> KApply: ... def __call__(self, *args: Any, **kwargs: Any) -> KApply: return self.apply(*args, **kwargs) @@ -119,12 +115,10 @@ def let(self, *, name: str | None = None, params: Iterable[str | KSort] | None = return KLabel(name=name, params=params) @overload - def apply(self, args: Iterable[KInner]) -> KApply: - ... + def apply(self, args: Iterable[KInner]) -> KApply: ... @overload - def apply(self, *args: KInner) -> KApply: - ... + def apply(self, *args: KInner) -> KApply: ... def apply(self, *args: Any, **kwargs: Any) -> KApply: """Construct a `KApply` with this `KLabel` as the AST head and the supplied parameters as the arguments.""" @@ -184,8 +178,7 @@ def _extract_dicts(dct: Mapping[str, Any]) -> list[Mapping[str, Any]]: @classmethod @abstractmethod - def _from_dict(cls: type[KI], d: Mapping[str, Any], terms: list[KInner]) -> KI: - ... + def _from_dict(cls: type[KI], d: Mapping[str, Any], terms: list[KInner]) -> KI: ... @property @abstractmethod @@ -243,8 +236,7 @@ def to_dict(self) -> dict[str, Any]: stack.append([]) @abstractmethod - def _to_dict(self, terms: list[KInner]) -> dict[str, Any]: - ... + def _to_dict(self, terms: list[KInner]) -> dict[str, Any]: ... @final @@ -364,12 +356,10 @@ class KApply(KInner): args: tuple[KInner, ...] @overload - def __init__(self, label: str | KLabel, args: Iterable[KInner]): - ... + def __init__(self, label: str | KLabel, args: Iterable[KInner]): ... @overload - def __init__(self, label: str | KLabel, *args: KInner): - ... + def __init__(self, label: str | KLabel, *args: KInner): ... def __init__(self, label: str | KLabel, *args: Any, **kwargs: Any): """Construct a new `KApply` given the input `KLabel` or str, applied to arguments.""" @@ -579,12 +569,10 @@ class KSequence(KInner, Sequence[KInner]): items: tuple[KInner, ...] @overload - def __init__(self, items: Iterable[KInner]): - ... 
+ def __init__(self, items: Iterable[KInner]): ... @overload - def __init__(self, *items: KInner): - ... + def __init__(self, *items: KInner): ... def __init__(self, *args: Any, **kwargs: Any): """Construct a new `KSequence` given the arguments.""" @@ -613,12 +601,10 @@ def __init__(self, *args: Any, **kwargs: Any): object.__setattr__(self, 'items', tuple(items)) @overload - def __getitem__(self, key: int) -> KInner: - ... + def __getitem__(self, key: int) -> KInner: ... @overload - def __getitem__(self, key: slice) -> tuple[KInner, ...]: - ... + def __getitem__(self, key: slice) -> tuple[KInner, ...]: ... def __getitem__(self, key: int | slice) -> KInner | tuple[KInner, ...]: return self.items[key] diff --git a/src/pyk/kast/kast.py b/src/pyk/kast/kast.py index 98c71e85d..c92fc414f 100644 --- a/src/pyk/kast/kast.py +++ b/src/pyk/kast/kast.py @@ -27,8 +27,7 @@ def version() -> int: return 3 @abstractmethod - def to_dict(self) -> dict[str, Any]: - ... + def to_dict(self) -> dict[str, Any]: ... @final def to_json(self) -> str: @@ -144,8 +143,7 @@ class WithKAtt(ABC): att: KAtt @abstractmethod - def let_att(self: W, att: KAtt) -> W: - ... + def let_att(self: W, att: KAtt) -> W: ... def map_att(self: W, f: Callable[[KAtt], KAtt]) -> W: return self.let_att(att=f(self.att)) diff --git a/src/pyk/kast/markdown.py b/src/pyk/kast/markdown.py index a19b96c85..f4a2b8934 100644 --- a/src/pyk/kast/markdown.py +++ b/src/pyk/kast/markdown.py @@ -70,8 +70,7 @@ def check_tag(tag: str) -> None: class Selector(ABC): @abstractmethod - def eval(self, atoms: Container[str]) -> bool: - ... + def eval(self, atoms: Container[str]) -> bool: ... @final diff --git a/src/pyk/kast/outer.py b/src/pyk/kast/outer.py index 286cc1426..7a08d9d8c 100644 --- a/src/pyk/kast/outer.py +++ b/src/pyk/kast/outer.py @@ -562,8 +562,7 @@ def let( requires: KInner | None = None, ensures: KInner | None = None, att: KAtt | None = None, - ) -> RL: - ... + ) -> RL: ... @final diff --git a/src/pyk/kast/outer_syntax.py b/src/pyk/kast/outer_syntax.py index db2b88816..57b5bc4b4 100644 --- a/src/pyk/kast/outer_syntax.py +++ b/src/pyk/kast/outer_syntax.py @@ -11,8 +11,7 @@ from typing import Any, Final -class AST(ABC): - ... +class AST(ABC): ... @final @@ -24,12 +23,10 @@ def __init__(self, items: Iterable[tuple[str, str]] = ()): object.__setattr__(self, 'items', tuple(items)) @overload - def __getitem__(self, key: int) -> tuple[str, str]: - ... + def __getitem__(self, key: int) -> tuple[str, str]: ... @overload - def __getitem__(self, key: slice) -> tuple[tuple[str, str], ...]: - ... + def __getitem__(self, key: slice) -> tuple[tuple[str, str], ...]: ... def __getitem__(self, key: Any) -> Any: return self.items[key] @@ -41,12 +38,10 @@ def __len__(self) -> int: EMPTY_ATT: Final = Att() -class Sentence(AST, ABC): - ... +class Sentence(AST, ABC): ... -class SyntaxSentence(Sentence, ABC): - ... +class SyntaxSentence(Sentence, ABC): ... class Assoc(Enum): @@ -123,8 +118,7 @@ def __init__(self, items: Iterable[ProductionItem], att: Att = EMPTY_ATT): object.__setattr__(self, 'att', att) -class ProductionItem(AST, ABC): - ... +class ProductionItem(AST, ABC): ... @final diff --git a/src/pyk/kbuild/project.py b/src/pyk/kbuild/project.py index 6f0f8dd8c..d7a24d75b 100644 --- a/src/pyk/kbuild/project.py +++ b/src/pyk/kbuild/project.py @@ -28,8 +28,7 @@ def from_dict(dct: Mapping[str, Any]) -> Source: raise ValueError(f'Cannot parse source: {dct}') @abstractmethod - def resolve(self, project_path: Path) -> Path: - ... 
+ def resolve(self, project_path: Path) -> Path: ... @final diff --git a/src/pyk/kcfg/explore.py b/src/pyk/kcfg/explore.py index 4a783b385..23db5dabf 100644 --- a/src/pyk/kcfg/explore.py +++ b/src/pyk/kcfg/explore.py @@ -521,20 +521,17 @@ def extract_rule_labels(_logs: tuple[LogEntry, ...]) -> list[str]: raise AssertionError() -class ExtendResult(ABC): - ... +class ExtendResult(ABC): ... @final @dataclass(frozen=True) -class Vacuous(ExtendResult): - ... +class Vacuous(ExtendResult): ... @final @dataclass(frozen=True) -class Stuck(ExtendResult): - ... +class Stuck(ExtendResult): ... @final diff --git a/src/pyk/kcfg/kcfg.py b/src/pyk/kcfg/kcfg.py index b8163b759..49f24e231 100644 --- a/src/pyk/kcfg/kcfg.py +++ b/src/pyk/kcfg/kcfg.py @@ -57,8 +57,7 @@ def __lt__(self, other: Any) -> bool: @property @abstractmethod - def targets(self) -> tuple[KCFG.Node, ...]: - ... + def targets(self) -> tuple[KCFG.Node, ...]: ... @property def target_ids(self) -> list[int]: @@ -131,8 +130,7 @@ def __lt__(self, other: Any) -> bool: return (self.source, self.target_ids) < (other.source, other.target_ids) @abstractmethod - def with_single_target(self, target: KCFG.Node) -> KCFG.MultiEdge: - ... + def with_single_target(self, target: KCFG.Node) -> KCFG.MultiEdge: ... @final @dataclass(frozen=True) diff --git a/src/pyk/kcfg/semantics.py b/src/pyk/kcfg/semantics.py index 8d9ccaf73..408688255 100644 --- a/src/pyk/kcfg/semantics.py +++ b/src/pyk/kcfg/semantics.py @@ -10,20 +10,16 @@ class KCFGSemantics(ABC): @abstractmethod - def is_terminal(self, c: CTerm) -> bool: - ... + def is_terminal(self, c: CTerm) -> bool: ... @abstractmethod - def extract_branches(self, c: CTerm) -> list[KInner]: - ... + def extract_branches(self, c: CTerm) -> list[KInner]: ... @abstractmethod - def abstract_node(self, c: CTerm) -> CTerm: - ... + def abstract_node(self, c: CTerm) -> CTerm: ... @abstractmethod - def same_loop(self, c1: CTerm, c2: CTerm) -> bool: - ... + def same_loop(self, c1: CTerm, c2: CTerm) -> bool: ... class DefaultSemantics(KCFGSemantics): diff --git a/src/pyk/kdist/_kdist.py b/src/pyk/kdist/_kdist.py index 2e172854c..3adfc6ae8 100644 --- a/src/pyk/kdist/_kdist.py +++ b/src/pyk/kdist/_kdist.py @@ -155,7 +155,7 @@ def _build_target( ): try: target.target.build(output_dir, deps=self._deps(target), args=args, verbose=verbose) - except BaseException as err: + except BaseException as err: # noqa: B036 shutil.rmtree(output_dir, ignore_errors=True) raise RuntimeError(f'Build failed: {target_id.full_name}') from err diff --git a/src/pyk/kdist/api.py b/src/pyk/kdist/api.py index 2af08d186..c6eb6a932 100644 --- a/src/pyk/kdist/api.py +++ b/src/pyk/kdist/api.py @@ -56,8 +56,7 @@ def full_name(self) -> str: class Target(ABC): @abstractmethod - def build(self, output_dir: Path, deps: dict[str, Path], args: dict[str, Any], verbose: bool) -> None: - ... + def build(self, output_dir: Path, deps: dict[str, Path], args: dict[str, Any], verbose: bool) -> None: ... 
def deps(self) -> Iterable[str]: return () diff --git a/src/pyk/kllvm/convert.py b/src/pyk/kllvm/convert.py index 7bdd01867..25aa81cd6 100644 --- a/src/pyk/kllvm/convert.py +++ b/src/pyk/kllvm/convert.py @@ -168,16 +168,16 @@ def llvm_to_sentence(decl: kllvm.Declaration) -> Sentence: case kllvm.ModuleImportDeclaration(): # type: ignore return Import(decl.module_name, attrs) case kllvm.CompositeSortDeclaration(): # type: ignore - return SortDecl(decl.name, vars, attrs, hooked=decl.is_hooked) + return SortDecl(decl.name, vars, attrs, hooked=decl.is_hooked) # type: ignore case kllvm.SymbolDeclaration(): # type: ignore llvm_to_symbol = decl.symbol - symbol = Symbol(llvm_to_symbol.name, vars) + symbol = Symbol(llvm_to_symbol.name, vars) # type: ignore param_sorts = (llvm_to_sort(sort) for sort in llvm_to_symbol.arguments) sort = llvm_to_sort(llvm_to_symbol.sort) return SymbolDecl(symbol, param_sorts, sort, attrs, hooked=decl.is_hooked) case kllvm.AliasDeclaration(): # type: ignore llvm_to_symbol = decl.symbol - symbol = Symbol(llvm_to_symbol.name, vars) + symbol = Symbol(llvm_to_symbol.name, vars) # type: ignore param_sorts = (llvm_to_sort(sort) for sort in llvm_to_symbol.arguments) sort = llvm_to_sort(llvm_to_symbol.sort) left = App(*_unpack_composite_pattern(decl.variables)) @@ -186,9 +186,9 @@ def llvm_to_sentence(decl: kllvm.Declaration) -> Sentence: case kllvm.AxiomDeclaration(): # type: ignore pattern = llvm_to_pattern(decl.pattern) if decl.is_claim: - return Claim(vars, pattern, attrs) + return Claim(vars, pattern, attrs) # type: ignore else: - return Axiom(vars, pattern, attrs) + return Axiom(vars, pattern, attrs) # type: ignore case _: raise AssertionError() diff --git a/src/pyk/kore/match.py b/src/pyk/kore/match.py index cd397c19e..86c18aeca 100644 --- a/src/pyk/kore/match.py +++ b/src/pyk/kore/match.py @@ -128,13 +128,11 @@ def res(pattern: Pattern) -> App: @overload -def arg(n: int, /) -> Callable[[App], Pattern]: - ... +def arg(n: int, /) -> Callable[[App], Pattern]: ... @overload -def arg(symbol: str, /) -> Callable[[App], App]: - ... +def arg(symbol: str, /) -> Callable[[App], App]: ... def arg(id: int | str) -> Callable[[App], Pattern | App]: @@ -155,58 +153,47 @@ def res(app: App) -> Pattern | App: @overload -def args() -> Callable[[App], tuple[()]]: - ... +def args() -> Callable[[App], tuple[()]]: ... @overload -def args(n1: int, /) -> Callable[[App], tuple[Pattern]]: - ... +def args(n1: int, /) -> Callable[[App], tuple[Pattern]]: ... @overload -def args(n1: int, n2: int, /) -> Callable[[App], tuple[Pattern, Pattern]]: - ... +def args(n1: int, n2: int, /) -> Callable[[App], tuple[Pattern, Pattern]]: ... @overload -def args(n1: int, n2: int, n3: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern]]: - ... +def args(n1: int, n2: int, n3: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern]]: ... @overload -def args(n1: int, n2: int, n3: int, n4: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern, Pattern]]: - ... +def args(n1: int, n2: int, n3: int, n4: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern, Pattern]]: ... @overload -def args(*ns: int) -> Callable[[App], tuple[Pattern, ...]]: - ... +def args(*ns: int) -> Callable[[App], tuple[Pattern, ...]]: ... @overload -def args(s1: str, /) -> Callable[[App], tuple[App]]: - ... +def args(s1: str, /) -> Callable[[App], tuple[App]]: ... @overload -def args(s1: str, s2: str, /) -> Callable[[App], tuple[App, App]]: - ... +def args(s1: str, s2: str, /) -> Callable[[App], tuple[App, App]]: ... 
@overload -def args(s1: str, s2: str, s3: str, /) -> Callable[[App], tuple[App, App, App]]: - ... +def args(s1: str, s2: str, s3: str, /) -> Callable[[App], tuple[App, App, App]]: ... @overload -def args(s1: str, s2: str, s3: str, s4: str, /) -> Callable[[App], tuple[App, App, App, App]]: - ... +def args(s1: str, s2: str, s3: str, s4: str, /) -> Callable[[App], tuple[App, App, App, App]]: ... @overload -def args(*ss: str) -> Callable[[App], tuple[App, ...]]: - ... +def args(*ss: str) -> Callable[[App], tuple[App, ...]]: ... def args(*ids: Any) -> Callable[[App], tuple]: diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 4d44142a6..104674303 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -50,8 +50,7 @@ def __init__(self, message: str, code: int, data: Any = None): class Transport(ContextManager['Transport'], ABC): @abstractmethod - def request(self, req: str) -> str: - ... + def request(self, req: str) -> str: ... def __enter__(self) -> Transport: return self @@ -60,16 +59,13 @@ def __exit__(self, *args: Any) -> None: self.close() @abstractmethod - def close(self) -> None: - ... + def close(self) -> None: ... @abstractmethod - def command(self, bug_report_id: str, old_id: int, bug_report_request: str) -> list[str]: - ... + def command(self, bug_report_id: str, old_id: int, bug_report_request: str) -> list[str]: ... @abstractmethod - def description(self) -> str: - ... + def description(self) -> str: ... class TransportType(Enum): @@ -479,8 +475,7 @@ def from_dict(cls: type[RR], dct: Mapping[str, Any]) -> RR: raise ValueError(f"Expected {dct['tag']} as 'success'/'failure'") @abstractmethod - def to_dict(self) -> dict[str, Any]: - ... + def to_dict(self) -> dict[str, Any]: ... @final @@ -538,8 +533,7 @@ def from_dict(cls: type[LE], dct: Mapping[str, Any]) -> LE: raise ValueError(f"Expected {dct['tag']} as 'rewrite'/'simplification'") @abstractmethod - def to_dict(self) -> dict[str, Any]: - ... + def to_dict(self) -> dict[str, Any]: ... @final @@ -832,14 +826,12 @@ def from_dict(dct: Mapping[str, Any]) -> GetModelResult: @final @dataclass(frozen=True) -class UnknownResult(GetModelResult): - ... +class UnknownResult(GetModelResult): ... @final @dataclass(frozen=True) -class UnsatResult(GetModelResult): - ... +class UnsatResult(GetModelResult): ... @final @@ -1323,4 +1315,3 @@ def kore_server( } return BoosterServer(booster_args) return KoreServer(kore_args) - diff --git a/src/pyk/kore/syntax.py b/src/pyk/kore/syntax.py index c889ccee4..5db55c2ff 100644 --- a/src/pyk/kore/syntax.py +++ b/src/pyk/kore/syntax.py @@ -87,8 +87,7 @@ def text(self) -> str: return str_io.getvalue() @abstractmethod - def write(self, output: IO[str]) -> None: - ... + def write(self, output: IO[str]) -> None: ... def _write_sep_by_comma(kores: Iterable[Kore], output: IO[str]) -> None: @@ -111,8 +110,7 @@ def json(self) -> str: @property @abstractmethod - def dict(self) -> dict[str, Any]: - ... + def dict(self) -> dict[str, Any]: ... @staticmethod def from_dict(dct: Mapping[str, Any]) -> Sort: @@ -134,8 +132,7 @@ class WithSort(ABC): sort: Sort @abstractmethod - def let_sort(self: WS, sort: Sort) -> WS: - ... + def let_sort(self: WS, sort: Sort) -> WS: ... def map_sort(self: WS, f: Callable[[Sort], Sort]) -> WS: return self.let_sort(f(self.sort)) @@ -259,16 +256,14 @@ def from_json(s: str) -> Pattern: @classmethod @abstractmethod - def _from_dict(cls: type[P], dct: Mapping[str, Any], patterns: list[Pattern]) -> P: - ... 
+ def _from_dict(cls: type[P], dct: Mapping[str, Any], patterns: list[Pattern]) -> P: ... @property def json(self) -> str: return json.dumps(self.dict, sort_keys=True) @abstractmethod - def _dict(self, dicts: list) -> dict[str, Any]: - ... + def _dict(self, dicts: list) -> dict[str, Any]: ... @classmethod @abstractmethod @@ -305,12 +300,10 @@ def dict(self) -> dict[str, Any]: @property @abstractmethod - def patterns(self) -> tuple[Pattern, ...]: - ... + def patterns(self) -> tuple[Pattern, ...]: ... @abstractmethod - def let_patterns(self: P, patterns: Iterable[Pattern]) -> P: - ... + def let_patterns(self: P, patterns: Iterable[Pattern]) -> P: ... def map_patterns(self: P, f: Callable[[Pattern], Pattern]) -> P: return self.let_patterns(patterns=(f(pattern) for pattern in self.patterns)) @@ -543,8 +536,7 @@ def write(self, output: IO[str]) -> None: class MLPattern(Pattern): @classmethod @abstractmethod - def symbol(cls) -> str: - ... + def symbol(cls) -> str: ... @classmethod def of(cls: type[ML], symbol: str, sorts: Iterable[Sort] = (), patterns: Iterable[Pattern] = ()) -> ML: @@ -565,8 +557,7 @@ def _check_symbol(cls: type[ML], symbol: str) -> None: @property @abstractmethod - def sorts(self) -> tuple[Sort, ...]: - ... + def sorts(self) -> tuple[Sort, ...]: ... @property def ctor_patterns(self) -> tuple[Pattern, ...]: @@ -1618,8 +1609,7 @@ def _dict(self, dicts: list) -> dict[str, Any]: return {'tag': 'DV', 'sort': self.sort.dict, 'value': self.value.value} -class MLSyntaxSugar(MLPattern): - ... +class MLSyntaxSugar(MLPattern): ... # TODO AppAssoc, OrAssoc @@ -1628,8 +1618,7 @@ class Assoc(MLSyntaxSugar): @property @abstractmethod - def pattern(self) -> Pattern: - ... + def pattern(self) -> Pattern: ... @property def sorts(self) -> tuple[()]: @@ -1792,15 +1781,13 @@ class WithAttrs(ABC): attrs: tuple[App, ...] @abstractmethod - def let_attrs(self: WA, attrs: Iterable[App]) -> WA: - ... + def let_attrs(self: WA, attrs: Iterable[App]) -> WA: ... def map_attrs(self: WA, f: Callable[[tuple[App, ...]], Iterable[App]]) -> WA: return self.let_attrs(f(self.attrs)) -class Sentence(Kore, WithAttrs): - ... +class Sentence(Kore, WithAttrs): ... @final diff --git a/src/pyk/krepl/repl.py b/src/pyk/krepl/repl.py index dcae4e6d4..63a13f954 100644 --- a/src/pyk/krepl/repl.py +++ b/src/pyk/krepl/repl.py @@ -32,12 +32,10 @@ def __iter__(self) -> Iterator[T]: state = self.next_state(state) @abstractmethod - def init_state(self) -> T: - ... + def init_state(self) -> T: ... @abstractmethod - def next_state(self, state: T, steps: int | None = None) -> T: - ... + def next_state(self, state: T, steps: int | None = None) -> T: ... @final @@ -99,8 +97,7 @@ def _show_parser() -> ArgumentParser: return ArgumentParser(description='Show the current configuration') -class ReplError(Exception): - ... +class ReplError(Exception): ... class BaseRepl(Cmd, Generic[T], ABC): diff --git a/src/pyk/ktool/kompile.py b/src/pyk/ktool/kompile.py index b3f3fffcb..a77b27566 100644 --- a/src/pyk/ktool/kompile.py +++ b/src/pyk/ktool/kompile.py @@ -109,8 +109,7 @@ def from_dict(dct: Mapping[str, Any]) -> Kompile: @property @abstractmethod - def backend(self) -> KompileBackend: - ... + def backend(self) -> KompileBackend: ... def __call__( self, @@ -174,8 +173,7 @@ def __call__( return definition_dir @abstractmethod - def args(self) -> list[str]: - ... + def args(self) -> list[str]: ... 
@final diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index db5269572..1fa97fa3c 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,7 +1,7 @@ from __future__ import annotations -import time import sys +import time from abc import ABC, abstractmethod from multiprocessing import Process, Queue @@ -70,8 +70,7 @@ def status(self) -> ProofStatus: ... -class ProcessData(ABC): - ... +class ProcessData(ABC): ... # @abstractmethod diff --git a/src/pyk/proof/proof.py b/src/pyk/proof/proof.py index f4d0558c5..1df679314 100644 --- a/src/pyk/proof/proof.py +++ b/src/pyk/proof/proof.py @@ -171,8 +171,7 @@ def subproofs_status(self) -> ProofStatus: @property @abstractmethod - def status(self) -> ProofStatus: - ... + def status(self) -> ProofStatus: ... @property def failed(self) -> bool: @@ -192,8 +191,7 @@ def dict(self) -> dict[str, Any]: @classmethod @abstractmethod - def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> Proof: - ... + def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> Proof: ... @classmethod def read_proof(cls: type[Proof], id: str, proof_dir: Path) -> Proof: @@ -259,8 +257,7 @@ class ProofSummary(ABC): @property @abstractmethod - def lines(self) -> list[str]: - ... + def lines(self) -> list[str]: ... def __str__(self) -> str: return '\n'.join(self.lines) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 776cf0847..b06b600ad 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -33,8 +33,8 @@ from pyk.utils import BugReport from ..cterm import CSubst, CTerm - from ..kcfg.explore import ExtendResult from ..kast.outer import KClaim, KDefinition, KFlatModuleList, KRuleLike + from ..kcfg.explore import ExtendResult from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint @@ -560,7 +560,7 @@ def from_dict(cls: type[APRBMCProof], dct: Mapping[str, Any], proof_dir: Path | logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in dct['logs'].items()} else: logs = {} - checked_for_subsumption = list({kcfg._resolve(node_id) for node_id in dct['checked_for_subsumption']}) + checked_for_subsumption = set({kcfg._resolve(node_id) for node_id in dct['checked_for_subsumption']}) return APRBMCProof( id, diff --git a/src/pyk/testing/_kompiler.py b/src/pyk/testing/_kompiler.py index 8eb95a394..9aae8f2b1 100644 --- a/src/pyk/testing/_kompiler.py +++ b/src/pyk/testing/_kompiler.py @@ -228,8 +228,7 @@ def kore_client(self, _kore_server: KoreServer, bug_report: BugReport) -> Iterat class KCFGExploreTest(KoreClientTest, KPrintTest): @abstractmethod - def semantics(self, definition: KDefinition) -> KCFGSemantics: - ... + def semantics(self, definition: KDefinition) -> KCFGSemantics: ... @pytest.fixture def kcfg_explore( diff --git a/src/pyk/utils.py b/src/pyk/utils.py index 2a7d7e09a..acaeec3e2 100644 --- a/src/pyk/utils.py +++ b/src/pyk/utils.py @@ -92,7 +92,7 @@ def check_type(x: Any, typ: type[T]) -> T: def raised(f: Callable, *args: Any, **kwargs: Any) -> BaseException | None: try: f(*args, **kwargs) - except BaseException as e: + except BaseException as e: # noqa: B036 return e return None @@ -150,16 +150,14 @@ def res(p: P | None) -> R | None: @overload -def tuple_of() -> Callable[[tuple[()]], tuple[()]]: - ... +def tuple_of() -> Callable[[tuple[()]], tuple[()]]: ... @overload def tuple_of( f1: Callable[[P1], R1], /, -) -> Callable[[tuple[P1]], tuple[R1]]: - ... 
+) -> Callable[[tuple[P1]], tuple[R1]]: ... @overload @@ -167,8 +165,7 @@ def tuple_of( f1: Callable[[P1], R1], f2: Callable[[P2], R2], /, -) -> Callable[[tuple[P1, P2]], tuple[R1, R2]]: - ... +) -> Callable[[tuple[P1, P2]], tuple[R1, R2]]: ... @overload @@ -177,8 +174,7 @@ def tuple_of( f2: Callable[[P2], R2], f3: Callable[[P3], R3], /, -) -> Callable[[tuple[P1, P2, P3]], tuple[R1, R2, R3]]: - ... +) -> Callable[[tuple[P1, P2, P3]], tuple[R1, R2, R3]]: ... @overload @@ -188,8 +184,7 @@ def tuple_of( f3: Callable[[P3], R3], f4: Callable[[P4], R4], /, -) -> Callable[[tuple[P1, P2, P3, P4]], tuple[R1, R2, R3, R4]]: - ... +) -> Callable[[tuple[P1, P2, P3, P4]], tuple[R1, R2, R3, R4]]: ... def tuple_of(*args: Callable) -> Callable: diff --git a/src/tests/unit/kast/test_kast.py b/src/tests/unit/kast/test_kast.py index 5d36d2385..a8d06e8ed 100644 --- a/src/tests/unit/kast/test_kast.py +++ b/src/tests/unit/kast/test_kast.py @@ -107,7 +107,7 @@ def test_klabel_init_multiple_values(params: list[KSort]) -> None: actual_message = str(excinfo.value) # Then - actual_message, expected_message + actual_message, expected_message # noqa: B018 @pytest.mark.parametrize('params', KLABEL_TEST_DATA, ids=count()) diff --git a/src/tests/unit/test_proof.py b/src/tests/unit/test_proof.py index c604ebf24..b78550478 100644 --- a/src/tests/unit/test_proof.py +++ b/src/tests/unit/test_proof.py @@ -279,12 +279,12 @@ def test_print_failure_info() -> None: path_conditions[5] = '#Top' failure_reasons = {} - failure_reasons[ - 3 - ] = 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 2 #Implies 1' - failure_reasons[ - 5 - ] = 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 5 #Implies 6' + failure_reasons[3] = ( + 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 2 #Implies 1' + ) + failure_reasons[5] = ( + 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 5 #Implies 6' + ) models: dict[int, list[tuple[str, str]]] = {} models[5] = [('X', '101')] From 9f3096244bf591d89d05568796b20dc7be6063de Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 5 Feb 2024 15:49:07 -0600 Subject: [PATCH 081/116] Clean up parallel.py --- src/pyk/proof/parallel.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 1fa97fa3c..4b4ca0c1f 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys import time from abc import ABC, abstractmethod from multiprocessing import Process, Queue @@ -73,11 +72,6 @@ def status(self) -> ProofStatus: class ProcessData(ABC): ... -# @abstractmethod -# def cleanup(self) -> None: -# ... - - class ProofStep(ABC, Generic[U, D]): """ Should be a description of a computation needed to make progress on a `Proof`. 
@@ -116,7 +110,6 @@ def prove_parallel( total_time = 0 total_init_time = time.time_ns() - print('d', file=sys.stderr) def run_process(data: ProcessData) -> None: while True: @@ -127,8 +120,6 @@ def run_process(data: ProcessData) -> None: update = proof_step.exec(data) out_queue.put((proof_id, update)) - # data.cleanup() - def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] @@ -147,17 +138,14 @@ def submit(proof_id: str) -> None: processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] for process in processes: process.start() - print('e', file=sys.stderr) for proof_id in proofs.keys(): submit(proof_id) - print('f', file=sys.stderr) while pending_jobs > 0: wait_init_time = time.time_ns() proof_id, update = out_queue.get() total_process_time += time.time_ns() - wait_init_time - print('g', file=sys.stderr) pending_jobs -= 1 proof = proofs[proof_id] From f16161dd0c00f04a0be47a350730b09048428651 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 5 Feb 2024 18:12:34 -0600 Subject: [PATCH 082/116] Clean up reachability.py --- src/pyk/proof/reachability.py | 41 ----------------------------------- 1 file changed, 41 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index b06b600ad..c9309e0c9 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1412,14 +1412,6 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): circularity: bool depth_is_nonzero: bool - # @property - # def circularities_module_name(self) -> str: - # return self.circularities_module_name - # - # @property - # def dependencies_module_name(self) -> str: - # return self.dependencies_module_name - def __hash__(self) -> int: return hash((self.cterm, self.node_id)) @@ -1432,26 +1424,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: init_kcfg_explore = False - # if data.kore_servers.get(self.proof_id) is None: - # init_kcfg_explore = True - # data.kore_servers[self.proof_id] = kore_server( - # definition_dir=data.definition_dir, - # llvm_definition_dir=data.llvm_definition_dir, - # module_name=data.module_name, - # command=data.command, - # bug_report=self.bug_report, - # smt_timeout=data.smt_timeout, - # smt_retry_limit=data.smt_retry_limit, - # smt_tactic=data.smt_tactic, - # haskell_log_format=data.haskell_log_format, - # haskell_log_entries=data.haskell_log_entries, - # log_axioms_file=data.log_axioms_file, - # fallback_on=None, - # interim_simplification=None, - # no_post_exec_simplify=None, - # ) - # server = data.kore_servers[self.proof_id] - with KoreClient( host='localhost', port=self.port, @@ -1466,19 +1438,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: trace_rewrites=self.trace_rewrites, ) - # if init_kcfg_explore: - # kcfg_explore.add_dependencies_module( - # self.main_module_name, - # self.dependencies_module_name, - # self.dependencies_as_claims, - # priority=1, - # ) - # kcfg_explore.add_dependencies_module( - # self.main_module_name, - # self.circularities_module_name, - # self.dependencies_as_claims + ([self.self_proof_as_claim] if self.circularity else []), - # priority=1, - # ) def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) -> None: _module = KFlatModule(module_name, sentences, [KImport(import_name)]) _kore_module = kflatmodule_to_kore( From 95f560d9864127ce00f27e29f02cc001954b1865 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 14:30:44 -0600 Subject: [PATCH 083/116] 
Remove extra inject_modules --- src/pyk/proof/reachability.py | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index c9309e0c9..82f279cd0 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1166,11 +1166,6 @@ def __init__( self.log_axioms_file = log_axioms_file -# def cleanup(self) -> None: -# for server in self.kore_servers.values(): -# server.close() - - class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProcessData]): prover: APRProver kcfg_explore: KCFGExplore @@ -1256,6 +1251,8 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: """ steps: list[APRProofStep] = [] target_node = proof.kcfg.node(proof.target) + + for pending_node in proof.pending: module_name = ( self.prover.circularities_module_name @@ -1292,8 +1289,6 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: is_terminal=(self.kcfg_explore.kcfg_semantics.is_terminal(pending_node.cterm)), target_is_terminal=(proof.target not in proof._terminal), main_module_name=self.prover.main_module_name, - dependencies_as_rules=[d.as_rule() for d in apr_subproofs], - self_proof_as_rule=proof.as_rule(), circularity=proof.circularity, depth_is_nonzero=self.prover.nonzero_depth(pending_node), ) @@ -1407,8 +1402,6 @@ class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): id: str | None trace_rewrites: bool - dependencies_as_rules: list[KRule] - self_proof_as_rule: KRule circularity: bool depth_is_nonzero: bool @@ -1438,17 +1431,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: trace_rewrites=self.trace_rewrites, ) - def _inject_module(module_name: str, import_name: str, sentences: list[KRule]) -> None: - _module = KFlatModule(module_name, sentences, [KImport(import_name)]) - _kore_module = kflatmodule_to_kore( - kcfg_explore.kprint.definition, kcfg_explore.kprint.kompiled_kore, _module - ) - kcfg_explore._kore_client.add_module(_kore_module, name_as_id=True) - - if init_kcfg_explore: - _inject_module(self.dependencies_module_name, self.main_module_name, self.dependencies_as_rules) - _inject_module(self.circularities_module_name, self.main_module_name, [self.self_proof_as_rule]) - cterm_implies_time = 0 extend_cterm_time = 0 From b4ae90775850e14011d8057b490d8318ffd7af78 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 15:22:53 -0600 Subject: [PATCH 084/116] Revert formatting changes with specific black version --- poetry.lock | 52 ++++++++++++++++------------------- src/pyk/kast/inner.py | 42 ++++++++++++++++++---------- src/pyk/kast/kast.py | 6 ++-- src/pyk/kast/markdown.py | 3 +- src/pyk/kast/outer.py | 3 +- src/pyk/kast/outer_syntax.py | 18 ++++++++---- src/pyk/kbuild/project.py | 3 +- src/pyk/kcfg/explore.py | 9 ++++-- src/pyk/kcfg/kcfg.py | 6 ++-- src/pyk/kcfg/semantics.py | 12 +++++--- src/pyk/kdist/api.py | 3 +- src/pyk/kore/match.py | 39 +++++++++++++++++--------- src/pyk/kore/rpc.py | 24 ++++++++++------ src/pyk/kore/syntax.py | 39 +++++++++++++++++--------- src/pyk/krepl/repl.py | 9 ++++-- src/pyk/ktool/kompile.py | 6 ++-- src/pyk/proof/parallel.py | 3 +- src/pyk/proof/proof.py | 9 ++++-- src/pyk/proof/reachability.py | 13 --------- src/pyk/testing/_kompiler.py | 3 +- src/pyk/utils.py | 15 ++++++---- src/tests/unit/test_proof.py | 12 ++++---- 22 files changed, 198 insertions(+), 131 deletions(-) diff --git a/poetry.lock b/poetry.lock index f6c25cd98..c90da08b3 100644 --- a/poetry.lock +++ b/poetry.lock @@ 
-61,33 +61,29 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "black" -version = "24.1.1" +version = "23.10.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, - {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, - {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, - {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, - {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, - {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, - {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, - {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, - {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, - {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, - {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, - {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, - {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, - {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, - {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, - {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, - {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, - {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, - {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, - {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, - {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, - {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = 
"sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, ] [package.dependencies] @@ -101,7 +97,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -430,13 +426,13 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flake8-bugbear" -version = "24.1.17" +version = "24.2.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-bugbear-24.1.17.tar.gz", hash = "sha256:bcb388a4f3b516258749b1e690ee394c082eff742f44595e3754cf5c7781c2c7"}, - {file = "flake8_bugbear-24.1.17-py3-none-any.whl", hash = "sha256:46cc840ddaed26507cd0ada530d1526418b717ee76c9b5dfdbd238b5eab34139"}, + {file = "flake8-bugbear-24.2.6.tar.gz", hash = "sha256:f9cb5f2a9e792dd80ff68e89a14c12eed8620af8b41a49d823b7a33064ac9658"}, + {file = "flake8_bugbear-24.2.6-py3-none-any.whl", hash = "sha256:663ef5de80cd32aacd39d362212983bc4636435a6f83700b4ed35acbd0b7d1b8"}, ] [package.dependencies] @@ -1383,4 +1379,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "ad2830bf989de6a9820095054fd33f29d9515b5beda89038490ca6297170803b" +content-hash = "6d3d47508975a4963c527a346ba075b350e37c72e8b98b82d3c0420c1d1c5f20" diff --git a/src/pyk/kast/inner.py b/src/pyk/kast/inner.py index 18dd4153a..46198ba9c 100644 --- a/src/pyk/kast/inner.py +++ b/src/pyk/kast/inner.py @@ -57,10 +57,12 @@ class KLabel(KAst): params: tuple[KSort, ...] @overload - def __init__(self, name: str, params: Iterable[str | KSort]): ... + def __init__(self, name: str, params: Iterable[str | KSort]): + ... @overload - def __init__(self, name: str, *params: str | KSort): ... + def __init__(self, name: str, *params: str | KSort): + ... # TODO Is it possible to extract a decorator? def __init__(self, name: str, *args: Any, **kwargs: Any): @@ -93,10 +95,12 @@ def __iter__(self) -> Iterator[str | KSort]: return chain([self.name], self.params) @overload - def __call__(self, args: Iterable[KInner]) -> KApply: ... + def __call__(self, args: Iterable[KInner]) -> KApply: + ... @overload - def __call__(self, *args: KInner) -> KApply: ... + def __call__(self, *args: KInner) -> KApply: + ... def __call__(self, *args: Any, **kwargs: Any) -> KApply: return self.apply(*args, **kwargs) @@ -115,10 +119,12 @@ def let(self, *, name: str | None = None, params: Iterable[str | KSort] | None = return KLabel(name=name, params=params) @overload - def apply(self, args: Iterable[KInner]) -> KApply: ... + def apply(self, args: Iterable[KInner]) -> KApply: + ... @overload - def apply(self, *args: KInner) -> KApply: ... + def apply(self, *args: KInner) -> KApply: + ... def apply(self, *args: Any, **kwargs: Any) -> KApply: """Construct a `KApply` with this `KLabel` as the AST head and the supplied parameters as the arguments.""" @@ -178,7 +184,8 @@ def _extract_dicts(dct: Mapping[str, Any]) -> list[Mapping[str, Any]]: @classmethod @abstractmethod - def _from_dict(cls: type[KI], d: Mapping[str, Any], terms: list[KInner]) -> KI: ... + def _from_dict(cls: type[KI], d: Mapping[str, Any], terms: list[KInner]) -> KI: + ... @property @abstractmethod @@ -236,7 +243,8 @@ def to_dict(self) -> dict[str, Any]: stack.append([]) @abstractmethod - def _to_dict(self, terms: list[KInner]) -> dict[str, Any]: ... + def _to_dict(self, terms: list[KInner]) -> dict[str, Any]: + ... @final @@ -356,10 +364,12 @@ class KApply(KInner): args: tuple[KInner, ...] @overload - def __init__(self, label: str | KLabel, args: Iterable[KInner]): ... + def __init__(self, label: str | KLabel, args: Iterable[KInner]): + ... @overload - def __init__(self, label: str | KLabel, *args: KInner): ... + def __init__(self, label: str | KLabel, *args: KInner): + ... 
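Every Python hunk in this formatting patch is the same mechanical revert: the `...` placeholder body of overloads, abstract methods, and empty stub classes moves back onto its own line, matching the black 23.10.1 release pinned in the poetry.lock hunk above rather than black 24.x's more compact dummy-implementation style; no runtime behavior changes. A tiny before/after illustration (the Stub class is hypothetical, not taken from pyk):

from abc import ABC, abstractmethod


class Stub(ABC):
    # black 24.x collapses the placeholder body onto the definition line
    # (the style being reverted):
    #     def run(self) -> None: ...
    #
    # black 23.10.x keeps the placeholder on its own line (the style being restored):
    @abstractmethod
    def run(self) -> None:
        ...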
def __init__(self, label: str | KLabel, *args: Any, **kwargs: Any): """Construct a new `KApply` given the input `KLabel` or str, applied to arguments.""" @@ -569,10 +579,12 @@ class KSequence(KInner, Sequence[KInner]): items: tuple[KInner, ...] @overload - def __init__(self, items: Iterable[KInner]): ... + def __init__(self, items: Iterable[KInner]): + ... @overload - def __init__(self, *items: KInner): ... + def __init__(self, *items: KInner): + ... def __init__(self, *args: Any, **kwargs: Any): """Construct a new `KSequence` given the arguments.""" @@ -601,10 +613,12 @@ def __init__(self, *args: Any, **kwargs: Any): object.__setattr__(self, 'items', tuple(items)) @overload - def __getitem__(self, key: int) -> KInner: ... + def __getitem__(self, key: int) -> KInner: + ... @overload - def __getitem__(self, key: slice) -> tuple[KInner, ...]: ... + def __getitem__(self, key: slice) -> tuple[KInner, ...]: + ... def __getitem__(self, key: int | slice) -> KInner | tuple[KInner, ...]: return self.items[key] diff --git a/src/pyk/kast/kast.py b/src/pyk/kast/kast.py index c92fc414f..98c71e85d 100644 --- a/src/pyk/kast/kast.py +++ b/src/pyk/kast/kast.py @@ -27,7 +27,8 @@ def version() -> int: return 3 @abstractmethod - def to_dict(self) -> dict[str, Any]: ... + def to_dict(self) -> dict[str, Any]: + ... @final def to_json(self) -> str: @@ -143,7 +144,8 @@ class WithKAtt(ABC): att: KAtt @abstractmethod - def let_att(self: W, att: KAtt) -> W: ... + def let_att(self: W, att: KAtt) -> W: + ... def map_att(self: W, f: Callable[[KAtt], KAtt]) -> W: return self.let_att(att=f(self.att)) diff --git a/src/pyk/kast/markdown.py b/src/pyk/kast/markdown.py index f4a2b8934..a19b96c85 100644 --- a/src/pyk/kast/markdown.py +++ b/src/pyk/kast/markdown.py @@ -70,7 +70,8 @@ def check_tag(tag: str) -> None: class Selector(ABC): @abstractmethod - def eval(self, atoms: Container[str]) -> bool: ... + def eval(self, atoms: Container[str]) -> bool: + ... @final diff --git a/src/pyk/kast/outer.py b/src/pyk/kast/outer.py index 7a08d9d8c..286cc1426 100644 --- a/src/pyk/kast/outer.py +++ b/src/pyk/kast/outer.py @@ -562,7 +562,8 @@ def let( requires: KInner | None = None, ensures: KInner | None = None, att: KAtt | None = None, - ) -> RL: ... + ) -> RL: + ... @final diff --git a/src/pyk/kast/outer_syntax.py b/src/pyk/kast/outer_syntax.py index 57b5bc4b4..db2b88816 100644 --- a/src/pyk/kast/outer_syntax.py +++ b/src/pyk/kast/outer_syntax.py @@ -11,7 +11,8 @@ from typing import Any, Final -class AST(ABC): ... +class AST(ABC): + ... @final @@ -23,10 +24,12 @@ def __init__(self, items: Iterable[tuple[str, str]] = ()): object.__setattr__(self, 'items', tuple(items)) @overload - def __getitem__(self, key: int) -> tuple[str, str]: ... + def __getitem__(self, key: int) -> tuple[str, str]: + ... @overload - def __getitem__(self, key: slice) -> tuple[tuple[str, str], ...]: ... + def __getitem__(self, key: slice) -> tuple[tuple[str, str], ...]: + ... def __getitem__(self, key: Any) -> Any: return self.items[key] @@ -38,10 +41,12 @@ def __len__(self) -> int: EMPTY_ATT: Final = Att() -class Sentence(AST, ABC): ... +class Sentence(AST, ABC): + ... -class SyntaxSentence(Sentence, ABC): ... +class SyntaxSentence(Sentence, ABC): + ... class Assoc(Enum): @@ -118,7 +123,8 @@ def __init__(self, items: Iterable[ProductionItem], att: Att = EMPTY_ATT): object.__setattr__(self, 'att', att) -class ProductionItem(AST, ABC): ... +class ProductionItem(AST, ABC): + ... 
@final diff --git a/src/pyk/kbuild/project.py b/src/pyk/kbuild/project.py index d7a24d75b..6f0f8dd8c 100644 --- a/src/pyk/kbuild/project.py +++ b/src/pyk/kbuild/project.py @@ -28,7 +28,8 @@ def from_dict(dct: Mapping[str, Any]) -> Source: raise ValueError(f'Cannot parse source: {dct}') @abstractmethod - def resolve(self, project_path: Path) -> Path: ... + def resolve(self, project_path: Path) -> Path: + ... @final diff --git a/src/pyk/kcfg/explore.py b/src/pyk/kcfg/explore.py index 23db5dabf..4a783b385 100644 --- a/src/pyk/kcfg/explore.py +++ b/src/pyk/kcfg/explore.py @@ -521,17 +521,20 @@ def extract_rule_labels(_logs: tuple[LogEntry, ...]) -> list[str]: raise AssertionError() -class ExtendResult(ABC): ... +class ExtendResult(ABC): + ... @final @dataclass(frozen=True) -class Vacuous(ExtendResult): ... +class Vacuous(ExtendResult): + ... @final @dataclass(frozen=True) -class Stuck(ExtendResult): ... +class Stuck(ExtendResult): + ... @final diff --git a/src/pyk/kcfg/kcfg.py b/src/pyk/kcfg/kcfg.py index 49f24e231..b8163b759 100644 --- a/src/pyk/kcfg/kcfg.py +++ b/src/pyk/kcfg/kcfg.py @@ -57,7 +57,8 @@ def __lt__(self, other: Any) -> bool: @property @abstractmethod - def targets(self) -> tuple[KCFG.Node, ...]: ... + def targets(self) -> tuple[KCFG.Node, ...]: + ... @property def target_ids(self) -> list[int]: @@ -130,7 +131,8 @@ def __lt__(self, other: Any) -> bool: return (self.source, self.target_ids) < (other.source, other.target_ids) @abstractmethod - def with_single_target(self, target: KCFG.Node) -> KCFG.MultiEdge: ... + def with_single_target(self, target: KCFG.Node) -> KCFG.MultiEdge: + ... @final @dataclass(frozen=True) diff --git a/src/pyk/kcfg/semantics.py b/src/pyk/kcfg/semantics.py index 408688255..8d9ccaf73 100644 --- a/src/pyk/kcfg/semantics.py +++ b/src/pyk/kcfg/semantics.py @@ -10,16 +10,20 @@ class KCFGSemantics(ABC): @abstractmethod - def is_terminal(self, c: CTerm) -> bool: ... + def is_terminal(self, c: CTerm) -> bool: + ... @abstractmethod - def extract_branches(self, c: CTerm) -> list[KInner]: ... + def extract_branches(self, c: CTerm) -> list[KInner]: + ... @abstractmethod - def abstract_node(self, c: CTerm) -> CTerm: ... + def abstract_node(self, c: CTerm) -> CTerm: + ... @abstractmethod - def same_loop(self, c1: CTerm, c2: CTerm) -> bool: ... + def same_loop(self, c1: CTerm, c2: CTerm) -> bool: + ... class DefaultSemantics(KCFGSemantics): diff --git a/src/pyk/kdist/api.py b/src/pyk/kdist/api.py index c6eb6a932..2af08d186 100644 --- a/src/pyk/kdist/api.py +++ b/src/pyk/kdist/api.py @@ -56,7 +56,8 @@ def full_name(self) -> str: class Target(ABC): @abstractmethod - def build(self, output_dir: Path, deps: dict[str, Path], args: dict[str, Any], verbose: bool) -> None: ... + def build(self, output_dir: Path, deps: dict[str, Path], args: dict[str, Any], verbose: bool) -> None: + ... def deps(self) -> Iterable[str]: return () diff --git a/src/pyk/kore/match.py b/src/pyk/kore/match.py index 86c18aeca..cd397c19e 100644 --- a/src/pyk/kore/match.py +++ b/src/pyk/kore/match.py @@ -128,11 +128,13 @@ def res(pattern: Pattern) -> App: @overload -def arg(n: int, /) -> Callable[[App], Pattern]: ... +def arg(n: int, /) -> Callable[[App], Pattern]: + ... @overload -def arg(symbol: str, /) -> Callable[[App], App]: ... +def arg(symbol: str, /) -> Callable[[App], App]: + ... def arg(id: int | str) -> Callable[[App], Pattern | App]: @@ -153,47 +155,58 @@ def res(app: App) -> Pattern | App: @overload -def args() -> Callable[[App], tuple[()]]: ... 
+def args() -> Callable[[App], tuple[()]]: + ... @overload -def args(n1: int, /) -> Callable[[App], tuple[Pattern]]: ... +def args(n1: int, /) -> Callable[[App], tuple[Pattern]]: + ... @overload -def args(n1: int, n2: int, /) -> Callable[[App], tuple[Pattern, Pattern]]: ... +def args(n1: int, n2: int, /) -> Callable[[App], tuple[Pattern, Pattern]]: + ... @overload -def args(n1: int, n2: int, n3: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern]]: ... +def args(n1: int, n2: int, n3: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern]]: + ... @overload -def args(n1: int, n2: int, n3: int, n4: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern, Pattern]]: ... +def args(n1: int, n2: int, n3: int, n4: int, /) -> Callable[[App], tuple[Pattern, Pattern, Pattern, Pattern]]: + ... @overload -def args(*ns: int) -> Callable[[App], tuple[Pattern, ...]]: ... +def args(*ns: int) -> Callable[[App], tuple[Pattern, ...]]: + ... @overload -def args(s1: str, /) -> Callable[[App], tuple[App]]: ... +def args(s1: str, /) -> Callable[[App], tuple[App]]: + ... @overload -def args(s1: str, s2: str, /) -> Callable[[App], tuple[App, App]]: ... +def args(s1: str, s2: str, /) -> Callable[[App], tuple[App, App]]: + ... @overload -def args(s1: str, s2: str, s3: str, /) -> Callable[[App], tuple[App, App, App]]: ... +def args(s1: str, s2: str, s3: str, /) -> Callable[[App], tuple[App, App, App]]: + ... @overload -def args(s1: str, s2: str, s3: str, s4: str, /) -> Callable[[App], tuple[App, App, App, App]]: ... +def args(s1: str, s2: str, s3: str, s4: str, /) -> Callable[[App], tuple[App, App, App, App]]: + ... @overload -def args(*ss: str) -> Callable[[App], tuple[App, ...]]: ... +def args(*ss: str) -> Callable[[App], tuple[App, ...]]: + ... def args(*ids: Any) -> Callable[[App], tuple]: diff --git a/src/pyk/kore/rpc.py b/src/pyk/kore/rpc.py index 104674303..08decd8ea 100644 --- a/src/pyk/kore/rpc.py +++ b/src/pyk/kore/rpc.py @@ -50,7 +50,8 @@ def __init__(self, message: str, code: int, data: Any = None): class Transport(ContextManager['Transport'], ABC): @abstractmethod - def request(self, req: str) -> str: ... + def request(self, req: str) -> str: + ... def __enter__(self) -> Transport: return self @@ -59,13 +60,16 @@ def __exit__(self, *args: Any) -> None: self.close() @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + ... @abstractmethod - def command(self, bug_report_id: str, old_id: int, bug_report_request: str) -> list[str]: ... + def command(self, bug_report_id: str, old_id: int, bug_report_request: str) -> list[str]: + ... @abstractmethod - def description(self) -> str: ... + def description(self) -> str: + ... class TransportType(Enum): @@ -475,7 +479,8 @@ def from_dict(cls: type[RR], dct: Mapping[str, Any]) -> RR: raise ValueError(f"Expected {dct['tag']} as 'success'/'failure'") @abstractmethod - def to_dict(self) -> dict[str, Any]: ... + def to_dict(self) -> dict[str, Any]: + ... @final @@ -533,7 +538,8 @@ def from_dict(cls: type[LE], dct: Mapping[str, Any]) -> LE: raise ValueError(f"Expected {dct['tag']} as 'rewrite'/'simplification'") @abstractmethod - def to_dict(self) -> dict[str, Any]: ... + def to_dict(self) -> dict[str, Any]: + ... @final @@ -826,12 +832,14 @@ def from_dict(dct: Mapping[str, Any]) -> GetModelResult: @final @dataclass(frozen=True) -class UnknownResult(GetModelResult): ... +class UnknownResult(GetModelResult): + ... @final @dataclass(frozen=True) -class UnsatResult(GetModelResult): ... +class UnsatResult(GetModelResult): + ... 
@final diff --git a/src/pyk/kore/syntax.py b/src/pyk/kore/syntax.py index 5db55c2ff..c889ccee4 100644 --- a/src/pyk/kore/syntax.py +++ b/src/pyk/kore/syntax.py @@ -87,7 +87,8 @@ def text(self) -> str: return str_io.getvalue() @abstractmethod - def write(self, output: IO[str]) -> None: ... + def write(self, output: IO[str]) -> None: + ... def _write_sep_by_comma(kores: Iterable[Kore], output: IO[str]) -> None: @@ -110,7 +111,8 @@ def json(self) -> str: @property @abstractmethod - def dict(self) -> dict[str, Any]: ... + def dict(self) -> dict[str, Any]: + ... @staticmethod def from_dict(dct: Mapping[str, Any]) -> Sort: @@ -132,7 +134,8 @@ class WithSort(ABC): sort: Sort @abstractmethod - def let_sort(self: WS, sort: Sort) -> WS: ... + def let_sort(self: WS, sort: Sort) -> WS: + ... def map_sort(self: WS, f: Callable[[Sort], Sort]) -> WS: return self.let_sort(f(self.sort)) @@ -256,14 +259,16 @@ def from_json(s: str) -> Pattern: @classmethod @abstractmethod - def _from_dict(cls: type[P], dct: Mapping[str, Any], patterns: list[Pattern]) -> P: ... + def _from_dict(cls: type[P], dct: Mapping[str, Any], patterns: list[Pattern]) -> P: + ... @property def json(self) -> str: return json.dumps(self.dict, sort_keys=True) @abstractmethod - def _dict(self, dicts: list) -> dict[str, Any]: ... + def _dict(self, dicts: list) -> dict[str, Any]: + ... @classmethod @abstractmethod @@ -300,10 +305,12 @@ def dict(self) -> dict[str, Any]: @property @abstractmethod - def patterns(self) -> tuple[Pattern, ...]: ... + def patterns(self) -> tuple[Pattern, ...]: + ... @abstractmethod - def let_patterns(self: P, patterns: Iterable[Pattern]) -> P: ... + def let_patterns(self: P, patterns: Iterable[Pattern]) -> P: + ... def map_patterns(self: P, f: Callable[[Pattern], Pattern]) -> P: return self.let_patterns(patterns=(f(pattern) for pattern in self.patterns)) @@ -536,7 +543,8 @@ def write(self, output: IO[str]) -> None: class MLPattern(Pattern): @classmethod @abstractmethod - def symbol(cls) -> str: ... + def symbol(cls) -> str: + ... @classmethod def of(cls: type[ML], symbol: str, sorts: Iterable[Sort] = (), patterns: Iterable[Pattern] = ()) -> ML: @@ -557,7 +565,8 @@ def _check_symbol(cls: type[ML], symbol: str) -> None: @property @abstractmethod - def sorts(self) -> tuple[Sort, ...]: ... + def sorts(self) -> tuple[Sort, ...]: + ... @property def ctor_patterns(self) -> tuple[Pattern, ...]: @@ -1609,7 +1618,8 @@ def _dict(self, dicts: list) -> dict[str, Any]: return {'tag': 'DV', 'sort': self.sort.dict, 'value': self.value.value} -class MLSyntaxSugar(MLPattern): ... +class MLSyntaxSugar(MLPattern): + ... # TODO AppAssoc, OrAssoc @@ -1618,7 +1628,8 @@ class Assoc(MLSyntaxSugar): @property @abstractmethod - def pattern(self) -> Pattern: ... + def pattern(self) -> Pattern: + ... @property def sorts(self) -> tuple[()]: @@ -1781,13 +1792,15 @@ class WithAttrs(ABC): attrs: tuple[App, ...] @abstractmethod - def let_attrs(self: WA, attrs: Iterable[App]) -> WA: ... + def let_attrs(self: WA, attrs: Iterable[App]) -> WA: + ... def map_attrs(self: WA, f: Callable[[tuple[App, ...]], Iterable[App]]) -> WA: return self.let_attrs(f(self.attrs)) -class Sentence(Kore, WithAttrs): ... +class Sentence(Kore, WithAttrs): + ... @final diff --git a/src/pyk/krepl/repl.py b/src/pyk/krepl/repl.py index 63a13f954..dcae4e6d4 100644 --- a/src/pyk/krepl/repl.py +++ b/src/pyk/krepl/repl.py @@ -32,10 +32,12 @@ def __iter__(self) -> Iterator[T]: state = self.next_state(state) @abstractmethod - def init_state(self) -> T: ... 
+ def init_state(self) -> T: + ... @abstractmethod - def next_state(self, state: T, steps: int | None = None) -> T: ... + def next_state(self, state: T, steps: int | None = None) -> T: + ... @final @@ -97,7 +99,8 @@ def _show_parser() -> ArgumentParser: return ArgumentParser(description='Show the current configuration') -class ReplError(Exception): ... +class ReplError(Exception): + ... class BaseRepl(Cmd, Generic[T], ABC): diff --git a/src/pyk/ktool/kompile.py b/src/pyk/ktool/kompile.py index a77b27566..b3f3fffcb 100644 --- a/src/pyk/ktool/kompile.py +++ b/src/pyk/ktool/kompile.py @@ -109,7 +109,8 @@ def from_dict(dct: Mapping[str, Any]) -> Kompile: @property @abstractmethod - def backend(self) -> KompileBackend: ... + def backend(self) -> KompileBackend: + ... def __call__( self, @@ -173,7 +174,8 @@ def __call__( return definition_dir @abstractmethod - def args(self) -> list[str]: ... + def args(self) -> list[str]: + ... @final diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 4b4ca0c1f..c8833e73f 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -69,7 +69,8 @@ def status(self) -> ProofStatus: ... -class ProcessData(ABC): ... +class ProcessData(ABC): + ... class ProofStep(ABC, Generic[U, D]): diff --git a/src/pyk/proof/proof.py b/src/pyk/proof/proof.py index 1df679314..f4d0558c5 100644 --- a/src/pyk/proof/proof.py +++ b/src/pyk/proof/proof.py @@ -171,7 +171,8 @@ def subproofs_status(self) -> ProofStatus: @property @abstractmethod - def status(self) -> ProofStatus: ... + def status(self) -> ProofStatus: + ... @property def failed(self) -> bool: @@ -191,7 +192,8 @@ def dict(self) -> dict[str, Any]: @classmethod @abstractmethod - def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> Proof: ... + def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> Proof: + ... @classmethod def read_proof(cls: type[Proof], id: str, proof_dir: Path) -> Proof: @@ -257,7 +259,8 @@ class ProofSummary(ABC): @property @abstractmethod - def lines(self) -> list[str]: ... + def lines(self) -> list[str]: + ... def __str__(self) -> str: return '\n'.join(self.lines) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 82f279cd0..db48d82d9 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1252,22 +1252,12 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: steps: list[APRProofStep] = [] target_node = proof.kcfg.node(proof.target) - for pending_node in proof.pending: module_name = ( self.prover.circularities_module_name if self.prover.nonzero_depth(pending_node) else self.prover.dependencies_module_name ) - - subproofs: list[Proof] = ( - [Proof.read_proof_data(proof.proof_dir, i) for i in proof.subproof_ids] - if proof.proof_dir is not None - else [] - ) - - apr_subproofs: list[APRProof] = [pf for pf in subproofs if isinstance(pf, APRProof)] - steps.append( APRProofStep( proof_id=proof.id, @@ -1414,9 +1404,6 @@ def exec(self, data: APRProofProcessData) -> APRProofResult: Allowed to be nondeterministic. Able to be called on any `ProofStep` returned by `prover.steps(proof)`. 
""" - - init_kcfg_explore = False - with KoreClient( host='localhost', port=self.port, diff --git a/src/pyk/testing/_kompiler.py b/src/pyk/testing/_kompiler.py index 9aae8f2b1..8eb95a394 100644 --- a/src/pyk/testing/_kompiler.py +++ b/src/pyk/testing/_kompiler.py @@ -228,7 +228,8 @@ def kore_client(self, _kore_server: KoreServer, bug_report: BugReport) -> Iterat class KCFGExploreTest(KoreClientTest, KPrintTest): @abstractmethod - def semantics(self, definition: KDefinition) -> KCFGSemantics: ... + def semantics(self, definition: KDefinition) -> KCFGSemantics: + ... @pytest.fixture def kcfg_explore( diff --git a/src/pyk/utils.py b/src/pyk/utils.py index acaeec3e2..15fa7d9aa 100644 --- a/src/pyk/utils.py +++ b/src/pyk/utils.py @@ -150,14 +150,16 @@ def res(p: P | None) -> R | None: @overload -def tuple_of() -> Callable[[tuple[()]], tuple[()]]: ... +def tuple_of() -> Callable[[tuple[()]], tuple[()]]: + ... @overload def tuple_of( f1: Callable[[P1], R1], /, -) -> Callable[[tuple[P1]], tuple[R1]]: ... +) -> Callable[[tuple[P1]], tuple[R1]]: + ... @overload @@ -165,7 +167,8 @@ def tuple_of( f1: Callable[[P1], R1], f2: Callable[[P2], R2], /, -) -> Callable[[tuple[P1, P2]], tuple[R1, R2]]: ... +) -> Callable[[tuple[P1, P2]], tuple[R1, R2]]: + ... @overload @@ -174,7 +177,8 @@ def tuple_of( f2: Callable[[P2], R2], f3: Callable[[P3], R3], /, -) -> Callable[[tuple[P1, P2, P3]], tuple[R1, R2, R3]]: ... +) -> Callable[[tuple[P1, P2, P3]], tuple[R1, R2, R3]]: + ... @overload @@ -184,7 +188,8 @@ def tuple_of( f3: Callable[[P3], R3], f4: Callable[[P4], R4], /, -) -> Callable[[tuple[P1, P2, P3, P4]], tuple[R1, R2, R3, R4]]: ... +) -> Callable[[tuple[P1, P2, P3, P4]], tuple[R1, R2, R3, R4]]: + ... def tuple_of(*args: Callable) -> Callable: diff --git a/src/tests/unit/test_proof.py b/src/tests/unit/test_proof.py index b78550478..c604ebf24 100644 --- a/src/tests/unit/test_proof.py +++ b/src/tests/unit/test_proof.py @@ -279,12 +279,12 @@ def test_print_failure_info() -> None: path_conditions[5] = '#Top' failure_reasons = {} - failure_reasons[3] = ( - 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 2 #Implies 1' - ) - failure_reasons[5] = ( - 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 5 #Implies 6' - ) + failure_reasons[ + 3 + ] = 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 2 #Implies 1' + failure_reasons[ + 5 + ] = 'Structural matching failed, the following cells failed individually (antecedent #Implies consequent):\nSTATE_CELL: $n |-> 5 #Implies 6' models: dict[int, list[tuple[str, str]]] = {} models[5] = [('X', '101')] From 7bb87754e46b968cd7005870a9c6dc60ff763799 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 15:32:55 -0600 Subject: [PATCH 085/116] Revert changes relating to newer flake8-bugbear changes --- src/pyk/kdist/_kdist.py | 2 +- src/tests/unit/kast/test_kast.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pyk/kdist/_kdist.py b/src/pyk/kdist/_kdist.py index 3adfc6ae8..2e172854c 100644 --- a/src/pyk/kdist/_kdist.py +++ b/src/pyk/kdist/_kdist.py @@ -155,7 +155,7 @@ def _build_target( ): try: target.target.build(output_dir, deps=self._deps(target), args=args, verbose=verbose) - except BaseException as err: # noqa: B036 + except BaseException as err: shutil.rmtree(output_dir, ignore_errors=True) raise RuntimeError(f'Build 
failed: {target_id.full_name}') from err diff --git a/src/tests/unit/kast/test_kast.py b/src/tests/unit/kast/test_kast.py index a8d06e8ed..5d36d2385 100644 --- a/src/tests/unit/kast/test_kast.py +++ b/src/tests/unit/kast/test_kast.py @@ -107,7 +107,7 @@ def test_klabel_init_multiple_values(params: list[KSort]) -> None: actual_message = str(excinfo.value) # Then - actual_message, expected_message # noqa: B018 + actual_message, expected_message @pytest.mark.parametrize('params', KLABEL_TEST_DATA, ids=count()) From dc4d66dd3315f15dcd9ebd859d3284990004a92b Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 15:33:31 -0600 Subject: [PATCH 086/116] Revert poetry.lock --- poetry.lock | 501 ++++++++++++++++++++++++++-------------------------- 1 file changed, 255 insertions(+), 246 deletions(-) diff --git a/poetry.lock b/poetry.lock index c90da08b3..760b48c07 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -13,22 +13,21 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "23.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] +dev = ["attrs[docs,tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "autoflake" @@ -103,13 +102,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] @@ -288,63 +287,63 @@ cron = ["capturer (>=2.4)"] [[package]] name = "coverage" -version = "7.4.1" +version = "7.3.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - 
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -366,13 +365,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = 
"exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -394,13 +393,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.0" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.0-py3-none-any.whl", hash = "sha256:a552f4fde758f4eab33191e9548f671970f8b06d436d31388c9aa1e5861a710f"}, + {file = "filelock-3.13.0.tar.gz", hash = "sha256:63c6052c82a1a24c873a549fbd39a26982e8f35a3016da231ead11a5be9dad44"}, ] [package.extras] @@ -410,29 +409,29 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.0.0" +version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.2.0,<3.3.0" +pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "flake8-bugbear" -version = "24.2.6" +version = "23.9.16" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-bugbear-24.2.6.tar.gz", hash = "sha256:f9cb5f2a9e792dd80ff68e89a14c12eed8620af8b41a49d823b7a33064ac9658"}, - {file = "flake8_bugbear-24.2.6-py3-none-any.whl", hash = "sha256:663ef5de80cd32aacd39d362212983bc4636435a6f83700b4ed35acbd0b7d1b8"}, + {file = "flake8-bugbear-23.9.16.tar.gz", hash = "sha256:90cf04b19ca02a682feb5aac67cae8de742af70538590509941ab10ae8351f71"}, + {file = "flake8_bugbear-23.9.16-py3-none-any.whl", hash = "sha256:b182cf96ea8f7a8595b2f87321d7d9b28728f4d9c3318012d896543d19742cb5"}, ] [package.dependencies] @@ -471,13 +470,13 @@ flake8 = "*" [[package]] name = "flake8-type-checking" -version = "2.9.0" +version = "2.5.1" description = "A flake8 plugin for managing type-checking imports & forward references" optional = false python-versions = ">=3.8" files = [ - {file = "flake8_type_checking-2.9.0-py3-none-any.whl", hash = "sha256:b63e1745f6e7deee1403d7e0150a5bca378315e9fe4d4cdaa7b71338034dbcc3"}, - {file = "flake8_type_checking-2.9.0.tar.gz", hash = "sha256:6fcc0e8a63f6a87b5b26b776388c21907e66c4efbd15dcc1bcbd96fe884da93d"}, + {file = "flake8_type_checking-2.5.1-py3-none-any.whl", hash = "sha256:1cd5cd9731f34921b33640751455643ca1cf7ee4a347a45cd94d3af328a3dd64"}, + {file = "flake8_type_checking-2.5.1.tar.gz", hash = "sha256:bfc51dd6e09a26662ab19191f44102f0606377ec0271a0e764ae993346a206d6"}, ] [package.dependencies] @@ -538,20 +537,20 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "6.8.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -568,17 +567,20 @@ files = [ [[package]] name = "isort" -version = "5.13.2" +version = "5.12.0" description = "A Python utility / library to sort Python imports." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] [package.extras] -colors = ["colorama (>=0.4.6)"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -599,13 +601,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "linkify-it-py" -version = "2.0.3" +version = "2.0.2" description = "Links recognition library with FULL unicode support." optional = false python-versions = ">=3.7" files = [ - {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, - {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, + {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, + {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, ] [package.dependencies] @@ -645,71 +647,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = 
"MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = 
"MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -755,38 +757,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.6.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = 
"mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] [package.dependencies] @@ -797,7 +799,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -824,13 +825,13 @@ files = [ [[package]] name = "pathspec" -version = "0.12.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -849,28 +850,28 @@ flake8 = ">=5.0.0" [[package]] name = "platformdirs" -version = "4.2.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -930,29 +931,28 @@ files = [ [[package]] name = "pyflakes" -version = "3.2.0" +version = "3.1.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] [[package]] name = "pygments" -version = "2.17.2" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyperclip" @@ -977,13 +977,13 @@ files = [ [[package]] name = "pytest" -version = "8.0.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -991,7 +991,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" +pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] @@ -1034,13 +1034,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.5.0" +version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, + {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, ] [package.dependencies] @@ -1089,13 +1089,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, ] [package.dependencies] @@ -1152,50 +1152,56 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools [[package]] name = "sphinxcontrib-applehelp" -version = 
"1.0.8" +version = "1.0.7" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, + {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "1.0.5" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, + {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.0.4" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, + {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -1214,34 +1220,38 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "1.0.6" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, + {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = 
"sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "1.1.9" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, + {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] @@ -1288,24 +1298,24 @@ files = [ [[package]] name = "types-psutil" -version = "5.9.5.20240205" +version = "5.9.5.17" description = "Typing stubs for psutil" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "types-psutil-5.9.5.20240205.tar.gz", hash = "sha256:51df36a361aa597bf483dcc5b58f2ab7aa87452a36d2da97c90994d6a81ef743"}, - {file = "types_psutil-5.9.5.20240205-py3-none-any.whl", hash = "sha256:3ec9bd8b95a64fe1269241d3ffb74b94a45df2d0391da1402423cd33f29745ca"}, + {file = "types-psutil-5.9.5.17.tar.gz", hash = "sha256:f7d8769812d72a4b513d7ec9eb5580fe2f6013fc270394a603cb6534811f3e4d"}, + {file = "types_psutil-5.9.5.17-py3-none-any.whl", hash = "sha256:2161d166256084acf629d30aaf6bda8bee726ae1fea530559650281056b491fc"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -1324,30 +1334,29 @@ test = ["coverage", "pytest", "pytest-cov"] [[package]] name = "urllib3" -version = "2.2.0" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] @@ -1379,4 +1388,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "6d3d47508975a4963c527a346ba075b350e37c72e8b98b82d3c0420c1d1c5f20" +content-hash = "ad2830bf989de6a9820095054fd33f29d9515b5beda89038490ca6297170803b" From 4d67d174870fa357a4e081d77ca81a67b5e23ceb Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 15:39:34 -0600 Subject: [PATCH 087/116] Revert changes relating to newer flake8-bugbear changes --- src/pyk/kllvm/convert.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/pyk/kllvm/convert.py b/src/pyk/kllvm/convert.py index 25aa81cd6..7bdd01867 100644 --- a/src/pyk/kllvm/convert.py +++ b/src/pyk/kllvm/convert.py @@ -168,16 +168,16 @@ def llvm_to_sentence(decl: kllvm.Declaration) -> Sentence: case kllvm.ModuleImportDeclaration(): # type: ignore return Import(decl.module_name, attrs) case kllvm.CompositeSortDeclaration(): # type: ignore - return SortDecl(decl.name, vars, attrs, hooked=decl.is_hooked) # type: ignore + return SortDecl(decl.name, vars, attrs, hooked=decl.is_hooked) case kllvm.SymbolDeclaration(): # type: ignore llvm_to_symbol = decl.symbol - symbol = Symbol(llvm_to_symbol.name, vars) # type: ignore + symbol = Symbol(llvm_to_symbol.name, vars) param_sorts = (llvm_to_sort(sort) for sort in llvm_to_symbol.arguments) sort = llvm_to_sort(llvm_to_symbol.sort) return SymbolDecl(symbol, param_sorts, sort, attrs, hooked=decl.is_hooked) case kllvm.AliasDeclaration(): # type: ignore llvm_to_symbol = decl.symbol - symbol = Symbol(llvm_to_symbol.name, vars) # type: ignore + symbol = Symbol(llvm_to_symbol.name, vars) param_sorts = (llvm_to_sort(sort) for sort in llvm_to_symbol.arguments) sort = llvm_to_sort(llvm_to_symbol.sort) left = App(*_unpack_composite_pattern(decl.variables)) @@ -186,9 +186,9 @@ def llvm_to_sentence(decl: kllvm.Declaration) -> Sentence: case kllvm.AxiomDeclaration(): # type: ignore pattern = llvm_to_pattern(decl.pattern) if decl.is_claim: - return Claim(vars, pattern, attrs) # type: ignore + return Claim(vars, 
pattern, attrs) else: - return Axiom(vars, pattern, attrs) # type: ignore + return Axiom(vars, pattern, attrs) case _: raise AssertionError() From 39b7cb1535c233b80041ebdb05dfe5692e071035 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 15:40:46 -0600 Subject: [PATCH 088/116] Revert changes relating to newer flake8-bugbear changes --- src/pyk/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyk/utils.py b/src/pyk/utils.py index 15fa7d9aa..2a7d7e09a 100644 --- a/src/pyk/utils.py +++ b/src/pyk/utils.py @@ -92,7 +92,7 @@ def check_type(x: Any, typ: type[T]) -> T: def raised(f: Callable, *args: Any, **kwargs: Any) -> BaseException | None: try: f(*args, **kwargs) - except BaseException as e: # noqa: B036 + except BaseException as e: return e return None From ab95cbe32b0d6b7da3c679517c4e880086bf7711 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 18:47:15 -0600 Subject: [PATCH 089/116] Remove unnecessary class --- src/pyk/proof/parallel.py | 12 ++++------ src/pyk/proof/reachability.py | 2 +- .../integration/proof/test_parallel_prove.py | 23 ++++++------------- 3 files changed, 12 insertions(+), 25 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index c8833e73f..40ef29ae0 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -5,7 +5,7 @@ from multiprocessing import Process, Queue # from concurrent.futures import CancelledError, ProcessPoolExecutor, wait -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, TypeVar from pyk.proof.proof import ProofStatus @@ -15,7 +15,7 @@ P = TypeVar('P', bound='Proof') U = TypeVar('U') -D = TypeVar('D', bound='ProcessData') +D = TypeVar('D') class Prover(ABC, Generic[P, U, D]): @@ -69,10 +69,6 @@ def status(self) -> ProofStatus: ... -class ProcessData(ABC): - ... - - class ProofStep(ABC, Generic[U, D]): """ Should be a description of a computation needed to make progress on a `Proof`. 
@@ -96,7 +92,7 @@ def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], max_workers: int, - process_data: ProcessData, + process_data: Any, ) -> Iterable[Proof]: explored: set[tuple[str, ProofStep]] = set() @@ -112,7 +108,7 @@ def prove_parallel( total_init_time = time.time_ns() - def run_process(data: ProcessData) -> None: + def run_process(data: Any) -> None: while True: dequeued = in_queue.get() if dequeued == 0: diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index db48d82d9..f2b15a39f 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1119,7 +1119,7 @@ class APRProofSubsumeResult(APRProofResult): csubst: CSubst | None -class APRProofProcessData(parallel.ProcessData): +class APRProofProcessData: kprint: KPrint kcfg_semantics: KCFGSemantics | None diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index c4d011372..fd9e9d29f 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -4,7 +4,7 @@ import time from dataclasses import dataclass -from pyk.proof.parallel import ProcessData, Proof, ProofStep, Prover, prove_parallel +from pyk.proof.parallel import Proof, ProofStep, Prover, prove_parallel from pyk.proof.proof import ProofStatus @@ -34,23 +34,18 @@ def status(self) -> ProofStatus: return ProofStatus.PENDING -class TreeExploreProofProcessData(ProcessData): - def cleanup(self) -> None: - pass - - @dataclass(frozen=True) -class TreeExploreProofStep(ProofStep[int, TreeExploreProofProcessData]): +class TreeExploreProofStep(ProofStep[int, None]): node: int - def exec(self, data: TreeExploreProofProcessData) -> int: + def exec(self, data: None) -> int: print(f'exec {self.node}', file=sys.stderr) time.sleep(1) print(f'done {self.node}', file=sys.stderr) return self.node -class TreeExploreProver(Prover[TreeExploreProof, int, TreeExploreProofProcessData]): +class TreeExploreProver(Prover[TreeExploreProof, int, None]): def __init__(self) -> None: return @@ -101,9 +96,7 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) - results = prove_parallel( - {'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=TreeExploreProofProcessData() - ) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED @@ -112,9 +105,7 @@ def test_parallel_prove() -> None: def test_parallel_fail() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) - results = prove_parallel( - {'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=TreeExploreProofProcessData() - ) + results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.FAILED @@ -128,7 +119,7 @@ def test_parallel_multiple_proofs() -> None: proofs, provers_map, max_workers=4, - process_data=TreeExploreProofProcessData(), + process_data=None, ) assert len(list(results)) == 3 for proof in proofs.values(): From 3476c15f441a26301b4a3adbb5f44b4ae9ed555c Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 18:48:08 -0600 Subject: [PATCH 
090/116] Remove print messages --- src/tests/integration/proof/test_parallel_prove.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index fd9e9d29f..679bcf3e6 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys import time from dataclasses import dataclass @@ -39,9 +38,7 @@ class TreeExploreProofStep(ProofStep[int, None]): node: int def exec(self, data: None) -> int: - print(f'exec {self.node}', file=sys.stderr) time.sleep(1) - print(f'done {self.node}', file=sys.stderr) return self.node From 58f865be628ecddd3a0a925bdd2d1cd2f9e60c72 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 18:56:01 -0600 Subject: [PATCH 091/116] Encapsulate profiling info for parallel_prove --- src/pyk/proof/parallel.py | 39 +++++++++++-------- .../integration/proof/test_imp_parallel.py | 2 +- .../integration/proof/test_parallel_prove.py | 6 +-- 3 files changed, 26 insertions(+), 21 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 40ef29ae0..54593dbb8 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -2,6 +2,7 @@ import time from abc import ABC, abstractmethod +from dataclasses import dataclass from multiprocessing import Process, Queue # from concurrent.futures import CancelledError, ProcessPoolExecutor, wait @@ -88,12 +89,20 @@ def exec(self, data: D) -> U: ... +@dataclass +class ProfilingInfo: + total_commit_time = 0 + total_steps_time = 0 + total_wait_time = 0 + total_time = 0 + + def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], max_workers: int, process_data: Any, -) -> Iterable[Proof]: +) -> tuple[Iterable[Proof], ProfilingInfo]: explored: set[tuple[str, ProofStep]] = set() in_queue: Queue = Queue() @@ -101,10 +110,7 @@ def prove_parallel( pending_jobs: int = 0 - total_commit_time = 0 - total_steps_time = 0 - total_process_time = 0 - total_time = 0 + profile = ProfilingInfo() total_init_time = time.time_ns() @@ -122,8 +128,7 @@ def submit(proof_id: str) -> None: prover = provers[proof_id] steps_init_time = time.time_ns() steps = prover.steps(proof) - nonlocal total_steps_time - total_steps_time += time.time_ns() - steps_init_time + profile.total_steps_time += time.time_ns() - steps_init_time for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) if (proof_id, step) in explored: continue @@ -142,7 +147,7 @@ def submit(proof_id: str) -> None: while pending_jobs > 0: wait_init_time = time.time_ns() proof_id, update = out_queue.get() - total_process_time += time.time_ns() - wait_init_time + profile.total_wait_time += time.time_ns() - wait_init_time pending_jobs -= 1 proof = proofs[proof_id] @@ -150,7 +155,7 @@ def submit(proof_id: str) -> None: commit_init_time = time.time_ns() prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) - total_commit_time += time.time_ns() - commit_init_time + profile.total_commit_time += time.time_ns() - commit_init_time match proof.status: # terminate on first failure, yield partial results, etc. @@ -160,12 +165,12 @@ def submit(proof_id: str) -> None: steps_init_time = time.time_ns() if not list(prover.steps(proof)): raise ValueError('Prover violated expectation. 
status is pending with no further steps.') - total_steps_time += time.time_ns() - steps_init_time + profile.total_steps_time += time.time_ns() - steps_init_time case ProofStatus.PASSED: steps_init_time = time.time_ns() if list(prover.steps(proof)): raise ValueError('Prover violated expectation. status is passed with further steps.') - total_steps_time += time.time_ns() - steps_init_time + profile.total_steps_time += time.time_ns() - steps_init_time submit(proof_id) @@ -175,11 +180,11 @@ def submit(proof_id: str) -> None: for process in processes: process.join() - total_time = time.time_ns() - total_init_time + profile.total_time = time.time_ns() - total_init_time - print(f'total time: {total_time / 1000000000}') - print(f'steps time: {total_steps_time / 1000000000}') - print(f'commit time: {total_commit_time / 1000000000}') - print(f'process time: {total_process_time / 1000000000}') + # print(f'total time in prove_parallel,: {total_time / 1000000000}s') + # print(f'total time executing steps(): {total_steps_time / 1000000000}s') + # print(f'total time executing commit(): {total_commit_time / 1000000000}s') + # print(f'time waiting for worker threads: {total_wait_time / 1000000000}s') - return proofs.values() + return proofs.values(), profile diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index a9db3ebcf..6159101af 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -105,7 +105,7 @@ def test_imp_parallel_prove( module_name=kprove.main_module, ) - results = prove_parallel( + results, _ = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, max_workers=1, diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 679bcf3e6..28083bab1 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -93,7 +93,7 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) + results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED @@ -102,7 +102,7 @@ def test_parallel_prove() -> None: def test_parallel_fail() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) - results = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) + results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.FAILED @@ -112,7 +112,7 @@ def test_parallel_multiple_proofs() -> None: prover = TreeExploreProver() proofs = {f'proof{i}': TreeExploreProof(0, 9, SIMPLE_TREE, set()) for i in range(3)} provers_map = {f'proof{i}': prover for i in range(3)} - results = prove_parallel( + results, _ = prove_parallel( proofs, provers_map, max_workers=4, From 21c16793d4b53dcc8b4f136c815e81658c8009b8 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 18:56:26 -0600 Subject: [PATCH 092/116] Remove commented lines --- 
src/pyk/proof/parallel.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 54593dbb8..e57110548 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -182,9 +182,4 @@ def submit(proof_id: str) -> None: profile.total_time = time.time_ns() - total_init_time - # print(f'total time in prove_parallel,: {total_time / 1000000000}s') - # print(f'total time executing steps(): {total_steps_time / 1000000000}s') - # print(f'total time executing commit(): {total_commit_time / 1000000000}s') - # print(f'time waiting for worker threads: {total_wait_time / 1000000000}s') - return proofs.values(), profile From c1dbe977f656e1629306875d067cbe3a76351458 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 18:57:18 -0600 Subject: [PATCH 093/116] Remove commented lines --- src/pyk/proof/reachability.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index f2b15a39f..ac0c68192 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1134,8 +1134,6 @@ class APRProofProcessData: haskell_log_entries: Iterable[str] log_axioms_file: Path | None - # kore_servers: dict[str, KoreServer] - def __init__( self, kprint: KPrint, @@ -1153,7 +1151,6 @@ def __init__( ) -> None: self.kprint = kprint self.kcfg_semantics = kcfg_semantics - # self.kore_servers = {} self.definition_dir = definition_dir self.llvm_definition_dir = llvm_definition_dir self.module_name = module_name From 215514e57933358344e1fda86cf18495a65a2147 Mon Sep 17 00:00:00 2001 From: devops Date: Thu, 8 Feb 2024 00:58:49 +0000 Subject: [PATCH 094/116] Set Version: 0.1.618 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 049ce37a5..f321df1ef 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.617' -release = '0.1.617' +version = '0.1.618' +release = '0.1.618' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 9a5f47f39..f814f2605 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.617 +0.1.618 diff --git a/pyproject.toml b/pyproject.toml index 1aa43ef4e..dfab17554 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.617" +version = "0.1.618" description = "" authors = [ "Runtime Verification, Inc. 
", From e43535c8c27e3f63a1feaad1d18701f0b6d3081a Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 21:24:40 -0600 Subject: [PATCH 095/116] Integrate max_iterations into APRParallelProver --- src/pyk/proof/reachability.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index ac0c68192..0bdb40d54 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1184,6 +1184,9 @@ class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProces total_cterm_implies_time: int total_cterm_extend_time: int + max_iterations: int | None + iterations: int + def __init__( self, proof: APRProof, @@ -1207,6 +1210,7 @@ def __init__( haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, haskell_log_entries: Iterable[str] = (), log_axioms_file: Path | None = None, + max_iterations: int | None = None, ) -> None: self.execute_depth = execute_depth self.cut_point_rules = cut_point_rules @@ -1235,6 +1239,8 @@ def __init__( ) self.prover = APRProver(proof=proof, kcfg_explore=self.kcfg_explore) self.prover._check_all_terminals() + self.max_iterations = max_iterations + self.iterations = 0 def __del__(self) -> None: self.client.close() @@ -1246,6 +1252,8 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: Must not modify `self` or `proof`. The output of this function must only change with calls to `self.commit()`. """ + if self.max_iterations is not None and self.iterations >= self.max_iterations: + return [] steps: list[APRProofStep] = [] target_node = proof.kcfg.node(proof.target) @@ -1290,6 +1298,9 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. """ + if self.max_iterations is not None and self.iterations >= self.max_iterations: + return + self.prover._check_all_terminals() self.total_cterm_extend_time += update.extend_cterm_time @@ -1313,6 +1324,10 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: proof.write_proof_data() + self.iterations += 1 + if self.max_iterations is not None and self.iterations >= self.max_iterations: + _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') + class ParallelAPRBMCProver(ParallelAPRProver): def __init__( From a8e50410707ef35ddfd98526315e36dff99fa09e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 7 Feb 2024 21:45:09 -0600 Subject: [PATCH 096/116] Fix test case --- src/tests/integration/k-files/imp-simple-spec.k | 8 ++++++-- src/tests/integration/proof/test_imp_parallel.py | 6 ++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/tests/integration/k-files/imp-simple-spec.k b/src/tests/integration/k-files/imp-simple-spec.k index a2def585d..299131530 100644 --- a/src/tests/integration/k-files/imp-simple-spec.k +++ b/src/tests/integration/k-files/imp-simple-spec.k @@ -49,6 +49,7 @@ endmodule module IMP-SIMPLE-SPEC imports IMP-VERIFICATION imports IMP-SIMPLE-SPEC-DEPENDENCIES + imports K-EQUAL-SYNTAX claim [addition-1]: 3 + 4 => 7 ... @@ -97,19 +98,22 @@ module IMP-SIMPLE-SPEC claim [long-branches]: - if(_B:Bool) { + if(B:Bool) { $n = 1; } else { $n = 2; } $s = 10; - while (0 <= $s) { + while (1 <= $s) { $s = $s + -1; } => . ... 
+ $s |-> (10 => 0) + $n |-> (0 => (#if B:Bool #then 1 #else 2 #fi)) + claim [failing-if]: if(_B:Bool) { $n = 1 ; } else { $n = 2 ; } => . ... $n |-> (0 => 1) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 6159101af..ffbf5c81a 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -21,7 +21,6 @@ from pyk.kcfg.explore import KCFGExplore from pyk.kcfg.semantics import KCFGSemantics from pyk.kore.rpc import KoreServer - from pyk.ktool.kprint import KPrint from pyk.ktool.kprove import KProve PARALLEL_PROVE_TEST_DATA = ( @@ -58,7 +57,6 @@ def test_imp_parallel_prove( admit_deps: bool, kcfg_explore: KCFGExplore, kprove: KProve, - kprint: KPrint, proof_dir: Path, _kore_server: KoreServer, ) -> None: @@ -87,7 +85,7 @@ def test_imp_parallel_prove( module_name=kprove.main_module, definition_dir=kprove.definition_dir, execute_depth=100, - kprint=kprint, + kprint=kprove, kcfg_semantics=semantics, id=claim_id, trace_rewrites=False, @@ -99,7 +97,7 @@ def test_imp_parallel_prove( ) process_data = APRProofProcessData( - kprint=kprint, + kprint=kprove, kcfg_semantics=semantics, definition_dir=kprove.definition_dir, module_name=kprove.main_module, From 37df3d152766d50648dd076ea8932f9eb04c3e9a Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 8 Feb 2024 16:09:50 -0600 Subject: [PATCH 097/116] Implement fail_fast --- src/pyk/proof/reachability.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 03b29a20c..652a62001 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1186,6 +1186,7 @@ class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProces max_iterations: int | None iterations: int + fail_fast: bool def __init__( self, @@ -1211,6 +1212,7 @@ def __init__( haskell_log_entries: Iterable[str] = (), log_axioms_file: Path | None = None, max_iterations: int | None = None, + fail_fast: bool = False, ) -> None: self.execute_depth = execute_depth self.cut_point_rules = cut_point_rules @@ -1241,6 +1243,7 @@ def __init__( self.prover._check_all_terminals() self.max_iterations = max_iterations self.iterations = 0 + self.fail_fast = fail_fast def __del__(self) -> None: self.client.close() @@ -1254,6 +1257,13 @@ def steps(self, proof: APRProof) -> Iterable[APRProofStep]: """ if self.max_iterations is not None and self.iterations >= self.max_iterations: return [] + + if self.fail_fast and proof.failed: + _LOGGER.warning( + f'Terminating proof early because fail_fast is set {proof.id}, failing nodes: {[nd.id for nd in proof.failing]}' + ) + return [] + steps: list[APRProofStep] = [] target_node = proof.kcfg.node(proof.target) @@ -1328,6 +1338,8 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: if self.max_iterations is not None and self.iterations >= self.max_iterations: _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') + print(f'fail_fast: {self.fail_fast}') + class ParallelAPRBMCProver(ParallelAPRProver): def __init__( @@ -1353,6 +1365,8 @@ def __init__( haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, haskell_log_entries: Iterable[str] = (), log_axioms_file: Path | None = None, + max_iterations: int | None = None, + fail_fast: bool = False, ) -> None: self.execute_depth = execute_depth self.cut_point_rules = cut_point_rules @@ -1361,8 +1375,11 @@ def __init__( 
self.kcfg_semantics = kcfg_semantics self.id = id self.trace_rewrites = trace_rewrites + self.port = port self.bug_report = bug_report self.bug_report_id = bug_report_id + self.total_cterm_extend_time = 0 + self.total_cterm_implies_time = 0 self.client = KoreClient( host='localhost', port=self.port, @@ -1377,6 +1394,10 @@ def __init__( trace_rewrites=self.trace_rewrites, ) self.prover = APRBMCProver(proof=proof, kcfg_explore=self.kcfg_explore) + self.prover._check_all_terminals() + self.max_iterations = max_iterations + self.iterations = 0 + self.fail_fast = fail_fast @dataclass(frozen=True, eq=True) From e0fda99647c46221c1df7c43f6c19bdb66b7b12e Mon Sep 17 00:00:00 2001 From: devops Date: Thu, 8 Feb 2024 22:10:08 +0000 Subject: [PATCH 098/116] Set Version: 0.1.619 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f321df1ef..6104f3479 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.618' -release = '0.1.618' +version = '0.1.619' +release = '0.1.619' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index f814f2605..0c5bcacb3 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.618 +0.1.619 diff --git a/pyproject.toml b/pyproject.toml index dfab17554..b64125e61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.618" +version = "0.1.619" description = "" authors = [ "Runtime Verification, Inc. ", From 6c797c2dc9e676a5ad900c09b3f84c200154cc22 Mon Sep 17 00:00:00 2001 From: devops Date: Fri, 9 Feb 2024 17:25:11 +0000 Subject: [PATCH 099/116] Set Version: 0.1.620 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6104f3479..7e98bdab0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.619' -release = '0.1.619' +version = '0.1.620' +release = '0.1.620' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 0c5bcacb3..d859e87da 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.619 +0.1.620 diff --git a/pyproject.toml b/pyproject.toml index b64125e61..19e4e7984 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.619" +version = "0.1.620" description = "" authors = [ "Runtime Verification, Inc. 
", From 24a472fa2472e3a41b2b60f99bf4e04a9a9e3e45 Mon Sep 17 00:00:00 2001 From: Noah Watson <107630091+nwatson22@users.noreply.github.com> Date: Fri, 9 Feb 2024 11:33:42 -0600 Subject: [PATCH 100/116] Update src/pyk/proof/reachability.py --- src/pyk/proof/reachability.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 774caf6d2..691662bd3 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1348,8 +1348,6 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: if self.max_iterations is not None and self.iterations >= self.max_iterations: _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') - print(f'fail_fast: {self.fail_fast}') - class ParallelAPRBMCProver(ParallelAPRProver): def __init__( From fd177cc7eb80f6ecd3d059f8533307a230bf1a0b Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 9 Feb 2024 18:15:49 -0600 Subject: [PATCH 101/116] reimplement bmc checking for parallel proofs --- src/pyk/proof/reachability.py | 49 ++++++++++++++++- .../integration/k-files/imp-simple-spec.k | 7 +++ .../integration/proof/test_imp_parallel.py | 55 ++++++++++++++++++- 3 files changed, 108 insertions(+), 3 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 652a62001..c1421a3f3 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1338,10 +1338,11 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: if self.max_iterations is not None and self.iterations >= self.max_iterations: _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') - print(f'fail_fast: {self.fail_fast}') - class ParallelAPRBMCProver(ParallelAPRProver): + proof: APRBMCProof + _checked_nodes: list[int] + def __init__( self, proof: APRBMCProof, @@ -1398,6 +1399,50 @@ def __init__( self.max_iterations = max_iterations self.iterations = 0 self.fail_fast = fail_fast + self._checked_nodes = [] + + def commit(self, proof: APRProof, update: APRProofResult) -> None: + """ + Should update `proof` according to `update`. + If `steps()` or `commit()` has been called on a proof `proof`, `commit()` may never again be called on `proof`. + Must only be called with an `update` that was returned by `step.execute()` where `step` was returned by `self.steps(proof)`. + Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. 
+ """ + + if not isinstance(proof, APRBMCProof): + raise ValueError(f'Proof {proof.id} cannot be used with ParallelAPRBMCProver.') + + if self.max_iterations is not None and self.iterations >= self.max_iterations: + return + + if type(update) is APRProofExtendResult: + node = proof.kcfg.node(update.node_id) + if node.id not in self._checked_nodes: + _LOGGER.info(f'Checking bmc depth for node {proof.id}: {node.id}') + self._checked_nodes.append(node.id) + _prior_loops = [ + succ.source.id + for succ in proof.shortest_path_to(node.id) + if self.kcfg_explore.kcfg_semantics.same_loop(succ.source.cterm, node.cterm) + ] + prior_loops: list[NodeIdLike] = [] + for _pl in _prior_loops: + if not ( + proof.kcfg.zero_depth_between(_pl, node.id) + or any(proof.kcfg.zero_depth_between(_pl, pl) for pl in prior_loops) + ): + prior_loops.append(_pl) + _LOGGER.info(f'Prior loop heads for node {proof.id}: {(node.id, prior_loops)}') + if len(prior_loops) > proof.bmc_depth: + proof.add_bounded(node.id) + + self.iterations += 1 + if self.max_iterations is not None and self.iterations >= self.max_iterations: + _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') + + return + + super().commit(proof, update) @dataclass(frozen=True, eq=True) diff --git a/src/tests/integration/k-files/imp-simple-spec.k b/src/tests/integration/k-files/imp-simple-spec.k index 299131530..d1ba40473 100644 --- a/src/tests/integration/k-files/imp-simple-spec.k +++ b/src/tests/integration/k-files/imp-simple-spec.k @@ -84,6 +84,13 @@ module IMP-SIMPLE-SPEC requires 0 <=Int S ensures 0 while ($n <= $s) { $n = $n + 1 ; } + => . ... + + ($n |-> (0 => (?N))) + ($s |-> S:Int) + + claim [bmc-two-loops-symbolic]: while ($s <= $n) { $n = $n + -1 ; } while ($k <= $m) { $m = $m + -1 ; } $i = 1; diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index ffbf5c81a..425274133 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -5,9 +5,10 @@ import pytest +from pyk.proof.show import KCFGShow, NodePrinter from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus -from pyk.proof.reachability import APRProof, APRProofProcessData, ParallelAPRProver +from pyk.proof.reachability import APRBMCProof, APRProof, APRProofProcessData, ParallelAPRBMCProver, ParallelAPRProver from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest from pyk.utils import single @@ -112,3 +113,55 @@ def test_imp_parallel_prove( assert len(list(results)) == 1 assert list(results)[0].status == expected_status + + def test_imp_bmc_parallel_prove( + self, + kcfg_explore: KCFGExplore, + kprove: KProve, + proof_dir: Path, + _kore_server: KoreServer, + ) -> None: + claim_id = 'bmc-infinite-loop' + expected_status = ProofStatus.PASSED + + spec_file = K_FILES / 'imp-simple-spec.k' + spec_module = 'IMP-SIMPLE-SPEC' + + spec_label = f'{spec_module}.{claim_id}' + + claim = single(kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[spec_label])) + proof = APRBMCProof.from_claim_with_bmc_depth(kprove.definition, claim, 5) + + semantics = self.semantics(kprove.definition) + parallel_prover = ParallelAPRBMCProver( + proof=proof, + module_name=kprove.main_module, + definition_dir=kprove.definition_dir, + execute_depth=100, + kprint=kprove, + kcfg_semantics=semantics, + id=claim_id, + trace_rewrites=False, + cut_point_rules= ['IMP.while'], + terminal_rules=(), + bug_report=None, + 
bug_report_id=None, + port=_kore_server.port, + ) + + process_data = APRProofProcessData( + kprint=kprove, + kcfg_semantics=semantics, + definition_dir=kprove.definition_dir, + module_name=kprove.main_module, + ) + + results, _ = prove_parallel( + proofs={'proof1': proof}, + provers={'proof1': parallel_prover}, + max_workers=1, + process_data=process_data, + ) + + assert len(list(results)) == 1 + assert list(results)[0].status == expected_status From 238ffb0906233e86cc77a24c261869a7c58e3f25 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 9 Feb 2024 18:16:38 -0600 Subject: [PATCH 102/116] Fix formatting --- src/tests/integration/proof/test_imp_parallel.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 425274133..128dc9de9 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -5,7 +5,6 @@ import pytest -from pyk.proof.show import KCFGShow, NodePrinter from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus from pyk.proof.reachability import APRBMCProof, APRProof, APRProofProcessData, ParallelAPRBMCProver, ParallelAPRProver @@ -142,7 +141,7 @@ def test_imp_bmc_parallel_prove( kcfg_semantics=semantics, id=claim_id, trace_rewrites=False, - cut_point_rules= ['IMP.while'], + cut_point_rules=['IMP.while'], terminal_rules=(), bug_report=None, bug_report_id=None, From 47bfa4ac2868717840cc6f83192860f19db7351e Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Fri, 9 Feb 2024 20:45:42 -0600 Subject: [PATCH 103/116] Set haskell option in the tests --- src/pyk/testing/_kompiler.py | 2 ++ src/tests/integration/proof/test_imp_parallel.py | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/pyk/testing/_kompiler.py b/src/pyk/testing/_kompiler.py index 8eb95a394..e8b5d1c96 100644 --- a/src/pyk/testing/_kompiler.py +++ b/src/pyk/testing/_kompiler.py @@ -1,5 +1,6 @@ from __future__ import annotations +import os from abc import ABC, abstractmethod from enum import Enum from typing import ClassVar # noqa: TC003 @@ -192,6 +193,7 @@ def _kore_server( llvm_dir: Path | None, bug_report: BugReport | None, ) -> Iterator[KoreServer]: + os.environ["GHCRTS"] = '-N2' match server_type: case ServerType.LEGACY: assert not llvm_dir diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 128dc9de9..b61764eea 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -60,6 +60,7 @@ def test_imp_parallel_prove( proof_dir: Path, _kore_server: KoreServer, ) -> None: + spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' @@ -106,7 +107,7 @@ def test_imp_parallel_prove( results, _ = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, - max_workers=1, + max_workers=2, process_data=process_data, ) @@ -120,6 +121,7 @@ def test_imp_bmc_parallel_prove( proof_dir: Path, _kore_server: KoreServer, ) -> None: + claim_id = 'bmc-infinite-loop' expected_status = ProofStatus.PASSED @@ -158,7 +160,7 @@ def test_imp_bmc_parallel_prove( results, _ = prove_parallel( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, - max_workers=1, + max_workers=2, process_data=process_data, ) From 4c745a714d554779290e49843c50f0f7834af6d3 Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 17:34:25 +0000 Subject: 
[PATCH 104/116] proof/reachability: inline BMC logic into APRProof and APRProver --- src/pyk/proof/reachability.py | 389 +++++++--------------------------- 1 file changed, 80 insertions(+), 309 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index e8dea7454..3b6c1eac4 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -41,11 +41,15 @@ class APRProof(Proof, KCFGExploration): https://doi.org/10.23638/LMCS-15(2:5)2019 Note that reachability logic formula `phi =>A psi` has *not* the same meaning as CTL/CTL*'s `phi -> AF psi`, since reachability logic ignores infinite traces. + This implementation extends the above with bounded model checking, allowing the user + to specify an optional loop iteration bound for each loop in execution. """ node_refutations: dict[int, RefutationProof] # TODO _node_refutatations init: int target: int + bmc_depth: int | None + _bounded: set[int] logs: dict[int, tuple[LogEntry, ...]] circularity: bool failure_info: APRFailureInfo | None @@ -58,6 +62,8 @@ def __init__( init: NodeIdLike, target: NodeIdLike, logs: dict[int, tuple[LogEntry, ...]], + bmc_depth: int | None = None, + bounded: Iterable[int] | None = None, proof_dir: Path | None = None, node_refutations: dict[int, str] | None = None, subproof_ids: Iterable[str] = (), @@ -70,6 +76,8 @@ def __init__( self.failure_info = None self.init = kcfg._resolve(init) self.target = kcfg._resolve(target) + self.bmc_depth = bmc_depth + self._bounded = set(bounded) if bounded is not None else set() self.logs = logs self.circularity = circularity self.node_refutations = {} @@ -104,11 +112,20 @@ def pending(self) -> list[KCFG.Node]: def failing(self) -> list[KCFG.Node]: return [nd for nd in self.kcfg.leaves if self.is_failing(nd.id)] + @property + def bounded(self) -> list[KCFG.Node]: + return [nd for nd in self.kcfg.leaves if self.is_bounded(nd.id)] + def is_refuted(self, node_id: NodeIdLike) -> bool: return self.kcfg._resolve(node_id) in self.node_refutations.keys() def is_pending(self, node_id: NodeIdLike) -> bool: - return self.is_explorable(node_id) and not self.is_target(node_id) and not self.is_refuted(node_id) + return ( + self.is_explorable(node_id) + and not self.is_target(node_id) + and not self.is_refuted(node_id) + and not self.is_bounded(node_id) + ) def is_init(self, node_id: NodeIdLike) -> bool: return self.kcfg._resolve(node_id) == self.kcfg._resolve(self.init) @@ -123,8 +140,18 @@ def is_failing(self, node_id: NodeIdLike) -> bool: and not self.is_target(node_id) and not self.is_refuted(node_id) and not self.kcfg.is_vacuous(node_id) + and not self.is_bounded(node_id) ) + def is_bounded(self, node_id: NodeIdLike) -> bool: + return self.kcfg._resolve(node_id) in self._bounded + + def add_bounded(self, nid: NodeIdLike) -> None: + self._bounded.add(self.kcfg._resolve(nid)) + + def remove_bounded(self, node_id: int) -> None: + self._bounded.discard(node_id) + def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: spb = self.kcfg.shortest_path_between(self.init, node_id) assert spb is not None @@ -132,6 +159,8 @@ def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: def prune(self, node_id: NodeIdLike, keep_nodes: Iterable[NodeIdLike] = ()) -> list[int]: pruned_nodes = super().prune(node_id, keep_nodes=list(keep_nodes) + [self.init, self.target]) + for nid in pruned_nodes: + self.remove_bounded(nid) return pruned_nodes @staticmethod @@ -178,6 +207,9 @@ def from_dict(cls: type[APRProof], dct: 
Mapping[str, Any], proof_dir: Path | Non else: logs = {} + bounded = dct['bounded'] + bmc_depth = dct['bmc_depth'] if 'bmc_depth' in dct else None + return APRProof( id, kcfg, @@ -185,6 +217,8 @@ def from_dict(cls: type[APRProof], dct: Mapping[str, Any], proof_dir: Path | Non init_node, target_node, logs=logs, + bmc_depth=bmc_depth, + bounded=bounded, circularity=circularity, admitted=admitted, proof_dir=proof_dir, @@ -198,6 +232,7 @@ def from_claim( claim: KClaim, logs: dict[int, tuple[LogEntry, ...]], proof_dir: Path | None = None, + bmc_depth: int | None = None, **kwargs: Any, ) -> APRProof: kcfg_dir = proof_dir / claim.label / 'kcfg' if proof_dir is not None else None @@ -210,6 +245,7 @@ def from_claim( init=init_node, target=target_node, logs=logs, + bmc_depth=bmc_depth, proof_dir=proof_dir, circularity=claim.is_circularity, admitted=claim.is_trusted, @@ -319,6 +355,9 @@ def dict(self) -> dict[str, Any]: dct['terminal'] = sorted(self._terminal) dct['init'] = self.init dct['target'] = self.target + dct['bounded'] = list(self._bounded) + if self.bmc_depth is not None: + dct['bmc_depth'] = self.bmc_depth dct['node_refutations'] = {node_id: proof.id for (node_id, proof) in self.node_refutations.items()} dct['circularity'] = self.circularity logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} @@ -341,6 +380,8 @@ def summary(self) -> CompositeSummary: len(self.kcfg.stuck), len(self._terminal), len(self.node_refutations), + self.bmc_depth, + len(self._bounded), len(self.subproof_ids), ), *subproofs_summaries, @@ -359,6 +400,8 @@ def read_proof_data(proof_dir: Path, id: str) -> APRProof: kcfg = KCFG.read_cfg_data(cfg_dir, id) init = int(proof_dict['init']) target = int(proof_dict['target']) + bounded = proof_dict['bounded'] + bmc_depth = int(proof_dict['bmc_depth']) if 'bmc_depth' in proof_dict else None circularity = bool(proof_dict['circularity']) admitted = bool(proof_dict['admitted']) terminal = proof_dict['terminal'] @@ -372,6 +415,8 @@ def read_proof_data(proof_dir: Path, id: str) -> APRProof: terminal=terminal, init=init, target=target, + bounded=bounded, + bmc_depth=bmc_depth, logs=logs, circularity=circularity, admitted=admitted, @@ -403,6 +448,10 @@ def write_proof_data(self) -> None: logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} dct['logs'] = logs + dct['bounded'] = sorted(self._bounded) + if self.bmc_depth is not None: + dct['bmc_depth'] = self.bmc_depth + proof_json.write_text(json.dumps(dct)) _LOGGER.info(f'Wrote proof data for {self.id}: {proof_json}') self.kcfg.write_cfg_data() @@ -474,225 +523,6 @@ def construct_node_refutation(self, node: KCFG.Node) -> RefutationProof | None: return refutation -class APRBMCProof(APRProof): - """APRBMCProof and APRBMCProver perform bounded model-checking of an all-path reachability logic claim.""" - - bmc_depth: int - _bounded: set[int] - - def __init__( - self, - id: str, - kcfg: KCFG, - terminal: Iterable[int], - init: NodeIdLike, - target: NodeIdLike, - logs: dict[int, tuple[LogEntry, ...]], - bmc_depth: int, - bounded: Iterable[int] | None = None, - proof_dir: Path | None = None, - subproof_ids: Iterable[str] = (), - node_refutations: dict[int, str] | None = None, - circularity: bool = False, - admitted: bool = False, - ): - super().__init__( - id, - kcfg, - terminal, - init, - target, - logs, - proof_dir=proof_dir, - subproof_ids=subproof_ids, - node_refutations=node_refutations, - circularity=circularity, - admitted=admitted, - ) - self.bmc_depth = bmc_depth - self._bounded = 
set(bounded) if bounded is not None else set() - - @staticmethod - def read_proof_data(proof_dir: Path, id: str) -> APRBMCProof: - proof_subdir = proof_dir / id - proof_json = proof_subdir / 'proof.json' - proof_dict = json.loads(proof_json.read_text()) - cfg_dir = proof_subdir / 'kcfg' - kcfg = KCFG.read_cfg_data(cfg_dir, id) - init = int(proof_dict['init']) - target = int(proof_dict['target']) - circularity = bool(proof_dict['circularity']) - terminal = proof_dict['terminal'] - admitted = bool(proof_dict['admitted']) - logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in proof_dict['logs'].items()} - subproof_ids = proof_dict['subproof_ids'] - node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in proof_dict['node_refutations']} - bounded = proof_dict['bounded'] - bmc_depth = int(proof_dict['bmc_depth']) - - return APRBMCProof( - id=id, - kcfg=kcfg, - terminal=terminal, - init=init, - target=target, - logs=logs, - circularity=circularity, - admitted=admitted, - bounded=bounded, - bmc_depth=bmc_depth, - proof_dir=proof_dir, - subproof_ids=subproof_ids, - node_refutations=node_refutations, - ) - - def write_proof_data(self) -> None: - if self.proof_dir is None or self.proof_subdir is None: - _LOGGER.info(f'Skipped saving proof {self.id} since no save dir was specified.') - return - ensure_dir_path(self.proof_dir) - ensure_dir_path(self.proof_subdir) - proof_json = self.proof_subdir / 'proof.json' - dct: dict[str, list[int] | list[str] | bool | str | int | dict[int, str] | dict[int, list[dict[str, Any]]]] = {} - - dct['id'] = self.id - dct['subproof_ids'] = self.subproof_ids - dct['admitted'] = self.admitted - dct['type'] = 'APRBMCProof' - dct['init'] = self.kcfg._resolve(self.init) - dct['target'] = self.kcfg._resolve(self.target) - dct['node_refutations'] = { - self.kcfg._resolve(node_id): proof.id for (node_id, proof) in self.node_refutations.items() - } - dct['circularity'] = self.circularity - logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} - dct['logs'] = logs - dct['terminal'] = sorted(self._terminal) - dct['bounded'] = sorted(self._bounded) - dct['bmc_depth'] = self.bmc_depth - - proof_json.write_text(json.dumps(dct)) - _LOGGER.info(f'Wrote proof data for {self.id}: {proof_json}') - self.kcfg.write_cfg_data() - - @property - def bounded(self) -> list[KCFG.Node]: - return [nd for nd in self.kcfg.leaves if self.is_bounded(nd.id)] - - def is_bounded(self, node_id: NodeIdLike) -> bool: - return self.kcfg._resolve(node_id) in self._bounded - - def is_pending(self, node_id: NodeIdLike) -> bool: - return super().is_pending(node_id) and not self.is_bounded(node_id) - - def is_failing(self, node_id: NodeIdLike) -> bool: - return super().is_failing(node_id) and not self.is_bounded(node_id) - - def prune(self, node_id: NodeIdLike, keep_nodes: Iterable[NodeIdLike] = ()) -> list[int]: - pruned_nodes = super().prune(node_id, keep_nodes=keep_nodes) - for nid in pruned_nodes: - self.remove_bounded(nid) - return pruned_nodes - - @classmethod - def from_dict(cls: type[APRBMCProof], dct: Mapping[str, Any], proof_dir: Path | None = None) -> APRBMCProof: - kcfg = KCFG.from_dict(dct['kcfg']) - terminal = dct['terminal'] - init = dct['init'] - target = dct['target'] - bounded = dct['bounded'] - - admitted = dct.get('admitted', False) - circularity = dct.get('circularity', False) - bmc_depth = dct['bmc_depth'] - subproof_ids = dct['subproof_ids'] if 'subproof_ids' in dct else [] - node_refutations: dict[int, str] = {} - if 
'node_refutation' in dct: - node_refutations = {kcfg._resolve(node_id): proof_id for (node_id, proof_id) in dct['node_refutations']} - id = dct['id'] - if 'logs' in dct: - logs = {int(k): tuple(LogEntry.from_dict(l) for l in ls) for k, ls in dct['logs'].items()} - else: - logs = {} - - return APRBMCProof( - id, - kcfg, - terminal, - init, - target, - logs, - bmc_depth, - bounded=bounded, - proof_dir=proof_dir, - circularity=circularity, - subproof_ids=subproof_ids, - node_refutations=node_refutations, - admitted=admitted, - ) - - @property - def dict(self) -> dict[str, Any]: - dct = super().dict - dct['type'] = 'APRBMCProof' - dct['bmc_depth'] = self.bmc_depth - dct['bounded'] = list(self._bounded) - logs = {int(k): [l.to_dict() for l in ls] for k, ls in self.logs.items()} - dct['logs'] = logs - dct['circularity'] = self.circularity - return dct - - @staticmethod - def from_claim_with_bmc_depth( - defn: KDefinition, claim: KClaim, bmc_depth: int, proof_dir: Path | None = None - ) -> APRBMCProof: - kcfg_dir = proof_dir / claim.label / 'kcfg' if proof_dir is not None else None - - kcfg, init_node, target_node = KCFG.from_claim(defn, claim, cfg_dir=kcfg_dir) - - return APRBMCProof( - claim.label, - kcfg, - [], - bmc_depth=bmc_depth, - init=init_node, - target=target_node, - logs={}, - proof_dir=proof_dir, - circularity=claim.is_circularity, - admitted=claim.is_trusted, - ) - - def add_bounded(self, nid: NodeIdLike) -> None: - self._bounded.add(self.kcfg._resolve(nid)) - - def remove_bounded(self, node_id: int) -> None: - self._bounded.discard(node_id) - - @property - def summary(self) -> CompositeSummary: - subproofs_summaries = [subproof.summary for subproof in self.subproofs] - return CompositeSummary( - [ - APRBMCSummary( - self.id, - self.bmc_depth, - self.status, - len(self.kcfg.nodes), - len(self.pending), - len(self.failing), - len(self.kcfg.vacuous), - len(self.kcfg.stuck), - len(self._terminal), - len(self.node_refutations), - len(self._bounded), - len(self.subproof_ids), - ), - *subproofs_summaries, - ] - ) - - class APRProver(Prover): proof: APRProof @@ -705,6 +535,7 @@ class APRProver(Prover): _checked_for_terminal: set[int] _checked_for_subsumption: set[int] + _checked_for_bounded: set[int] def __init__( self, @@ -749,6 +580,7 @@ def _inject_module(module_name: str, import_name: str, sentences: list[KRuleLike self._checked_for_terminal = set() self._checked_for_subsumption = set() + self._checked_for_bounded = set() self._check_all_terminals() def nonzero_depth(self, node: KCFG.Node) -> bool: @@ -800,6 +632,26 @@ def advance_pending_node( cut_point_rules: Iterable[str] = (), terminal_rules: Iterable[str] = (), ) -> None: + if self.proof.bmc_depth is not None and node.id not in self._checked_for_bounded: + _LOGGER.info(f'Checking bmc depth for node {self.proof.id}: {node.id}') + self._checked_for_bounded.add(node.id) + _prior_loops = [ + succ.source.id + for succ in self.proof.shortest_path_to(node.id) + if self.kcfg_explore.kcfg_semantics.same_loop(succ.source.cterm, node.cterm) + ] + prior_loops: list[NodeIdLike] = [] + for _pl in _prior_loops: + if not ( + self.proof.kcfg.zero_depth_between(_pl, node.id) + or any(self.proof.kcfg.zero_depth_between(_pl, pl) for pl in prior_loops) + ): + prior_loops.append(_pl) + _LOGGER.info(f'Prior loop heads for node {self.proof.id}: {(node.id, prior_loops)}') + if len(prior_loops) > self.proof.bmc_depth: + self.proof.add_bounded(node.id) + return + if self.proof.target not in self.proof._terminal: if self.always_check_subsumption and 
self._check_subsume(node): return @@ -883,11 +735,13 @@ class APRSummary(ProofSummary): stuck: int terminal: int refuted: int + bmc_depth: int | None + bounded: int subproofs: int @property def lines(self) -> list[str]: - return [ + _lines = [ f'APRProof: {self.id}', f' status: {self.status}', f' admitted: {self.admitted}', @@ -898,8 +752,12 @@ def lines(self) -> list[str]: f' stuck: {self.stuck}', f' terminal: {self.terminal}', f' refuted: {self.refuted}', - f'Subproofs: {self.subproofs}', + f' bounded: {self.bounded}', ] + if self.bmc_depth is not None: + _lines.append(f' bmc depth: {self.bmc_depth}') + _lines.append(f'Subproofs: {self.subproofs}') + return _lines @dataclass(frozen=True) @@ -996,90 +854,3 @@ def print(self) -> list[str]: res_lines.append('') res_lines.append('Join the Runtime Verification Discord server for support: https://discord.gg/CurfmXNtbN') return res_lines - - -class APRBMCProver(APRProver): - proof: APRBMCProof - _checked_nodes: list[int] - - def __init__( - self, - proof: APRBMCProof, - kcfg_explore: KCFGExplore, - counterexample_info: bool = False, - always_check_subsumption: bool = True, - fast_check_subsumption: bool = False, - ) -> None: - super().__init__( - proof, - kcfg_explore=kcfg_explore, - counterexample_info=counterexample_info, - always_check_subsumption=always_check_subsumption, - fast_check_subsumption=fast_check_subsumption, - ) - self._checked_nodes = [] - - def advance_pending_node( - self, - node: KCFG.Node, - execute_depth: int | None = None, - cut_point_rules: Iterable[str] = (), - terminal_rules: Iterable[str] = (), - ) -> None: - if node.id not in self._checked_nodes: - _LOGGER.info(f'Checking bmc depth for node {self.proof.id}: {node.id}') - self._checked_nodes.append(node.id) - _prior_loops = [ - succ.source.id - for succ in self.proof.shortest_path_to(node.id) - if self.kcfg_explore.kcfg_semantics.same_loop(succ.source.cterm, node.cterm) - ] - prior_loops: list[NodeIdLike] = [] - for _pl in _prior_loops: - if not ( - self.proof.kcfg.zero_depth_between(_pl, node.id) - or any(self.proof.kcfg.zero_depth_between(_pl, pl) for pl in prior_loops) - ): - prior_loops.append(_pl) - _LOGGER.info(f'Prior loop heads for node {self.proof.id}: {(node.id, prior_loops)}') - if len(prior_loops) > self.proof.bmc_depth: - self.proof.add_bounded(node.id) - return - super().advance_pending_node( - node=node, - execute_depth=execute_depth, - cut_point_rules=cut_point_rules, - terminal_rules=terminal_rules, - ) - - -@dataclass(frozen=True) -class APRBMCSummary(ProofSummary): - id: str - bmc_depth: int - status: ProofStatus - nodes: int - pending: int - failing: int - vacuous: int - stuck: int - terminal: int - refuted: int - bounded: int - subproofs: int - - @property - def lines(self) -> list[str]: - return [ - f'APRBMCProof(depth={self.bmc_depth}): {self.id}', - f' status: {self.status}', - f' nodes: {self.nodes}', - f' pending: {self.pending}', - f' failing: {self.failing}', - f' vacuous: {self.vacuous}', - f' stuck: {self.stuck}', - f' terminal: {self.terminal}', - f' refuted: {self.refuted}', - f' bounded: {self.bounded}', - f'Subproofs: {self.subproofs}', - ] From c155cdc6ee1951bf1b1a35054accfcad3dbeeb54 Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 17:37:40 +0000 Subject: [PATCH 105/116] proof/{proof,show,__init__}: cleanup remaining references to BMC provers --- src/pyk/proof/__init__.py | 2 +- src/pyk/proof/proof.py | 6 +++--- src/pyk/proof/show.py | 13 +------------ 3 files changed, 5 insertions(+), 16 deletions(-) diff 
--git a/src/pyk/proof/__init__.py b/src/pyk/proof/__init__.py index 55999e0e6..52923e989 100644 --- a/src/pyk/proof/__init__.py +++ b/src/pyk/proof/__init__.py @@ -1,3 +1,3 @@ from .equality import EqualityProof, EqualityProver from .proof import ProofStatus -from .reachability import APRBMCProof, APRBMCProver, APRProof, APRProver +from .reachability import APRProof, APRProver diff --git a/src/pyk/proof/proof.py b/src/pyk/proof/proof.py index f4d0558c5..d44d40247 100644 --- a/src/pyk/proof/proof.py +++ b/src/pyk/proof/proof.py @@ -29,7 +29,7 @@ class ProofStatus(Enum): class Proof(ABC): - _PROOF_TYPES: Final = {'APRProof', 'APRBMCProof', 'EqualityProof', 'RefutationProof'} + _PROOF_TYPES: Final = {'APRProof', 'EqualityProof', 'RefutationProof'} id: str proof_dir: Path | None @@ -199,7 +199,7 @@ def from_dict(cls: type[Proof], dct: Mapping[str, Any], proof_dir: Path | None = def read_proof(cls: type[Proof], id: str, proof_dir: Path) -> Proof: # these local imports allow us to call .to_dict() based on the proof type we read from JSON from .equality import EqualityProof, RefutationProof # noqa - from .reachability import APRBMCProof, APRProof # noqa + from .reachability import APRProof # noqa proof_path = proof_dir / f'{hash_str(id)}.json' if Proof.proof_exists(id, proof_dir): @@ -216,7 +216,7 @@ def read_proof(cls: type[Proof], id: str, proof_dir: Path) -> Proof: def read_proof_data(proof_dir: Path, id: str) -> Proof: # these local imports allow us to call .to_dict() based on the proof type we read from JSON from .equality import EqualityProof, RefutationProof # noqa - from .reachability import APRBMCProof, APRProof # noqa + from .reachability import APRProof # noqa proof_path = proof_dir / id / 'proof.json' if Proof.proof_data_exists(id, proof_dir): diff --git a/src/pyk/proof/show.py b/src/pyk/proof/show.py index 20747610e..5564e8d6e 100644 --- a/src/pyk/proof/show.py +++ b/src/pyk/proof/show.py @@ -16,7 +16,7 @@ from ..kcfg import KCFG from ..kcfg.kcfg import NodeIdLike from ..ktool.kprint import KPrint - from .reachability import APRBMCProof, APRProof + from .reachability import APRProof _LOGGER: Final = logging.getLogger(__name__) @@ -40,17 +40,6 @@ def node_attrs(self, kcfg: KCFG, node: KCFG.Node) -> list[str]: attrs.append('terminal') if 'stuck' in attrs: attrs.remove('stuck') - return attrs - - -class APRBMCProofNodePrinter(APRProofNodePrinter): - proof: APRBMCProof - - def __init__(self, proof: APRBMCProof, kprint: KPrint, full_printer: bool = False, minimize: bool = False): - super().__init__(proof, kprint, full_printer=full_printer, minimize=minimize) - - def node_attrs(self, kcfg: KCFG, node: KCFG.Node) -> list[str]: - attrs = super().node_attrs(kcfg, node) if self.proof.is_bounded(node.id): attrs.append('bounded') if 'stuck' in attrs: From 220bb7b2e2b9a05cea6d10c268803b42546589d0 Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 17:43:25 +0000 Subject: [PATCH 106/116] tests/unit/test_proof: update unit tests for unified BMC prover --- src/tests/unit/test_proof.py | 47 ++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/src/tests/unit/test_proof.py b/src/tests/unit/test_proof.py index 416f0322d..f56fdfdcb 100644 --- a/src/tests/unit/test_proof.py +++ b/src/tests/unit/test_proof.py @@ -9,7 +9,7 @@ from pyk.prelude.kint import intToken from pyk.proof.equality import EqualityProof, EqualitySummary from pyk.proof.proof import CompositeSummary, Proof, ProofStatus -from pyk.proof.reachability import APRBMCProof, 
APRBMCSummary, APRFailureInfo, APRProof, APRSummary +from pyk.proof.reachability import APRFailureInfo, APRProof, APRSummary from .test_kcfg import node, node_dicts, term @@ -25,7 +25,7 @@ def proof_dir(tmp_path_factory: TempPathFactory) -> Path: return tmp_path_factory.mktemp('proofs') -def apr_proof(i: int, proof_dir: Path) -> APRProof: +def apr_proof(i: int, proof_dir: Path, bmc_depth: int | None = None) -> APRProof: return APRProof( id=f'apr_proof_{i}', kcfg=KCFG.from_dict({'nodes': node_dicts(i)}), @@ -34,19 +34,7 @@ def apr_proof(i: int, proof_dir: Path) -> APRProof: target=node(1).id, logs={}, proof_dir=proof_dir, - ) - - -def aprbmc_proof(i: int, proof_dir: Path) -> APRBMCProof: - return APRBMCProof( - id=f'aprbmc_proof_{i}', - init=node(1).id, - target=node(1).id, - bmc_depth=i, - kcfg=KCFG.from_dict({'nodes': node_dicts(i)}), - terminal=[], - logs={}, - proof_dir=proof_dir, + bmc_depth=bmc_depth, ) @@ -80,15 +68,15 @@ def test_read_proof_apr(self, proof_dir: Path) -> None: assert proof_from_disk.dict == sample_proof.dict def test_read_proof_aprbmc(self, proof_dir: Path) -> None: - sample_proof = APRBMCProof( + sample_proof = APRProof( id='aprbmc_proof_1', - bmc_depth=1, kcfg=KCFG.from_dict({'nodes': node_dicts(1)}), terminal=[], init=node(1).id, target=node(1).id, logs={}, proof_dir=proof_dir, + bmc_depth=1, ) # Given @@ -202,7 +190,7 @@ def test_apr_proof_from_dict_heterogeneous_subproofs(proof_dir: Path) -> None: # Given sub_proof_1 = equality_proof(1, proof_dir) sub_proof_2 = apr_proof(2, proof_dir) - sub_proof_3 = aprbmc_proof(3, proof_dir) + sub_proof_3 = apr_proof(3, proof_dir, bmc_depth=3) proof = apr_proof(1, proof_dir) # When @@ -244,7 +232,7 @@ def test_proof_module_name(test_id: str, proof_id: str, expected: str) -> None: def test_aprbmc_proof_from_dict_no_subproofs(proof_dir: Path) -> None: # Given - proof = aprbmc_proof(1, proof_dir) + proof = apr_proof(1, proof_dir, bmc_depth=1) # When proof.write_proof() @@ -258,7 +246,7 @@ def test_aprbmc_proof_from_dict_no_subproofs(proof_dir: Path) -> None: def test_aprbmc_proof_from_dict_one_subproofs(proof_dir: Path) -> None: # Given eq_proof = equality_proof(1, proof_dir) - proof = aprbmc_proof(1, proof_dir) + proof = apr_proof(1, proof_dir, bmc_depth=1) # When eq_proof.write_proof_data() @@ -275,7 +263,7 @@ def test_aprbmc_proof_from_dict_heterogeneous_subproofs(proof_dir: Path) -> None # Given eq_proof = equality_proof(1, proof_dir) subproof = apr_proof(2, proof_dir) - proof = aprbmc_proof(1, proof_dir) + proof = apr_proof(1, proof_dir, bmc_depth=1) # When eq_proof.write_proof_data() @@ -363,6 +351,8 @@ def test_apr_proof_summary(proof_dir: Path) -> None: stuck=0, terminal=0, refuted=0, + bmc_depth=None, + bounded=0, subproofs=0, ) ] @@ -370,15 +360,15 @@ def test_apr_proof_summary(proof_dir: Path) -> None: def test_aprbmc_proof_summary(proof_dir: Path) -> None: - proof = aprbmc_proof(1, proof_dir) + proof = apr_proof(1, proof_dir, bmc_depth=1) assert len(proof.summary.summaries) == 1 assert proof.summary == CompositeSummary( [ - APRBMCSummary( - id='aprbmc_proof_1', + APRSummary( + id='apr_proof_1', status=ProofStatus.PASSED, - bmc_depth=1, + admitted=False, nodes=1, pending=0, failing=0, @@ -386,8 +376,9 @@ def test_aprbmc_proof_summary(proof_dir: Path) -> None: stuck=0, terminal=0, refuted=0, - subproofs=0, + bmc_depth=1, bounded=0, + subproofs=0, ) ] ) @@ -423,6 +414,8 @@ def test_apr_proof_summary_subproofs(proof_dir: Path) -> None: stuck=0, terminal=0, refuted=0, + bmc_depth=None, + bounded=0, subproofs=1, ) @@ -439,6 
+432,8 @@ def test_apr_proof_summary_subproofs(proof_dir: Path) -> None: stuck=0, terminal=0, refuted=0, + bmc_depth=None, + bounded=0, subproofs=1, ), EqualitySummary( From b8baa480d86d5f49aaf496bd1fe5b68c35e91700 Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 17:46:50 +0000 Subject: [PATCH 107/116] tests/integration/proof/test_imp: update for unified BMC prover --- src/tests/integration/proof/test_imp.py | 68 ++++++------------------- 1 file changed, 16 insertions(+), 52 deletions(-) diff --git a/src/tests/integration/proof/test_imp.py b/src/tests/integration/proof/test_imp.py index 69258677e..6e67a6d06 100644 --- a/src/tests/integration/proof/test_imp.py +++ b/src/tests/integration/proof/test_imp.py @@ -14,8 +14,8 @@ from pyk.prelude.kbool import BOOL, andBool, notBool, orBool from pyk.prelude.kint import intToken from pyk.prelude.ml import mlAnd, mlBottom, mlEqualsFalse, mlEqualsTrue, mlTop -from pyk.proof import APRBMCProof, APRBMCProver, APRProof, APRProver, ProofStatus -from pyk.proof.show import APRBMCProofNodePrinter, APRProofNodePrinter +from pyk.proof import APRProof, APRProver, ProofStatus +from pyk.proof.show import APRProofNodePrinter from pyk.testing import KCFGExploreTest, KProveTest from pyk.utils import single @@ -944,11 +944,7 @@ def _node_printer(cterm: CTerm) -> list[str]: ) proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) - + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof( max_iterations=max_iterations, execute_depth=max_depth, @@ -986,13 +982,9 @@ def test_all_path_bmc_reachability_prove( kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) ) - proof = APRBMCProof.from_claim_with_bmc_depth(kprove.definition, claim, bmc_depth) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, bmc_depth=bmc_depth) kcfg_explore.simplify(proof.kcfg, {}) - - prover = APRBMCProver( - proof, - kcfg_explore=kcfg_explore, - ) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof( max_iterations=max_iterations, execute_depth=max_depth, @@ -1001,7 +993,7 @@ def test_all_path_bmc_reachability_prove( ) kcfg_show = KCFGShow( - kcfg_explore.kprint, node_printer=APRBMCProofNodePrinter(proof, kcfg_explore.kprint, full_printer=True) + kcfg_explore.kprint, node_printer=APRProofNodePrinter(proof, kcfg_explore.kprint, full_printer=True) ) cfg_lines = kcfg_show.show(proof.kcfg) _LOGGER.info('\n'.join(cfg_lines)) @@ -1032,10 +1024,7 @@ def test_failure_info( proof = APRProof.from_claim(kprove.definition, claim, logs={}) kcfg_explore.simplify(proof.kcfg, {}) - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof() failure_info = prover.failure_info() @@ -1066,12 +1055,9 @@ def test_apr_prove_read_write_node_data( ) proofs_dir = proof_dir - proof = APRProof.from_claim(kprove.definition, claim, proof_dir=proofs_dir, logs={}) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proofs_dir) kcfg_explore.simplify(proof.kcfg, {}) - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof(execute_depth=1) proof_from_disk = APRProof.read_proof_data(proof_dir=proofs_dir, id=proof.id) @@ -1094,15 +1080,12 @@ def test_aprbmc_prove_read_write_node_data( ) proofs_dir = proof_dir - proof = 
APRBMCProof.from_claim_with_bmc_depth(kprove.definition, claim, proof_dir=proofs_dir, bmc_depth=3) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proofs_dir, bmc_depth=3) kcfg_explore.simplify(proof.kcfg, {}) - prover = APRBMCProver( - proof, - kcfg_explore=kcfg_explore, - ) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof(execute_depth=1) - proof_from_disk = APRBMCProof.read_proof_data(proof_dir=proofs_dir, id=proof.id) + proof_from_disk = APRProof.read_proof_data(proof_dir=proofs_dir, id=proof.id) assert proof.dict == proof_from_disk.dict assert proof.kcfg.nodes == proof_from_disk.kcfg.nodes @@ -1121,18 +1104,8 @@ def test_fail_fast( ) ) - proof = APRProof.from_claim( - kprove.definition, - claim, - logs={}, - proof_dir=proof_dir, - ) - - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) - + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof(fail_fast=False) # Both branches will be checked and fail (fail_fast=False) @@ -1141,17 +1114,8 @@ def test_fail_fast( assert len(proof._terminal) == 3 assert len(proof.failing) == 2 - proof = APRProof.from_claim( - kprove.definition, - claim, - logs={}, - proof_dir=proof_dir, - ) - - prover = APRProver( - proof, - kcfg_explore=kcfg_explore, - ) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, proof_dir=proof_dir) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof(fail_fast=True) From 5dda1cc36b45f85679e19059d9e1efdd7de19ce3 Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 17:49:25 +0000 Subject: [PATCH 108/116] tests/integration/proof/test_goto: migrate to unified BMC prover --- src/tests/integration/proof/test_goto.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/tests/integration/proof/test_goto.py b/src/tests/integration/proof/test_goto.py index 8d8c381f3..ee5317a02 100644 --- a/src/tests/integration/proof/test_goto.py +++ b/src/tests/integration/proof/test_goto.py @@ -11,8 +11,8 @@ from pyk.kcfg.show import KCFGShow from pyk.prelude.ml import mlEqualsTrue from pyk.prelude.utils import token -from pyk.proof import APRBMCProof, APRBMCProver, ProofStatus -from pyk.proof.show import APRBMCProofNodePrinter +from pyk.proof import APRProof, APRProver, ProofStatus +from pyk.proof.show import APRProofNodePrinter from pyk.testing import KCFGExploreTest, KProveTest from pyk.utils import single @@ -27,7 +27,6 @@ from pyk.kast.outer import KDefinition from pyk.kcfg import KCFGExplore from pyk.ktool.kprove import KProve - from pyk.proof import APRProof _LOGGER: Final = logging.getLogger(__name__) @@ -115,12 +114,9 @@ def test_all_path_bmc_reachability_prove( kprove.get_claims(Path(spec_file), spec_module_name=spec_module, claim_labels=[f'{spec_module}.{claim_id}']) ) - proof = APRBMCProof.from_claim_with_bmc_depth(kprove.definition, claim, bmc_depth) + proof = APRProof.from_claim(kprove.definition, claim, logs={}, bmc_depth=bmc_depth) kcfg_explore.simplify(proof.kcfg, {}) - prover = APRBMCProver( - proof, - kcfg_explore=kcfg_explore, - ) + prover = APRProver(proof, kcfg_explore=kcfg_explore) prover.advance_proof( max_iterations=max_iterations, execute_depth=max_depth, @@ -129,7 +125,7 @@ def test_all_path_bmc_reachability_prove( ) kcfg_show = KCFGShow( - kcfg_explore.kprint, node_printer=APRBMCProofNodePrinter(proof, kcfg_explore.kprint, full_printer=True) + kcfg_explore.kprint, 
node_printer=APRProofNodePrinter(proof, kcfg_explore.kprint, full_printer=True) ) cfg_lines = kcfg_show.show(proof.kcfg) _LOGGER.info('\n'.join(cfg_lines)) From c8ec74cab94d088dcc1cca164eb84c3deff73de4 Mon Sep 17 00:00:00 2001 From: devops Date: Sat, 10 Feb 2024 18:00:12 +0000 Subject: [PATCH 109/116] Set Version: 0.1.620 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6104f3479..7e98bdab0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.619' -release = '0.1.619' +version = '0.1.620' +release = '0.1.620' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 0c5bcacb3..d859e87da 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.619 +0.1.620 diff --git a/pyproject.toml b/pyproject.toml index b64125e61..19e4e7984 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.619" +version = "0.1.620" description = "" authors = [ "Runtime Verification, Inc. ", From 9a5ad26862f5a170fc7c9423c964fdbb307c6d4f Mon Sep 17 00:00:00 2001 From: Everett Hildenbrandt Date: Sat, 10 Feb 2024 18:01:08 +0000 Subject: [PATCH 110/116] proof/reachability: inline tiny method --- src/pyk/proof/reachability.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 3b6c1eac4..818ca5af3 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -149,9 +149,6 @@ def is_bounded(self, node_id: NodeIdLike) -> bool: def add_bounded(self, nid: NodeIdLike) -> None: self._bounded.add(self.kcfg._resolve(nid)) - def remove_bounded(self, node_id: int) -> None: - self._bounded.discard(node_id) - def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: spb = self.kcfg.shortest_path_between(self.init, node_id) assert spb is not None @@ -160,7 +157,7 @@ def shortest_path_to(self, node_id: NodeIdLike) -> tuple[KCFG.Successor, ...]: def prune(self, node_id: NodeIdLike, keep_nodes: Iterable[NodeIdLike] = ()) -> list[int]: pruned_nodes = super().prune(node_id, keep_nodes=list(keep_nodes) + [self.init, self.target]) for nid in pruned_nodes: - self.remove_bounded(nid) + self._bounded.discard(nid) return pruned_nodes @staticmethod From 1a3dce65c029a0c922a9854ad6fb801ecd82041c Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Mon, 12 Feb 2024 17:55:41 -0600 Subject: [PATCH 111/116] Consolidate ParallelAPRBMCProver into ParallelAPRProver --- src/pyk/proof/reachability.py | 137 ++++-------------- src/pyk/testing/_kompiler.py | 2 +- .../integration/proof/test_imp_parallel.py | 6 +- 3 files changed, 34 insertions(+), 111 deletions(-) diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 3beeb8179..808e183d2 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -1198,6 +1198,8 @@ class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProces iterations: int fail_fast: bool + _checked_nodes: list[int] + def __init__( self, proof: APRProof, @@ -1254,6 +1256,7 @@ def __init__( self.max_iterations = max_iterations self.iterations = 0 
self.fail_fast = fail_fast + self._checked_nodes = [] def __del__(self) -> None: self.client.close() @@ -1321,6 +1324,34 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: if self.max_iterations is not None and self.iterations >= self.max_iterations: return + if isinstance(proof, APRBMCProof): + if type(update) is APRProofExtendResult: + node = proof.kcfg.node(update.node_id) + if node.id not in self._checked_nodes: + _LOGGER.info(f'Checking bmc depth for node {proof.id}: {node.id}') + self._checked_nodes.append(node.id) + _prior_loops = [ + succ.source.id + for succ in proof.shortest_path_to(node.id) + if self.kcfg_explore.kcfg_semantics.same_loop(succ.source.cterm, node.cterm) + ] + prior_loops: list[NodeIdLike] = [] + for _pl in _prior_loops: + if not ( + proof.kcfg.zero_depth_between(_pl, node.id) + or any(proof.kcfg.zero_depth_between(_pl, pl) for pl in prior_loops) + ): + prior_loops.append(_pl) + _LOGGER.info(f'Prior loop heads for node {proof.id}: {(node.id, prior_loops)}') + if len(prior_loops) > proof.bmc_depth: + proof.add_bounded(node.id) + + self.iterations += 1 + if self.max_iterations is not None and self.iterations >= self.max_iterations: + _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') + + return + self.prover._check_all_terminals() self.total_cterm_extend_time += update.extend_cterm_time @@ -1349,112 +1380,6 @@ def commit(self, proof: APRProof, update: APRProofResult) -> None: _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') -class ParallelAPRBMCProver(ParallelAPRProver): - proof: APRBMCProof - _checked_nodes: list[int] - - def __init__( - self, - proof: APRBMCProof, - module_name: str, - definition_dir: str | Path, - execute_depth: int | None, - kprint: KPrint, - kcfg_semantics: KCFGSemantics | None, - port: int, - id: str | None, - trace_rewrites: bool, - cut_point_rules: Iterable[str], - terminal_rules: Iterable[str], - bug_report_id: str | None, - llvm_definition_dir: Path | None = None, - command: str | Iterable[str] | None = None, - bug_report: BugReport | None = None, - smt_timeout: int | None = None, - smt_retry_limit: int | None = None, - smt_tactic: str | None = None, - haskell_log_format: KoreExecLogFormat = KoreExecLogFormat.ONELINE, - haskell_log_entries: Iterable[str] = (), - log_axioms_file: Path | None = None, - max_iterations: int | None = None, - fail_fast: bool = False, - ) -> None: - self.execute_depth = execute_depth - self.cut_point_rules = cut_point_rules - self.terminal_rules = terminal_rules - self.kprint = kprint - self.kcfg_semantics = kcfg_semantics - self.id = id - self.trace_rewrites = trace_rewrites - self.port = port - self.bug_report = bug_report - self.bug_report_id = bug_report_id - self.total_cterm_extend_time = 0 - self.total_cterm_implies_time = 0 - self.client = KoreClient( - host='localhost', - port=self.port, - bug_report=self.bug_report, - bug_report_id=self.bug_report_id, - ) - self.kcfg_explore = KCFGExplore( - kprint=self.kprint, - kore_client=self.client, - kcfg_semantics=self.kcfg_semantics, - id=self.id, - trace_rewrites=self.trace_rewrites, - ) - self.prover = APRBMCProver(proof=proof, kcfg_explore=self.kcfg_explore) - self.prover._check_all_terminals() - self.max_iterations = max_iterations - self.iterations = 0 - self.fail_fast = fail_fast - self._checked_nodes = [] - - def commit(self, proof: APRProof, update: APRProofResult) -> None: - """ - Should update `proof` according to `update`. 
- If `steps()` or `commit()` has been called on a proof `proof`, `commit()` may never again be called on `proof`. - Must only be called with an `update` that was returned by `step.execute()` where `step` was returned by `self.steps(proof)`. - Steps for a proof `proof` can have their results submitted any time after they are made available by `self.steps(proof)`, including in any order and multiple times, and the Prover must be able to handle this. - """ - - if not isinstance(proof, APRBMCProof): - raise ValueError(f'Proof {proof.id} cannot be used with ParallelAPRBMCProver.') - - if self.max_iterations is not None and self.iterations >= self.max_iterations: - return - - if type(update) is APRProofExtendResult: - node = proof.kcfg.node(update.node_id) - if node.id not in self._checked_nodes: - _LOGGER.info(f'Checking bmc depth for node {proof.id}: {node.id}') - self._checked_nodes.append(node.id) - _prior_loops = [ - succ.source.id - for succ in proof.shortest_path_to(node.id) - if self.kcfg_explore.kcfg_semantics.same_loop(succ.source.cterm, node.cterm) - ] - prior_loops: list[NodeIdLike] = [] - for _pl in _prior_loops: - if not ( - proof.kcfg.zero_depth_between(_pl, node.id) - or any(proof.kcfg.zero_depth_between(_pl, pl) for pl in prior_loops) - ): - prior_loops.append(_pl) - _LOGGER.info(f'Prior loop heads for node {proof.id}: {(node.id, prior_loops)}') - if len(prior_loops) > proof.bmc_depth: - proof.add_bounded(node.id) - - self.iterations += 1 - if self.max_iterations is not None and self.iterations >= self.max_iterations: - _LOGGER.warning(f'Reached iteration bound {proof.id}: {self.max_iterations}') - - return - - super().commit(proof, update) - - @dataclass(frozen=True, eq=True) class APRProofStep(parallel.ProofStep[APRProofResult, APRProofProcessData]): proof_id: str diff --git a/src/pyk/testing/_kompiler.py b/src/pyk/testing/_kompiler.py index e8b5d1c96..248b25107 100644 --- a/src/pyk/testing/_kompiler.py +++ b/src/pyk/testing/_kompiler.py @@ -193,7 +193,7 @@ def _kore_server( llvm_dir: Path | None, bug_report: BugReport | None, ) -> Iterator[KoreServer]: - os.environ["GHCRTS"] = '-N2' + os.environ['GHCRTS'] = '-N2' match server_type: case ServerType.LEGACY: assert not llvm_dir diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index b61764eea..becdfc946 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -7,7 +7,7 @@ from pyk.proof.parallel import prove_parallel from pyk.proof.proof import ProofStatus -from pyk.proof.reachability import APRBMCProof, APRProof, APRProofProcessData, ParallelAPRBMCProver, ParallelAPRProver +from pyk.proof.reachability import APRBMCProof, APRProof, APRProofProcessData, ParallelAPRProver from pyk.testing import KCFGExploreTest, KPrintTest, KProveTest from pyk.utils import single @@ -60,7 +60,6 @@ def test_imp_parallel_prove( proof_dir: Path, _kore_server: KoreServer, ) -> None: - spec_file = K_FILES / 'imp-simple-spec.k' spec_module = 'IMP-SIMPLE-SPEC' @@ -121,7 +120,6 @@ def test_imp_bmc_parallel_prove( proof_dir: Path, _kore_server: KoreServer, ) -> None: - claim_id = 'bmc-infinite-loop' expected_status = ProofStatus.PASSED @@ -134,7 +132,7 @@ def test_imp_bmc_parallel_prove( proof = APRBMCProof.from_claim_with_bmc_depth(kprove.definition, claim, 5) semantics = self.semantics(kprove.definition) - parallel_prover = ParallelAPRBMCProver( + parallel_prover = ParallelAPRProver( proof=proof, module_name=kprove.main_module, 
definition_dir=kprove.definition_dir, From 237a1c57f2fad8f8792532049d65ab46a9862ed4 Mon Sep 17 00:00:00 2001 From: devops Date: Mon, 12 Feb 2024 23:55:56 +0000 Subject: [PATCH 112/116] Set Version: 0.1.621 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 7e98bdab0..17063bd52 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.620' -release = '0.1.620' +version = '0.1.621' +release = '0.1.621' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index d859e87da..fdc1a9101 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.620 +0.1.621 diff --git a/pyproject.toml b/pyproject.toml index 19e4e7984..15b9dffad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.620" +version = "0.1.621" description = "" authors = [ "Runtime Verification, Inc. ", From c70e9672717d7801b40afdece61f4aa71e4c94b7 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Wed, 14 Feb 2024 09:55:33 -0600 Subject: [PATCH 113/116] Fix process data being locked to one instance for all proofs --- src/pyk/proof/parallel.py | 211 +++++++++++++----- src/pyk/proof/reachability.py | 15 +- .../integration/proof/test_imp_parallel.py | 4 +- .../integration/proof/test_parallel_prove.py | 6 +- 4 files changed, 179 insertions(+), 57 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index e57110548..46c305aaf 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -1,9 +1,11 @@ from __future__ import annotations +import os import time from abc import ABC, abstractmethod from dataclasses import dataclass from multiprocessing import Process, Queue +from queue import Empty # from concurrent.futures import CancelledError, ProcessPoolExecutor, wait from typing import TYPE_CHECKING, Any, Generic, TypeVar @@ -97,89 +99,196 @@ class ProfilingInfo: total_time = 0 -def prove_parallel( - proofs: Mapping[str, Proof], - provers: Mapping[str, Prover], - max_workers: int, - process_data: Any, -) -> tuple[Iterable[Proof], ProfilingInfo]: - explored: set[tuple[str, ProofStep]] = set() +class SemanticsProcessPool: + _max_workers: int + processes: list[Process] + in_queue: Queue + out_queue: Queue + busy_queue: Queue + pending_jobs: int = 0 + process_data: Mapping[str, Any] - in_queue: Queue = Queue() - out_queue: Queue = Queue() + def __init__(self, max_workers: int): + self._max_workers = max_workers + self.in_queue = Queue() + self.out_queue = Queue() + self.busy_queue = Queue() + self.processes = [] - pending_jobs: int = 0 + def submit(self, proof_id: str, proof_step: ProofStep) -> None: + process_status = {} - profile = ProfilingInfo() + while True: + try: + msg = self.busy_queue.get_nowait() + pid, status = msg + process_status[pid] = status + except Empty: + break - total_init_time = time.time_ns() + if not any(process_status.keys()): + self.processes.append(Process(target=self.run_process, args=(self.process_data,))) - def run_process(data: Any) -> None: + self.in_queue.put((proof_id, proof_step)) + self.pending_jobs += 1 + + def run_process(self) -> None: while True: - dequeued = in_queue.get() + 
self.busy_queue.put((os.getpid(), True)) + dequeued = self.in_queue.get() + self.busy_queue.put((os.getpid(), False)) if dequeued == 0: break proof_id, proof_step = dequeued + data = self.process_data.get(proof_id) update = proof_step.exec(data) - out_queue.put((proof_id, update)) + self.out_queue.put((proof_id, update)) + + +# def prove_parallel( +# proofs: Mapping[str, Proof], +# provers: Mapping[str, Prover], +# process_data: Mapping[str, Any], +# max_workers: int, +# ) -> tuple[Iterable[Proof], ProfilingInfo]: +# explored: set[tuple[str, ProofStep]] = set() +# +# in_queue: Queue = Queue() +# out_queue: Queue = Queue() +# +# pending_jobs: int = 0 +# +# profile = ProfilingInfo() +# +# total_init_time = time.time_ns() +# +# def run_process(data: Any) -> None: +# while True: +# dequeued = in_queue.get() +# if dequeued == 0: +# break +# proof_id, proof_step = dequeued +# data = process_data.get(proof_id) +# update = proof_step.exec(data) +# out_queue.put((proof_id, update)) +# +# def submit(proof_id: str) -> None: +# proof = proofs[proof_id] +# prover = provers[proof_id] +# steps_init_time = time.time_ns() +# steps = prover.steps(proof) +# profile.total_steps_time += time.time_ns() - steps_init_time +# for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) +# if (proof_id, step) in explored: +# continue +# explored.add((proof_id, step)) +# in_queue.put((proof_id, step)) +# nonlocal pending_jobs +# pending_jobs += 1 +# +# processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] +# for process in processes: +# process.start() +# +# for proof_id in proofs.keys(): +# submit(proof_id) +# +# while pending_jobs > 0: +# wait_init_time = time.time_ns() +# proof_id, update = out_queue.get() +# profile.total_wait_time += time.time_ns() - wait_init_time +# pending_jobs -= 1 +# +# proof = proofs[proof_id] +# prover = provers[proof_id] +# +# commit_init_time = time.time_ns() +# prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) +# profile.total_commit_time += time.time_ns() - commit_init_time +# +# match proof.status: +# # terminate on first failure, yield partial results, etc. +# case ProofStatus.FAILED: +# ... +# case ProofStatus.PENDING: +# steps_init_time = time.time_ns() +# if not list(prover.steps(proof)): +# raise ValueError('Prover violated expectation. status is pending with no further steps.') +# profile.total_steps_time += time.time_ns() - steps_init_time +# case ProofStatus.PASSED: +# steps_init_time = time.time_ns() +# if list(prover.steps(proof)): +# raise ValueError('Prover violated expectation. 
status is passed with further steps.') +# profile.total_steps_time += time.time_ns() - steps_init_time +# +# submit(proof_id) +# +# for _ in range(max_workers): +# in_queue.put(0) +# +# for process in processes: +# process.join() +# +# profile.total_time = time.time_ns() - total_init_time +# +# return proofs.values(), profile + - def submit(proof_id: str) -> None: +def prove_parallel( + proofs: Mapping[str, Proof], + provers: Mapping[str, Prover], + process_data: Mapping[str, Any], + max_workers: int, +) -> Iterable[Proof]: + pending: dict[Future[Any], str] = {} + explored: set[tuple[str, ProofStep]] = set() + + def submit(proof_id: str, pool: SemanticsProcessPool) -> None: proof = proofs[proof_id] prover = provers[proof_id] - steps_init_time = time.time_ns() - steps = prover.steps(proof) - profile.total_steps_time += time.time_ns() - steps_init_time - for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) + for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) if (proof_id, step) in explored: continue explored.add((proof_id, step)) - in_queue.put((proof_id, step)) - nonlocal pending_jobs - pending_jobs += 1 + future = pool.submit((proof_id, step)) # <-- schedule steps for execution + pending[future] = proof_id - processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] - for process in processes: - process.start() + pool = SemanticsProcessPool(max_workers=max_workers) + # with ProcessPoolExecutor(max_workers=max_workers) as pool: + for proof_id in proofs: + submit(proof_id, pool) - for proof_id in proofs.keys(): - submit(proof_id) - - while pending_jobs > 0: - wait_init_time = time.time_ns() - proof_id, update = out_queue.get() - profile.total_wait_time += time.time_ns() - wait_init_time - pending_jobs -= 1 + while pending: + done, _ = wait(pending, return_when='FIRST_COMPLETED') + future = done.pop() + proof_id = pending[future] proof = proofs[proof_id] prover = provers[proof_id] + try: + update = future.result() + except CancelledError as err: + raise RuntimeError(f'Task was cancelled for proof {proof_id}') from err + except TimeoutError as err: + raise RuntimeError( + f"Future for proof {proof_id} was not finished executing and timed out. This shouldn't happen since this future was already waited on." + ) from err + except Exception as err: + raise RuntimeError('Exception was raised in ProofStep.exec() for proof {proof_id}.') from err - commit_init_time = time.time_ns() prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) - profile.total_commit_time += time.time_ns() - commit_init_time match proof.status: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: ... case ProofStatus.PENDING: - steps_init_time = time.time_ns() if not list(prover.steps(proof)): raise ValueError('Prover violated expectation. status is pending with no further steps.') - profile.total_steps_time += time.time_ns() - steps_init_time case ProofStatus.PASSED: - steps_init_time = time.time_ns() if list(prover.steps(proof)): raise ValueError('Prover violated expectation. 
status is passed with further steps.') - profile.total_steps_time += time.time_ns() - steps_init_time - - submit(proof_id) - - for _ in range(max_workers): - in_queue.put(0) - - for process in processes: - process.join() - - profile.total_time = time.time_ns() - total_init_time - return proofs.values(), profile + submit(proof_id, pool) + pending.pop(future) + return proofs.values() diff --git a/src/pyk/proof/reachability.py b/src/pyk/proof/reachability.py index 2b68ba471..0eb027402 100644 --- a/src/pyk/proof/reachability.py +++ b/src/pyk/proof/reachability.py @@ -959,6 +959,10 @@ class ParallelAPRProver(parallel.Prover[APRProof, APRProofResult, APRProofProces iterations: int fail_fast: bool + counterexample_info: bool + always_check_subsumption: bool + fast_check_subsumption: bool + _checked_nodes: list[int] def __init__( @@ -986,6 +990,9 @@ def __init__( log_axioms_file: Path | None = None, max_iterations: int | None = None, fail_fast: bool = False, + counterexample_info: bool = False, + always_check_subsumption: bool = False, + fast_check_subsumption: bool = False, ) -> None: self.execute_depth = execute_depth self.cut_point_rules = cut_point_rules @@ -1012,7 +1019,13 @@ def __init__( id=self.id, trace_rewrites=self.trace_rewrites, ) - self.prover = APRProver(proof=proof, kcfg_explore=self.kcfg_explore) + self.prover = APRProver( + proof=proof, + kcfg_explore=self.kcfg_explore, + counterexample_info=counterexample_info, + always_check_subsumption=always_check_subsumption, + fast_check_subsumption=fast_check_subsumption, + ) self.prover._check_all_terminals() self.max_iterations = max_iterations self.iterations = 0 diff --git a/src/tests/integration/proof/test_imp_parallel.py b/src/tests/integration/proof/test_imp_parallel.py index 0a8d2f3fa..4e2894083 100644 --- a/src/tests/integration/proof/test_imp_parallel.py +++ b/src/tests/integration/proof/test_imp_parallel.py @@ -107,7 +107,7 @@ def test_imp_parallel_prove( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, max_workers=2, - process_data=process_data, + process_data={'proof1': process_data}, ) assert len(list(results)) == 1 @@ -159,7 +159,7 @@ def test_imp_bmc_parallel_prove( proofs={'proof1': proof}, provers={'proof1': parallel_prover}, max_workers=2, - process_data=process_data, + process_data={'proof1': process_data}, ) assert len(list(results)) == 1 diff --git a/src/tests/integration/proof/test_parallel_prove.py b/src/tests/integration/proof/test_parallel_prove.py index 28083bab1..4515cdfcb 100644 --- a/src/tests/integration/proof/test_parallel_prove.py +++ b/src/tests/integration/proof/test_parallel_prove.py @@ -93,7 +93,7 @@ def commit(self, proof: TreeExploreProof, update: int) -> None: def test_parallel_prove() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, set()) - results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) + results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data={}) assert len(list(results)) == 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.PASSED @@ -102,7 +102,7 @@ def test_parallel_prove() -> None: def test_parallel_fail() -> None: prover = TreeExploreProver() proof = TreeExploreProof(0, 9, SIMPLE_TREE, {6}) - results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data=None) + results, _ = prove_parallel({'proof1': proof}, {'proof1': prover}, max_workers=2, process_data={}) assert len(list(results)) 
== 1 assert len(list(prover.steps(proof))) == 0 assert list(results)[0].status == ProofStatus.FAILED @@ -116,7 +116,7 @@ def test_parallel_multiple_proofs() -> None: proofs, provers_map, max_workers=4, - process_data=None, + process_data={}, ) assert len(list(results)) == 3 for proof in proofs.values(): From f36524ddd0d1e44ee97be902b1a9d6e04f150f46 Mon Sep 17 00:00:00 2001 From: devops Date: Wed, 14 Feb 2024 15:58:02 +0000 Subject: [PATCH 114/116] Set Version: 0.1.624 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 683a6c0a9..5844121ef 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.623' -release = '0.1.623' +version = '0.1.624' +release = '0.1.624' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index cc5737c83..89c6121ca 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.623 +0.1.624 diff --git a/pyproject.toml b/pyproject.toml index c701d7d49..71b05c9fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.623" +version = "0.1.624" description = "" authors = [ "Runtime Verification, Inc. ", From 1d38ad7f5abce105b07dbbb92f2a585395284370 Mon Sep 17 00:00:00 2001 From: Noah Watson Date: Thu, 15 Feb 2024 12:50:03 -0600 Subject: [PATCH 115/116] Readd queues system --- src/pyk/proof/parallel.py | 232 +++++++++++++++++++------------------- 1 file changed, 116 insertions(+), 116 deletions(-) diff --git a/src/pyk/proof/parallel.py b/src/pyk/proof/parallel.py index 46c305aaf..2cf9201d2 100644 --- a/src/pyk/proof/parallel.py +++ b/src/pyk/proof/parallel.py @@ -145,150 +145,150 @@ def run_process(self) -> None: self.out_queue.put((proof_id, update)) -# def prove_parallel( -# proofs: Mapping[str, Proof], -# provers: Mapping[str, Prover], -# process_data: Mapping[str, Any], -# max_workers: int, -# ) -> tuple[Iterable[Proof], ProfilingInfo]: -# explored: set[tuple[str, ProofStep]] = set() -# -# in_queue: Queue = Queue() -# out_queue: Queue = Queue() -# -# pending_jobs: int = 0 -# -# profile = ProfilingInfo() -# -# total_init_time = time.time_ns() -# -# def run_process(data: Any) -> None: -# while True: -# dequeued = in_queue.get() -# if dequeued == 0: -# break -# proof_id, proof_step = dequeued -# data = process_data.get(proof_id) -# update = proof_step.exec(data) -# out_queue.put((proof_id, update)) -# -# def submit(proof_id: str) -> None: -# proof = proofs[proof_id] -# prover = provers[proof_id] -# steps_init_time = time.time_ns() -# steps = prover.steps(proof) -# profile.total_steps_time += time.time_ns() - steps_init_time -# for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) 
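At its core, the queue system reinstated by this commit is a pool of worker processes that block on one shared input queue and report results on an output queue, with the integer 0 used as a shutdown sentinel (one sentinel is enqueued per worker). A condensed sketch of that loop, assuming the multiprocessing wiring set up in prove_parallel below; the name worker and its explicit parameters are illustrative stand-ins for the run_process closure:

    from collections.abc import Mapping
    from multiprocessing import Queue
    from typing import Any

    def worker(in_queue: Queue, out_queue: Queue, process_data: Mapping[str, Any]) -> None:
        while True:
            dequeued = in_queue.get()          # blocks until a (proof_id, step) pair or the sentinel arrives
            if dequeued == 0:
                break                          # shutdown sentinel
            proof_id, proof_step = dequeued
            data = process_data.get(proof_id)  # per-proof process data, looked up by proof id
            out_queue.put((proof_id, proof_step.exec(data)))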
-# if (proof_id, step) in explored: -# continue -# explored.add((proof_id, step)) -# in_queue.put((proof_id, step)) -# nonlocal pending_jobs -# pending_jobs += 1 -# -# processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] -# for process in processes: -# process.start() -# -# for proof_id in proofs.keys(): -# submit(proof_id) -# -# while pending_jobs > 0: -# wait_init_time = time.time_ns() -# proof_id, update = out_queue.get() -# profile.total_wait_time += time.time_ns() - wait_init_time -# pending_jobs -= 1 -# -# proof = proofs[proof_id] -# prover = provers[proof_id] -# -# commit_init_time = time.time_ns() -# prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) -# profile.total_commit_time += time.time_ns() - commit_init_time -# -# match proof.status: -# # terminate on first failure, yield partial results, etc. -# case ProofStatus.FAILED: -# ... -# case ProofStatus.PENDING: -# steps_init_time = time.time_ns() -# if not list(prover.steps(proof)): -# raise ValueError('Prover violated expectation. status is pending with no further steps.') -# profile.total_steps_time += time.time_ns() - steps_init_time -# case ProofStatus.PASSED: -# steps_init_time = time.time_ns() -# if list(prover.steps(proof)): -# raise ValueError('Prover violated expectation. status is passed with further steps.') -# profile.total_steps_time += time.time_ns() - steps_init_time -# -# submit(proof_id) -# -# for _ in range(max_workers): -# in_queue.put(0) -# -# for process in processes: -# process.join() -# -# profile.total_time = time.time_ns() - total_init_time -# -# return proofs.values(), profile - - def prove_parallel( proofs: Mapping[str, Proof], provers: Mapping[str, Prover], process_data: Mapping[str, Any], max_workers: int, -) -> Iterable[Proof]: - pending: dict[Future[Any], str] = {} +) -> tuple[Iterable[Proof], ProfilingInfo]: explored: set[tuple[str, ProofStep]] = set() - def submit(proof_id: str, pool: SemanticsProcessPool) -> None: + in_queue: Queue = Queue() + out_queue: Queue = Queue() + + pending_jobs: int = 0 + + profile = ProfilingInfo() + + total_init_time = time.time_ns() + + def run_process(data: Any) -> None: + while True: + dequeued = in_queue.get() + if dequeued == 0: + break + proof_id, proof_step = dequeued + data = process_data.get(proof_id) + update = proof_step.exec(data) + out_queue.put((proof_id, update)) + + def submit(proof_id: str) -> None: proof = proofs[proof_id] prover = provers[proof_id] - for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) + steps_init_time = time.time_ns() + steps = prover.steps(proof) + profile.total_steps_time += time.time_ns() - steps_init_time + for step in steps: # <-- get next steps (represented by e.g. pending nodes, ...) 
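            # prover.steps(proof) can return a step again on later calls (until its result
            # has been committed), so the explored set makes this resubmission loop idempotent;
            # each pair that actually gets enqueued increments pending_jobs by one.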
if (proof_id, step) in explored: continue explored.add((proof_id, step)) - future = pool.submit((proof_id, step)) # <-- schedule steps for execution - pending[future] = proof_id + in_queue.put((proof_id, step)) + nonlocal pending_jobs + pending_jobs += 1 - pool = SemanticsProcessPool(max_workers=max_workers) - # with ProcessPoolExecutor(max_workers=max_workers) as pool: - for proof_id in proofs: - submit(proof_id, pool) + processes = [Process(target=run_process, args=(process_data,)) for _ in range(max_workers)] + for process in processes: + process.start() - while pending: - done, _ = wait(pending, return_when='FIRST_COMPLETED') - future = done.pop() + for proof_id in proofs.keys(): + submit(proof_id) + + while pending_jobs > 0: + wait_init_time = time.time_ns() + proof_id, update = out_queue.get() + profile.total_wait_time += time.time_ns() - wait_init_time + pending_jobs -= 1 - proof_id = pending[future] proof = proofs[proof_id] prover = provers[proof_id] - try: - update = future.result() - except CancelledError as err: - raise RuntimeError(f'Task was cancelled for proof {proof_id}') from err - except TimeoutError as err: - raise RuntimeError( - f"Future for proof {proof_id} was not finished executing and timed out. This shouldn't happen since this future was already waited on." - ) from err - except Exception as err: - raise RuntimeError('Exception was raised in ProofStep.exec() for proof {proof_id}.') from err + commit_init_time = time.time_ns() prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) + profile.total_commit_time += time.time_ns() - commit_init_time match proof.status: # terminate on first failure, yield partial results, etc. case ProofStatus.FAILED: ... case ProofStatus.PENDING: + steps_init_time = time.time_ns() if not list(prover.steps(proof)): raise ValueError('Prover violated expectation. status is pending with no further steps.') + profile.total_steps_time += time.time_ns() - steps_init_time case ProofStatus.PASSED: + steps_init_time = time.time_ns() if list(prover.steps(proof)): raise ValueError('Prover violated expectation. status is passed with further steps.') + profile.total_steps_time += time.time_ns() - steps_init_time + + submit(proof_id) + + for _ in range(max_workers): + in_queue.put(0) + + for process in processes: + process.join() + + profile.total_time = time.time_ns() - total_init_time + + return proofs.values(), profile - submit(proof_id, pool) - pending.pop(future) - return proofs.values() + +# def prove_parallel( +# proofs: Mapping[str, Proof], +# provers: Mapping[str, Prover], +# process_data: Mapping[str, Any], +# max_workers: int, +# ) -> Iterable[Proof]: +# pending: dict[Future[Any], str] = {} +# explored: set[tuple[str, ProofStep]] = set() +# +# def submit(proof_id: str, pool: SemanticsProcessPool) -> None: +# proof = proofs[proof_id] +# prover = provers[proof_id] +# for step in prover.steps(proof): # <-- get next steps (represented by e.g. pending nodes, ...) 
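Callers drive the reinstated queue-based prove_parallel above the same way the integration tests updated earlier in this series do. A minimal usage sketch, assuming proof, parallel_prover and process_data are constructed as in test_imp_parallel.py ('proof1' is just an arbitrary proof id):

    results, profile = prove_parallel(
        proofs={'proof1': proof},
        provers={'proof1': parallel_prover},
        process_data={'proof1': process_data},  # per-proof worker data; pass {} when none is needed
        max_workers=2,
    )
    assert all(p.status == ProofStatus.PASSED for p in results)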
+# if (proof_id, step) in explored: +# continue +# explored.add((proof_id, step)) +# future = pool.submit((proof_id, step)) # <-- schedule steps for execution +# pending[future] = proof_id +# +# pool = SemanticsProcessPool(max_workers=max_workers) +# # with ProcessPoolExecutor(max_workers=max_workers) as pool: +# for proof_id in proofs: +# submit(proof_id, pool) +# +# while pending: +# done, _ = wait(pending, return_when='FIRST_COMPLETED') +# future = done.pop() +# +# proof_id = pending[future] +# proof = proofs[proof_id] +# prover = provers[proof_id] +# try: +# update = future.result() +# except CancelledError as err: +# raise RuntimeError(f'Task was cancelled for proof {proof_id}') from err +# except TimeoutError as err: +# raise RuntimeError( +# f"Future for proof {proof_id} was not finished executing and timed out. This shouldn't happen since this future was already waited on." +# ) from err +# except Exception as err: +# raise RuntimeError('Exception was raised in ProofStep.exec() for proof {proof_id}.') from err +# +# prover.commit(proof, update) # <-- update the proof (can be in-memory, access disk with locking, ...) +# +# match proof.status: +# # terminate on first failure, yield partial results, etc. +# case ProofStatus.FAILED: +# ... +# case ProofStatus.PENDING: +# if not list(prover.steps(proof)): +# raise ValueError('Prover violated expectation. status is pending with no further steps.') +# case ProofStatus.PASSED: +# if list(prover.steps(proof)): +# raise ValueError('Prover violated expectation. status is passed with further steps.') +# +# submit(proof_id, pool) +# pending.pop(future) +# return proofs.values() From e545bd8a76752732225674a043e5d262443d629e Mon Sep 17 00:00:00 2001 From: devops Date: Thu, 15 Feb 2024 18:51:30 +0000 Subject: [PATCH 116/116] Set Version: 0.1.628 --- docs/conf.py | 4 ++-- package/version | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d4af59a9c..a2730dc62 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,8 +9,8 @@ project = 'pyk' author = 'Runtime Verification, Inc' copyright = '2024, Runtime Verification, Inc' -version = '0.1.627' -release = '0.1.627' +version = '0.1.628' +release = '0.1.628' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/package/version b/package/version index 83ea503f0..1fa102d5a 100644 --- a/package/version +++ b/package/version @@ -1 +1 @@ -0.1.627 +0.1.628 diff --git a/pyproject.toml b/pyproject.toml index 04c16c9a1..853405d80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pyk" -version = "0.1.627" +version = "0.1.628" description = "" authors = [ "Runtime Verification, Inc. ",