diff --git a/.github/workflows/commitlint.yaml b/.github/workflows/commitlint.yaml
index bd6c21f349..9be589907c 100644
--- a/.github/workflows/commitlint.yaml
+++ b/.github/workflows/commitlint.yaml
@@ -1,28 +1,30 @@
-on: push
+name: Validate PR metadata
 
-name: Commit Message
+on:
+  pull_request:
+    types:
+      - opened
+      - edited
+      - synchronize
 
-# NOTE: Skip check on PR so as not to confuse contributors
-# NOTE: Also install a PR title checker so we don't mess up merges
-jobs:
-  commit-msg:
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
+permissions:
+  pull-requests: read
 
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-
-      - name: Install Dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r cz-requirement.txt
+jobs:
+  validate-pr:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: amannn/action-semantic-pull-request@v5
+        name: Run conventional commit checker
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Check commit history
-        run: cz check --rev-range $(git rev-list --all --reverse | head -1)..HEAD
+        with:
+          types: |
+            feat
+            perf
+            fix
+            chore
+            refactor
+            docs
+            test
diff --git a/.github/workflows/prtitle.yaml b/.github/workflows/prtitle.yaml
deleted file mode 100644
index 62b1b0a4bb..0000000000
--- a/.github/workflows/prtitle.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: PR Title
-
-on:
-  pull_request_target:
-    types:
-      - opened
-      - edited
-      - synchronize
-
-jobs:
-  pr-title:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-
-      - name: Install Dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r cz-requirement.txt
-
-      - name: Check PR Title
-        env:
-          TITLE: ${{ github.event.pull_request.title }}
-        run: cz check --message "$TITLE"
diff --git a/cz-requirement.txt b/cz-requirement.txt
deleted file mode 100644
index 0933570909..0000000000
--- a/cz-requirement.txt
+++ /dev/null
@@ -1 +0,0 @@
-commitizen>=2.40,<2.41
diff --git a/docs/userguides/config.md b/docs/userguides/config.md
index 0eb62f5897..768ae758b7 100644
--- a/docs/userguides/config.md
+++ b/docs/userguides/config.md
@@ -37,9 +37,58 @@ plugin:
 
 This helps keep your secrets out of Ape!
 
+Similarly, any config key can also be set with a correspondingly-named environment variable (with a prefix).
+
+If a configuration is left unset (i.e., not included in the `ape-config.(yaml|json|toml)` file), Ape will inspect the environment variables as a fallback, following the pattern `APE_<PLUGIN>_<SETTING>`, where different plugins define different prefixes.
+
+For example, the following config:
+
+```yaml
+contracts_folder: src/contracts
+test:
+  number_of_accounts: 3
+  show_internal: True
+compile:
+  exclude:
+    - "one"
+    - "two"
+    - "three"
+  include_dependencies: true
+```
+
+could be entirely defined with environment variables as follows:
+
+```shell
+APE_CONTRACTS_FOLDER=src/contracts
+APE_TEST_NUMBER_OF_ACCOUNTS=3
+APE_TEST_SHOW_INTERNAL=true
+APE_COMPILE_EXCLUDE='["one", "two", "three"]'
+APE_COMPILE_INCLUDE_DEPENDENCIES=true
+```
+
+Notice that the `ape-compile` and `ape-test` plugins include their plugin names in the prefixes (`APE_COMPILE` and `APE_TEST` respectively), whereas `contracts_folder` only has the prefix `APE_` since it is not part of a plugin.
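+
+Under the hood, each prefix comes from the plugin's config class.
+The sketch below shows how a plugin could wire this up; the `myplugin` name and the `retries` field are illustrative only, not a real plugin:
+
+```python
+from pydantic_settings import SettingsConfigDict
+
+from ape.api.config import PluginConfig
+
+
+class MyPluginConfig(PluginConfig):
+    # Hypothetical setting; overridable via the APE_MYPLUGIN_RETRIES environment variable.
+    retries: int = 3
+
+    # "extra" permits unknown keys; "env_prefix" enables the environment-variable fallback.
+    model_config = SettingsConfigDict(extra="allow", env_prefix="APE_MYPLUGIN_")
+```
+
+With that in place, setting `APE_MYPLUGIN_RETRIES=5` in the environment would override the default.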
+
+Here is the complete list of supported prefixes that come with Ape out-of-the-box:
+
+| Module/Plugin | Prefix       |
+| ------------- | ------------ |
+| ape           | APE          |
+| ape_cache     | APE_CACHE    |
+| ape_compile   | APE_COMPILE  |
+| ape_console   | APE_CONSOLE  |
+| ape_ethereum  | APE_ETHEREUM |
+| ape_networks  | APE_NETWORKS |
+| ape_node      | APE_NODE     |
+| ape_test      | APE_TEST     |
+
+Each plugin outside the core package may define its own prefix, but the standard is `APE_PLUGINNAME_`.
+
+Using environment variables assists in keeping secrets out of your config files.
+However, the primary config should be file-driven and environment variables should only be used when necessary.
+
 ## Base Path
 
-Change the base path if it is different than your project root.
+Change the base path if it is different from your project root.
 For example, imagine a project structure like:
 
 ```
@@ -170,33 +219,28 @@ contract = project.MyContract.deployments[0]
 Ape does not add or edit deployments in your `ape-config.yaml` file.
 ```
 
-## Node
+## Name
 
-When using the `node` provider, you can customize its settings.
-For example, to change the URI for an Ethereum network, do:
+Configure the name of the project:
 
 ```toml
-[tool.ape.node.ethereum.mainnet]
-uri = "http://localhost:5030"
+[tool.ape]
+name = "ape-project"
 ```
 
-Or the equivalent YAML:
+If the name is not specified in `tool.ape` but is in `project`, Ape will use that as the project name:
 
-```yaml
-node:
-  ethereum:
-    mainnet:
-      uri: http://localhost:5030
+```toml
+[project]
+name = "ape-project"
 ```
 
-Now, the `ape-node` core plugin will use the URL `http://localhost:5030` to connect and make requests.
+To configure this name using an `ape-config.yaml` file, do:
 
-```{warning}
-Instead of using `ape-node` to connect to an Infura or Alchemy node, use the [ape-infura](https://github.com/ApeWorX/ape-infura) or [ape-alchemy](https://github.com/ApeWorX/ape-alchemy) provider plugins instead, which have their own way of managing API keys via environment variables.
+```yaml
+name: ape-project
 ```
 
-For more information on networking as a whole, see [this guide](./networks.html).
-
 ## Networks
 
 Set default network and network providers:
@@ -246,6 +290,33 @@ ethereum:
 For the local network configuration, the default is `"max"`.
 Otherwise, it is `"auto"`.
 
+## Node
+
+When using the `node` provider, you can customize its settings.
+For example, to change the URI for an Ethereum network, do:
+
+```toml
+[tool.ape.node.ethereum.mainnet]
+uri = "http://localhost:5030"
+```
+
+Or the equivalent YAML:
+
+```yaml
+node:
+  ethereum:
+    mainnet:
+      uri: http://localhost:5030
+```
+
+Now, the `ape-node` core plugin will use the URL `http://localhost:5030` to connect and make requests.
+
+```{warning}
+Instead of using `ape-node` to connect to an Infura or Alchemy node, use the [ape-infura](https://github.com/ApeWorX/ape-infura) or [ape-alchemy](https://github.com/ApeWorX/ape-alchemy) provider plugins instead, which have their own way of managing API keys via environment variables.
+```
+
+For more information on networking as a whole, see [this guide](./networks.html).
+
 ## Plugins
 
 Set which `ape` plugins you want to always use.
diff --git a/pyproject.toml b/pyproject.toml
index 2c8e213a9b..11702954e0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,6 +34,7 @@ norecursedirs = "projects"
 
 # And 'pytest_ethereum' is not used and causes issues in some environments.
addopts = """ -p no:pytest_ethereum +-p no:boa_test """ python_files = "test_*.py" diff --git a/setup.py b/setup.py index e4ddb91fb2..3e010e8722 100644 --- a/setup.py +++ b/setup.py @@ -49,8 +49,7 @@ "twine==3.8.0", # Package upload tool ], "dev": [ - # commitizen: Manage commits and publishing releases - (_HERE / "cz-requirement.txt").read_text().strip(), + "commitizen>=2.40,<2.41", # Semantic commit linting "pre-commit", # Ensure that linters are run prior to committing "pytest-watch", # `ptw` test watcher/runner "ipdb", # Debugger (Must use `export PYTHONBREAKPOINT=ipdb.set_trace`) diff --git a/src/ape/api/address.py b/src/ape/api/address.py index 7ab52a81c3..5aa9432e49 100644 --- a/src/ape/api/address.py +++ b/src/ape/api/address.py @@ -150,9 +150,8 @@ def code(self) -> "ContractCode": """ The raw bytes of the smart-contract code at the address. """ - - # TODO: Explore caching this (based on `self.provider.network` and examining code) - return self.provider.get_code(self.address) + # NOTE: Chain manager handles code caching. + return self.chain_manager.get_code(self.address) @property def codesize(self) -> int: diff --git a/src/ape/api/config.py b/src/ape/api/config.py index 141b226d02..7ae7640140 100644 --- a/src/ape/api/config.py +++ b/src/ape/api/config.py @@ -66,7 +66,7 @@ class PluginConfig(BaseSettings): a config API must register a subclass of this class. """ - model_config = SettingsConfigDict(extra="allow") + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_") @classmethod def from_overrides( @@ -285,7 +285,7 @@ class ApeConfig(ExtraAttributesMixin, BaseSettings, ManagerAccessMixin): def __init__(self, *args, **kwargs): project_path = kwargs.get("project") - super(BaseSettings, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) # NOTE: Cannot reference `self` at all until after super init. self._project_path = project_path @@ -350,7 +350,7 @@ def __init__(self, *args, **kwargs): """ # NOTE: Plugin configs are technically "extras". - model_config = SettingsConfigDict(extra="allow") + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_") @model_validator(mode="before") @classmethod diff --git a/src/ape/api/networks.py b/src/ape/api/networks.py index 6855cbce88..952a0f19e6 100644 --- a/src/ape/api/networks.py +++ b/src/ape/api/networks.py @@ -153,7 +153,6 @@ def custom_network(self) -> "NetworkAPI": A :class:`~ape.api.networks.NetworkAPI` for custom networks where the network is either not known, unspecified, or does not have an Ape plugin. """ - ethereum_class = None for plugin_name, ecosystem_class in self.plugin_manager.ecosystems: if plugin_name == "ethereum": @@ -1005,7 +1004,6 @@ def chain_id(self) -> int: **NOTE**: Unless overridden, returns same as :py:attr:`ape.api.providers.ProviderAPI.chain_id`. """ - return self.provider.chain_id @property @@ -1131,6 +1129,13 @@ def is_adhoc(self) -> bool: """ return self.name == "custom" and not self._is_custom + @property + def is_custom(self) -> bool: + """ + True when this network is a configured custom network. 
+ """ + return self._is_custom + @cached_property def providers(self): # -> dict[str, Partial[ProviderAPI]] """ @@ -1162,6 +1167,8 @@ def providers(self): # -> dict[str, Partial[ProviderAPI]] (self.is_fork and "Fork" in provider_class.__name__) or (not self.is_fork and "Fork" not in provider_class.__name__) ) + and provider_class.__name__ + == "Node" # Ensure uses Node class instead of GethDev ) ): # NOTE: Lazily load provider config diff --git a/src/ape/exceptions.py b/src/ape/exceptions.py index 7ac9d177b7..262a214d3f 100644 --- a/src/ape/exceptions.py +++ b/src/ape/exceptions.py @@ -303,7 +303,7 @@ def __init__( source_traceback: _SOURCE_TRACEBACK_ARG = None, base_err: Optional[Exception] = None, project: Optional["ProjectManager"] = None, - set_ape_traceback: bool = True, # Overriden default. + set_ape_traceback: bool = True, # Overridden default. ): self.txn = txn self.contract_address = contract_address diff --git a/src/ape/managers/_contractscache.py b/src/ape/managers/_contractscache.py index fe5677bc3e..d83a484250 100644 --- a/src/ape/managers/_contractscache.py +++ b/src/ape/managers/_contractscache.py @@ -270,13 +270,22 @@ def _delete_proxy(self, address: AddressType): def __contains__(self, address: AddressType) -> bool: return self.get(address) is not None - def cache_deployment(self, contract_instance: ContractInstance): + def cache_deployment( + self, + contract_instance: ContractInstance, + proxy_info: Optional[ProxyInfoAPI] = None, + detect_proxy: bool = True, + ): """ Cache the given contract instance's type and deployment information. Args: contract_instance (:class:`~ape.contracts.base.ContractInstance`): The contract to cache. + proxy_info (Optional[ProxyInfoAPI]): Pass in the proxy info, if it is known, to + avoid the potentially expensive look-up. + detect_proxy (bool): Set to ``False`` to avoid detecting if the contract is a + proxy. """ address = contract_instance.address contract_type = contract_instance.contract_type # may be a proxy @@ -285,24 +294,22 @@ def cache_deployment(self, contract_instance: ContractInstance): # in case it is needed somewhere. It may get overridden. self.contract_types.memory[address] = contract_type - if proxy_info := self.provider.network.ecosystem.get_proxy_info(address): - # The user is caching a deployment of a proxy with the target already set. - self.cache_proxy_info(address, proxy_info) - if implementation_contract := self.get(proxy_info.target): - updated_proxy_contract = _get_combined_contract_type( - contract_type, proxy_info, implementation_contract - ) - self.contract_types[address] = updated_proxy_contract + if proxy_info: + # Was given proxy info. + self._cache_proxy_contract(address, proxy_info, contract_type, contract_instance) - # Use this contract type in the user's contract instance. - contract_instance.contract_type = updated_proxy_contract + elif detect_proxy: + # Proxy info was not provided. Use the connected ecosystem to figure it out. + if proxy_info := self.provider.network.ecosystem.get_proxy_info(address): + # The user is caching a deployment of a proxy with the target already set. + self._cache_proxy_contract(address, proxy_info, contract_type, contract_instance) else: - # No implementation yet. Just cache proxy. + # Cache as normal. self.contract_types[address] = contract_type else: - # Regular contract. Cache normally. + # Cache as normal; do not do expensive proxy detection. self.contract_types[address] = contract_type # Cache the deployment now. 
@@ -312,6 +319,26 @@ def cache_deployment(self, contract_instance: ContractInstance): return contract_type + def _cache_proxy_contract( + self, + address: AddressType, + proxy_info: ProxyInfoAPI, + contract_type: ContractType, + contract_instance: ContractInstance, + ): + self.cache_proxy_info(address, proxy_info) + if implementation_contract := self.get(proxy_info.target): + updated_proxy_contract = _get_combined_contract_type( + contract_type, proxy_info, implementation_contract + ) + self.contract_types[address] = updated_proxy_contract + + # Use this contract type in the user's contract instance. + contract_instance.contract_type = updated_proxy_contract + else: + # No implementation yet. Just cache proxy. + self.contract_types[address] = contract_type + def cache_proxy_info(self, address: AddressType, proxy_info: ProxyInfoAPI): """ Cache proxy info for a particular address, useful for plugins adding already @@ -492,6 +519,8 @@ def get( address: AddressType, default: Optional[ContractType] = None, fetch_from_explorer: bool = True, + proxy_info: Optional[ProxyInfoAPI] = None, + detect_proxy: bool = True, ) -> Optional[ContractType]: """ Get a contract type by address. @@ -506,6 +535,9 @@ def get( fetch_from_explorer (bool): Set to ``False`` to avoid fetching from an explorer. Defaults to ``True``. Only fetches if it needs to (uses disk & memory caching otherwise). + proxy_info (Optional[ProxyInfoAPI]): Pass in the proxy info, if it is known, + to avoid the potentially expensive look-up. + detect_proxy (bool): Set to ``False`` to avoid detecting if it is a proxy. Returns: Optional[ContractType]: The contract type if it was able to get one, @@ -531,13 +563,14 @@ def get( else: # Contract is not cached yet. Check broader sources, such as an explorer. - # First, detect if this is a proxy. - if not (proxy_info := self.proxy_infos[address_key]): - if proxy_info := self.provider.network.ecosystem.get_proxy_info(address_key): - self.proxy_infos[address_key] = proxy_info + if not proxy_info and detect_proxy: + # Proxy info not provided. Attempt to detect. + if not (proxy_info := self.proxy_infos[address_key]): + if proxy_info := self.provider.network.ecosystem.get_proxy_info(address_key): + self.proxy_infos[address_key] = proxy_info if proxy_info: - # Contract is a proxy. + # Contract is a proxy (either was detected or provided). implementation_contract_type = self.get(proxy_info.target, default=default) proxy_contract_type = ( self._get_contract_type_from_explorer(address_key) @@ -554,12 +587,6 @@ def get( self.contract_types[address_key] = contract_type_to_cache return contract_type_to_cache - if not self.provider.get_code(address_key): - if default: - self.contract_types[address_key] = default - - return default - # Also gets cached to disk for faster lookup next time. if fetch_from_explorer: contract_type = self._get_contract_type_from_explorer(address_key) @@ -594,6 +621,8 @@ def instance_at( txn_hash: Optional[Union[str, "HexBytes"]] = None, abi: Optional[Union[list[ABI], dict, str, Path]] = None, fetch_from_explorer: bool = True, + proxy_info: Optional[ProxyInfoAPI] = None, + detect_proxy: bool = True, ) -> ContractInstance: """ Get a contract at the given address. If the contract type of the contract is known, @@ -618,6 +647,9 @@ def instance_at( fetch_from_explorer (bool): Set to ``False`` to avoid fetching from the explorer. Defaults to ``True``. Won't fetch unless it needs to (uses disk & memory caching first). 
+ proxy_info (Optional[ProxyInfoAPI]): Pass in the proxy info, if it is known, to avoid + the potentially expensive look-up. + detect_proxy (bool): Set to ``False`` to avoid detecting if the contract is a proxy. Returns: :class:`~ape.contracts.base.ContractInstance` @@ -640,7 +672,11 @@ def instance_at( try: # Always attempt to get an existing contract type to update caches contract_type = self.get( - contract_address, default=contract_type, fetch_from_explorer=fetch_from_explorer + contract_address, + default=contract_type, + fetch_from_explorer=fetch_from_explorer, + proxy_info=proxy_info, + detect_proxy=detect_proxy, ) except Exception as err: if contract_type or abi: diff --git a/src/ape/managers/accounts.py b/src/ape/managers/accounts.py index e45939204e..69ebc31906 100644 --- a/src/ape/managers/accounts.py +++ b/src/ape/managers/accounts.py @@ -41,8 +41,7 @@ class TestAccountManager(list, ManagerAccessMixin): @log_instead_of_fail(default="") def __repr__(self) -> str: - accounts_str = ", ".join([a.address for a in self.accounts]) - return f"[{accounts_str}]" + return f"" @cached_property def containers(self) -> dict[str, TestAccountContainerAPI]: @@ -54,6 +53,13 @@ def containers(self) -> dict[str, TestAccountContainerAPI]: for plugin_name, (container_type, account_type) in account_types } + @property + def hd_path(self) -> str: + """ + The HD path used for generating the test accounts. + """ + return self.config_manager.get_config("test").hd_path + @property def accounts(self) -> Iterator[AccountAPI]: for container in self.containers.values(): @@ -76,15 +82,14 @@ def __getitem__(self, account_id): @__getitem__.register def __getitem_int(self, account_id: int): - if account_id in self._accounts_by_index: - return self._accounts_by_index[account_id] - - original_account_id = account_id if account_id < 0: account_id = len(self) + account_id + if account_id in self._accounts_by_index: + return self._accounts_by_index[account_id] + account = self.containers["test"].get_test_account(account_id) - self._accounts_by_index[original_account_id] = account + self._accounts_by_index[account_id] = account return account @__getitem__.register @@ -265,7 +270,7 @@ def __iter__(self) -> Iterator[AccountAPI]: @log_instead_of_fail(default="") def __repr__(self) -> str: - return "[" + ", ".join(repr(a) for a in self) + "]" + return "" @cached_property def test_accounts(self) -> TestAccountManager: diff --git a/src/ape/managers/chain.py b/src/ape/managers/chain.py index 0a1866a4b6..c4e640554e 100644 --- a/src/ape/managers/chain.py +++ b/src/ape/managers/chain.py @@ -37,8 +37,7 @@ if TYPE_CHECKING: from rich.console import Console as RichConsole - from ape.types.trace import GasReport, SourceTraceback - from ape.types.vm import SnapshotID + from ape.types import BlockID, ContractCode, GasReport, SnapshotID, SourceTraceback class BlockContainer(BaseManager): @@ -703,7 +702,7 @@ def _get_console(self, *args, **kwargs): class ChainManager(BaseManager): """ A class for managing the state of the active blockchain. - Also handy for querying data about the chain and managing local caches. + Also, handy for querying data about the chain and managing local caches. Access the chain manager singleton from the root ``ape`` namespace. 
Usage example:: @@ -716,6 +715,7 @@ class ChainManager(BaseManager): _block_container_map: dict[int, BlockContainer] = {} _transaction_history_map: dict[int, TransactionHistory] = {} _reports: ReportManager = ReportManager() + _code: dict[str, dict[str, dict[AddressType, "ContractCode"]]] = {} @cached_property def contracts(self) -> ContractCache: @@ -757,7 +757,6 @@ def chain_id(self) -> int: The blockchain ID. See `ChainList `__ for a comprehensive list of IDs. """ - network_name = self.provider.network.name if network_name not in self._chain_id_map: self._chain_id_map[network_name] = self.provider.chain_id @@ -966,3 +965,26 @@ def get_receipt(self, transaction_hash: str) -> ReceiptAPI: raise TransactionNotFoundError(transaction_hash=transaction_hash) return receipt + + def get_code( + self, address: AddressType, block_id: Optional["BlockID"] = None + ) -> "ContractCode": + network = self.provider.network + + # Two reasons to avoid caching: + # 1. dev networks - chain isolation makes this mess up + # 2. specifying block_id= kwarg - likely checking if code + # exists at the time and shouldn't use cache. + skip_cache = network.is_dev or block_id is not None + if skip_cache: + return self.provider.get_code(address, block_id=block_id) + + self._code.setdefault(network.ecosystem.name, {}) + self._code[network.ecosystem.name].setdefault(network.name, {}) + if address in self._code[network.ecosystem.name][network.name]: + return self._code[network.ecosystem.name][network.name][address] + + # Get from RPC for the first time AND use cache. + code = self.provider.get_code(address) + self._code[network.ecosystem.name][network.name][address] = code + return code diff --git a/src/ape/managers/compilers.py b/src/ape/managers/compilers.py index b37a2cefec..38725c674c 100644 --- a/src/ape/managers/compilers.py +++ b/src/ape/managers/compilers.py @@ -300,7 +300,7 @@ def get_custom_error(self, err: ContractLogicError) -> Optional[CustomError]: """ Get a custom error for the given contract logic error using the contract-type found from address-data in the error. Returns ``None`` if the given error is - not a custom-error or it is not able to find the associated contract type or + not a custom-error, or it is not able to find the associated contract type or address. Args: diff --git a/src/ape/managers/networks.py b/src/ape/managers/networks.py index 29322ca24b..b7153be94f 100644 --- a/src/ape/managers/networks.py +++ b/src/ape/managers/networks.py @@ -557,7 +557,7 @@ def get_provider_from_choice( default_network = self.default_ecosystem.default_network return default_network.get_provider(provider_settings=provider_settings) - elif _is_custom_network(network_choice): + elif _is_adhoc_url(network_choice): # Custom network w/o ecosystem & network spec. 
return self.create_custom_provider(network_choice) @@ -568,7 +568,7 @@ def get_provider_from_choice( provider_value = ":".join(selections[2:]) selections[2] = provider_value selections = selections[:3] - if _is_custom_network(provider_value): + if _is_adhoc_url(provider_value): selections[1] = selections[1] or "custom" if selections == network_choice or len(selections) == 1: @@ -754,7 +754,7 @@ def _validate_filter(arg: Optional[Union[list[str], str]], options: set[str]): return filters -def _is_custom_network(value: str) -> bool: +def _is_adhoc_url(value: str) -> bool: return ( value.startswith("http://") or value.startswith("https://") diff --git a/src/ape/types/coverage.py b/src/ape/types/coverage.py index 1179fe7758..631fea315e 100644 --- a/src/ape/types/coverage.py +++ b/src/ape/types/coverage.py @@ -1040,7 +1040,7 @@ def prettify(self, html_str: str) -> str: This is a custom method not part of the HTMLParser spec that ingests a coverage HTML str, handles the formatting, returns it, and resets this formatter's instance, so that the operation - is more functionable. + is more functional. """ self.feed(html_str) result = self.prettified_html diff --git a/src/ape/utils/misc.py b/src/ape/utils/misc.py index b369f7b1c5..695ce513a5 100644 --- a/src/ape/utils/misc.py +++ b/src/ape/utils/misc.py @@ -218,7 +218,14 @@ def load_config(path: Path, expand_envars=True, must_exist=False) -> dict: contents = expand_environment_variables(contents) if path.name == "pyproject.toml": - config = tomllib.loads(contents).get("tool", {}).get("ape", {}) + pyproject_toml = tomllib.loads(contents) + config = pyproject_toml.get("tool", {}).get("ape", {}) + + # Utilize [project] for some settings. + if project_settings := pyproject_toml.get("project"): + if "name" not in config and "name" in project_settings: + config["name"] = project_settings["name"] + elif path.suffix in (".json",): config = json.loads(contents) elif path.suffix in (".yml", ".yaml"): diff --git a/src/ape/utils/os.py b/src/ape/utils/os.py index 6a3a4213df..07419df681 100644 --- a/src/ape/utils/os.py +++ b/src/ape/utils/os.py @@ -1,6 +1,7 @@ import json import os import re +import stat import sys import tarfile import zipfile @@ -372,6 +373,14 @@ def extract_archive(archive_file: Path, destination: Optional[Path] = None): raise ValueError(f"Unsupported zip format: '{archive_file.suffix}'.") +def _remove_readonly(func, path, excinfo): + """ + Error handler for shutil.rmtree that handles removing read-only files. + """ + os.chmod(path, stat.S_IWRITE) + func(path) + + class CacheDirectory: """ A directory for caching data where each data item is named diff --git a/src/ape_cache/config.py b/src/ape_cache/config.py index 264516b738..dc45f7f1a3 100644 --- a/src/ape_cache/config.py +++ b/src/ape_cache/config.py @@ -1,5 +1,8 @@ +from pydantic_settings import SettingsConfigDict + from ape.api.config import PluginConfig class CacheConfig(PluginConfig): size: int = 1024**3 # 1gb + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_CACHE_") diff --git a/src/ape_compile/config.py b/src/ape_compile/config.py index 012e590b04..ef0c749f5f 100644 --- a/src/ape_compile/config.py +++ b/src/ape_compile/config.py @@ -3,6 +3,7 @@ from typing import Union from pydantic import field_serializer, field_validator +from pydantic_settings import SettingsConfigDict from ape.api.config import ConfigEnum, PluginConfig from ape.utils.misc import SOURCE_EXCLUDE_PATTERNS @@ -53,6 +54,8 @@ class Config(PluginConfig): Extra selections to output. 
Outputs to ``.build/{key.lower()}``. """ + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_COMPILE_") + @field_validator("exclude", mode="before") @classmethod def validate_exclude(cls, value): diff --git a/src/ape_console/config.py b/src/ape_console/config.py index 48b432ced2..9066867714 100644 --- a/src/ape_console/config.py +++ b/src/ape_console/config.py @@ -1,6 +1,10 @@ +from pydantic_settings import SettingsConfigDict + from ape.api.config import PluginConfig class ConsoleConfig(PluginConfig): plugins: list[str] = [] """Additional IPython plugins to include in your session.""" + + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_CONSOLE_") diff --git a/src/ape_ethereum/ecosystem.py b/src/ape_ethereum/ecosystem.py index 94aff193be..61efac483a 100644 --- a/src/ape_ethereum/ecosystem.py +++ b/src/ape_ethereum/ecosystem.py @@ -157,6 +157,8 @@ class NetworkConfig(PluginConfig): request_headers: dict = {} """Optionally config extra request headers whenever using this network.""" + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_ETHEREUM_") + @field_validator("gas_limit", mode="before") @classmethod def validate_gas_limit(cls, value): @@ -233,7 +235,7 @@ class BaseEthereumConfig(PluginConfig): # NOTE: This gets appended to Ape's root User-Agent string. request_headers: dict = {} - model_config = SettingsConfigDict(extra="allow") + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_ETHEREUM_") @model_validator(mode="before") @classmethod @@ -458,7 +460,7 @@ def encode_contract_blueprint( ) def get_proxy_info(self, address: AddressType) -> Optional[ProxyInfo]: - contract_code = self.provider.get_code(address) + contract_code = self.chain_manager.get_code(address) if isinstance(contract_code, bytes): contract_code = to_hex(contract_code) @@ -1150,12 +1152,15 @@ def _enrich_calltree(self, call: dict, **kwargs) -> dict: except KeyError: name = call["method_id"] else: - assert isinstance(method_abi, MethodABI) # For mypy - - # Check if method name duplicated. If that is the case, use selector. - times = len([x for x in contract_type.methods if x.name == method_abi.name]) - name = (method_abi.name if times == 1 else method_abi.selector) or call["method_id"] - call = self._enrich_calldata(call, method_abi, **kwargs) + if isinstance(method_abi, MethodABI): + # Check if method name duplicated. If that is the case, use selector. + times = len([x for x in contract_type.methods if x.name == method_abi.name]) + name = (method_abi.name if times == 1 else method_abi.selector) or call[ + "method_id" + ] + call = self._enrich_calldata(call, method_abi, **kwargs) + else: + name = call.get("method_id") or "0x" else: name = call.get("method_id") or "0x" diff --git a/src/ape_ethereum/provider.py b/src/ape_ethereum/provider.py index a5989084ad..da4a919805 100644 --- a/src/ape_ethereum/provider.py +++ b/src/ape_ethereum/provider.py @@ -572,13 +572,13 @@ def estimate_gas_cost(self, txn: TransactionAPI, block_id: Optional["BlockID"] = @cached_property def chain_id(self) -> int: default_chain_id = None - if self.network.name != "custom" and not self.network.is_dev: + if (not self.network.is_adhoc and self.network.is_custom) or not self.network.is_dev: # If using a live network, the chain ID is hardcoded. 
default_chain_id = self.network.chain_id try: if hasattr(self.web3, "eth"): - return self.web3.eth.chain_id + return self._get_chain_id() except ProviderNotConnectedError: if default_chain_id is not None: @@ -586,6 +586,14 @@ def chain_id(self) -> int: raise # Original error + except ValueError as err: + # Possible syncing error. + raise ProviderError( + err.args[0].get("message") + if all((hasattr(err, "args"), err.args, isinstance(err.args[0], dict))) + else "Error getting chain ID." + ) + if default_chain_id is not None: return default_chain_id @@ -606,6 +614,10 @@ def priority_fee(self) -> int: "eth_maxPriorityFeePerGas not supported in this RPC. Please specify manually." ) from err + def _get_chain_id(self) -> int: + result = self.make_request("eth_chainId", []) + return result if isinstance(result, int) else int(result, 16) + def get_block(self, block_id: "BlockID") -> BlockAPI: if isinstance(block_id, str) and block_id.isnumeric(): block_id = int(block_id) @@ -1603,15 +1615,7 @@ def _complete_connect(self): if not self.network.is_dev: self.web3.eth.set_gas_price_strategy(rpc_gas_price_strategy) - # Check for chain errors, including syncing - try: - chain_id = self.web3.eth.chain_id - except ValueError as err: - raise ProviderError( - err.args[0].get("message") - if all((hasattr(err, "args"), err.args, isinstance(err.args[0], dict))) - else "Error getting chain id." - ) + chain_id = self.chain_id # NOTE: We have to check both earliest and latest # because if the chain was _ever_ PoA, we need @@ -1627,7 +1631,7 @@ def _complete_connect(self): except Exception: # Some chains are "light" and we may not be able to detect - # if it need PoA middleware. + # if it needs PoA middleware. continue else: diff --git a/src/ape_ethereum/query.py b/src/ape_ethereum/query.py index 0cece4e740..7dd8a4f9ca 100644 --- a/src/ape_ethereum/query.py +++ b/src/ape_ethereum/query.py @@ -39,7 +39,7 @@ def perform_contract_creation_query( Find when a contract was deployed using binary search and block tracing. """ # skip the search if there is still no code at address at head - if not self.provider.get_code(query.contract): + if not self.chain_manager.get_code(query.contract): return None def find_creation_block(lo, hi): @@ -47,13 +47,13 @@ def find_creation_block(lo, hi): # takes log2(height), doesn't work with contracts that have been reinit. 
while hi - lo > 1: mid = (lo + hi) // 2 - code = self.provider.get_code(query.contract, block_id=mid) + code = self.chain_manager.get_code(query.contract, block_id=mid) if not code: lo = mid else: hi = mid - if self.provider.get_code(query.contract, block_id=hi): + if self.chain_manager.get_code(query.contract, block_id=hi): return hi return None diff --git a/src/ape_networks/config.py b/src/ape_networks/config.py index 381cd268b2..a519ed64fe 100644 --- a/src/ape_networks/config.py +++ b/src/ape_networks/config.py @@ -1,5 +1,7 @@ from typing import Optional +from pydantic_settings import SettingsConfigDict + from ape.api.config import PluginConfig @@ -26,6 +28,8 @@ class CustomNetwork(PluginConfig): request_header: dict = {} """The HTTP request header.""" + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_NETWORKS_") + @property def is_fork(self) -> bool: """ @@ -36,3 +40,4 @@ def is_fork(self) -> bool: class NetworksConfig(PluginConfig): custom: list[CustomNetwork] = [] + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_NETWORKS_") diff --git a/src/ape_node/provider.py b/src/ape_node/provider.py index f8af5eeb08..579306de8f 100644 --- a/src/ape_node/provider.py +++ b/src/ape_node/provider.py @@ -297,7 +297,7 @@ class EthereumNetworkConfig(PluginConfig): # Make sure to run via `geth --dev` (or similar) local: dict = {**DEFAULT_SETTINGS.copy(), "chain_id": DEFAULT_TEST_CHAIN_ID} - model_config = SettingsConfigDict(extra="allow") + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_NODE_") @field_validator("local", mode="before") @classmethod @@ -357,7 +357,7 @@ class EthereumNodeConfig(PluginConfig): Optionally specify request headers to use whenever using this provider. """ - model_config = SettingsConfigDict(extra="allow") + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_NODE_") @field_validator("call_trace_approach", mode="before") @classmethod diff --git a/src/ape_pm/dependency.py b/src/ape_pm/dependency.py index 6e0828c90f..657a6441c0 100644 --- a/src/ape_pm/dependency.py +++ b/src/ape_pm/dependency.py @@ -16,7 +16,7 @@ from ape.managers.project import _version_to_options from ape.utils._github import _GithubClient, github_client from ape.utils.basemodel import ManagerAccessMixin -from ape.utils.os import clean_path, extract_archive, get_package_path, in_tempdir +from ape.utils.os import _remove_readonly, clean_path, extract_archive, get_package_path, in_tempdir def _fetch_local(src: Path, destination: Path, config_override: Optional[dict] = None): @@ -204,8 +204,12 @@ def __repr__(self) -> str: def fetch(self, destination: Path): destination.parent.mkdir(exist_ok=True, parents=True) if ref := self.ref: + # NOTE: destination path should not exist at this point, + # so delete it in case it's left over from a failure. + if destination.is_dir(): + shutil.rmtree(destination) + # Fetch using git-clone approach (by git-reference). - # NOTE: destination path does not exist at this point. self._fetch_ref(ref, destination) else: # Fetch using Version API from GitHub. @@ -222,7 +226,8 @@ def fetch(self, destination: Path): # NOTE: When using ref-from-a-version, ensure # it didn't create the destination along the way; # else, the ref is cloned in the wrong spot. 
- shutil.rmtree(destination, ignore_errors=True) + if destination.is_dir(): + shutil.rmtree(destination, onerror=_remove_readonly) try: self._fetch_ref(version, destination) except Exception: diff --git a/src/ape_test/accounts.py b/src/ape_test/accounts.py index a0abea03a5..6f41dbe615 100644 --- a/src/ape_test/accounts.py +++ b/src/ape_test/accounts.py @@ -14,6 +14,7 @@ from ape.exceptions import ProviderNotConnectedError, SignatureError from ape.types.signatures import MessageSignature, TransactionSignature from ape.utils._web3_compat import sign_hash +from ape.utils.misc import log_instead_of_fail from ape.utils.testing import ( DEFAULT_NUMBER_OF_TEST_ACCOUNTS, DEFAULT_TEST_HD_PATH, @@ -81,7 +82,12 @@ def generate_account(self, index: Optional[int] = None) -> "TestAccountAPI": account = self.init_test_account( new_index, generated_account.address, generated_account.private_key ) - self.generated_accounts.append(account) + + # Only cache if being created outside the expected number of accounts. + # Else, ends up cached twice and caused logic problems elsewhere. + if new_index >= self.number_of_accounts: + self.generated_accounts.append(account) + return account @classmethod @@ -113,6 +119,10 @@ def alias(self) -> str: def address(self) -> "AddressType": return self.network_manager.ethereum.decode_address(self.address_str) + @log_instead_of_fail(default="") + def __repr__(self) -> str: + return f"<{self.__class__.__name__}_{self.index} {self.address_str}>" + def sign_message(self, msg: Any, **signer_options) -> Optional[MessageSignature]: # Convert str and int to SignableMessage if needed if isinstance(msg, str): diff --git a/src/ape_test/config.py b/src/ape_test/config.py index 3ce2609dfe..adb2541a6f 100644 --- a/src/ape_test/config.py +++ b/src/ape_test/config.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING, NewType, Optional, Union from pydantic import NonNegativeInt, field_validator +from pydantic_settings import SettingsConfigDict from ape.api.config import PluginConfig from ape.utils.basemodel import ManagerAccessMixin @@ -19,11 +20,13 @@ class EthTesterProviderConfig(PluginConfig): chain_id: int = DEFAULT_TEST_CHAIN_ID auto_mine: bool = True + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") class GasExclusion(PluginConfig): contract_name: str = "*" # If only given method, searches across all contracts. method_name: Optional[str] = None # By default, match all methods in a contract + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") CoverageExclusion = NewType("CoverageExclusion", GasExclusion) @@ -48,6 +51,8 @@ class GasConfig(PluginConfig): Report-types to use. Currently, only supports `terminal`. """ + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") + @field_validator("reports", mode="before") @classmethod def validate_reports(cls, values): @@ -89,6 +94,8 @@ class CoverageReportsConfig(PluginConfig): Set to ``True`` to generate HTML coverage reports. """ + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") + @property def has_any(self) -> bool: return any(x not in ({}, None, False) for x in (self.html, self.terminal, self.xml)) @@ -119,6 +126,8 @@ class CoverageConfig(PluginConfig): use ``prefix_*`` to skip all items with a certain prefix. 
""" + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") + class IsolationConfig(PluginConfig): enable_session: bool = True @@ -146,6 +155,8 @@ class IsolationConfig(PluginConfig): Set to ``False`` to disable function isolation. """ + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") + def get_isolation(self, scope: "Scope") -> bool: return getattr(self, f"enable_{scope.name.lower()}") @@ -209,6 +220,8 @@ class ApeTestConfig(PluginConfig): ``False`` to disable all and ``True`` (default) to disable all. """ + model_config = SettingsConfigDict(extra="allow", env_prefix="APE_TEST_") + @field_validator("balance", mode="before") @classmethod def validate_balance(cls, value): diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py index 85c013a710..c8ca706759 100644 --- a/tests/functional/conftest.py +++ b/tests/functional/conftest.py @@ -639,6 +639,7 @@ def fn(name="mock"): mock.tracked_settings = [] mock.ast = None mock.pcmap = None + mock.abi = [] def mock_compile(paths, project=None, settings=None): settings = settings or {} @@ -691,7 +692,7 @@ def create_mock_sepolia(ethereum, eth_tester_provider, vyper_contract_instance): @contextmanager def fn(): # Ensuring contract exists before hack. - # This allow the network to be past genesis which is more realistic. + # This allows the network to be past genesis which is more realistic. _ = vyper_contract_instance eth_tester_provider.network.name = "sepolia" yield eth_tester_provider.network diff --git a/tests/functional/geth/test_chain.py b/tests/functional/geth/test_chain.py new file mode 100644 index 0000000000..09bd82ab41 --- /dev/null +++ b/tests/functional/geth/test_chain.py @@ -0,0 +1,18 @@ +from tests.conftest import geth_process_test + + +@geth_process_test +def test_get_code(mocker, chain, geth_contract, mock_sepolia): + # NOTE: Using mock_sepolia because code doesn't get cached in local networks. + actual = chain.get_code(geth_contract.address) + expected = chain.provider.get_code(geth_contract.address) + assert actual == expected + + # Ensure uses cache (via not using provider). + provider_spy = mocker.spy(chain.provider.web3.eth, "get_code") + _ = chain.get_code(geth_contract.address) + assert provider_spy.call_count == 0 + + # block_id test, cache should interfere. + actual_2 = chain.get_code(geth_contract.address, block_id=0) + assert not actual_2 # Doesn't exist at block 0. diff --git a/tests/functional/geth/test_contracts_cache.py b/tests/functional/geth/test_contracts_cache.py index 92782c49fa..f95ddc0f3e 100644 --- a/tests/functional/geth/test_contracts_cache.py +++ b/tests/functional/geth/test_contracts_cache.py @@ -39,7 +39,7 @@ def get_contract_type(address, *args, **kwargs): raise ValueError("Fake explorer only knows about proxy and target contracts.") with create_mock_sepolia() as network: - # Setup our network to use our fake explorer. + # Set up our network to use our fake explorer. mock_explorer.get_contract_type.side_effect = get_contract_type network.__dict__["explorer"] = mock_explorer diff --git a/tests/functional/geth/test_provider.py b/tests/functional/geth/test_provider.py index 2af47b08ab..b2f32b8a57 100644 --- a/tests/functional/geth/test_provider.py +++ b/tests/functional/geth/test_provider.py @@ -240,7 +240,14 @@ def test_connect_to_chain_that_started_poa(mock_web3, web3_factory, ethereum): to fetch blocks during the PoA portion of the chain. 
""" mock_web3.eth.get_block.side_effect = ExtraDataLengthError - mock_web3.eth.chain_id = ethereum.sepolia.chain_id + + def make_request(rpc, arguments): + if rpc == "eth_chainId": + return {"result": ethereum.sepolia.chain_id} + + return None + + mock_web3.provider.make_request.side_effect = make_request web3_factory.return_value = mock_web3 provider = ethereum.sepolia.get_provider("node") provider.provider_settings = {"uri": "http://node.example.com"} # fake diff --git a/tests/functional/test_accounts.py b/tests/functional/test_accounts.py index 9f330de27f..a66c8a02ae 100644 --- a/tests/functional/test_accounts.py +++ b/tests/functional/test_accounts.py @@ -412,12 +412,19 @@ def test_send_transaction_sets_defaults(sender, receiver): assert receipt.required_confirmations == 0 +def test_account_index_access(accounts): + account = accounts[0] + assert account.index == 0 + last_account = accounts[-1] + assert last_account.index == len(accounts) - 1 + + def test_accounts_splice_access(accounts): - a, b = accounts[:2] - assert a == accounts[0] - assert b == accounts[1] - c = accounts[-1] - assert c == accounts[len(accounts) - 1] + alice, bob = accounts[:2] + assert alice == accounts[0] + assert bob == accounts[1] + cat = accounts[-1] + assert cat == accounts[len(accounts) - 1] expected = (len(accounts) // 2) if len(accounts) % 2 == 0 else (len(accounts) // 2 + 1) assert len(accounts[::2]) == expected @@ -612,9 +619,9 @@ def test_custom_num_of_test_accounts_config(accounts, project): assert len(accounts) == custom_number_of_test_accounts -def test_test_accounts_repr(accounts): +def test_test_accounts_repr(accounts, config): actual = repr(accounts) - assert all(a.address in actual for a in accounts) + assert config.get_config("test").hd_path in actual def test_account_comparison_to_non_account(core_account): @@ -629,11 +636,20 @@ def test_create_account(accounts): assert isinstance(created_account, TestAccount) assert created_account.index == length_at_start + length_at_start = len(accounts) second_created_account = accounts.generate_test_account() + assert len(accounts) == length_at_start + 1 assert created_account.address != second_created_account.address assert second_created_account.index == created_account.index + 1 + # Last index should now refer to the last-created account. + last_idx_acct = accounts[-1] + assert last_idx_acct.index == second_created_account.index + assert last_idx_acct.address == second_created_account.address + assert last_idx_acct.address != accounts[0].address + assert last_idx_acct.address != created_account.address + def test_dir(core_account): actual = dir(core_account) @@ -951,3 +967,14 @@ def test_get_deployment_address(owner, vyper_contract_container): assert instance_1.address == deployment_address_1 instance_2 = owner.deploy(vyper_contract_container, 490) assert instance_2.address == deployment_address_2 + + +def test_repr(account_manager): + """ + NOTE: __repr__ should be simple and fast! + Previously, we showed the repr of all the accounts. + That was a bad idea, as that can be very unnecessarily slow. + Hence, this test exists to ensure care is taken. + """ + actual = repr(account_manager) + assert actual == "" diff --git a/tests/functional/test_chain.py b/tests/functional/test_chain.py index 95c8ca2b48..cdac3be1c4 100644 --- a/tests/functional/test_chain.py +++ b/tests/functional/test_chain.py @@ -153,6 +153,8 @@ def test_set_pending_timestamp(chain): assert new_timestamp - start_timestamp == 3600 +# Flakey due to x-dist. 
+@pytest.mark.flaky(reruns=5) def test_set_pending_timestamp_with_deltatime(chain): start_timestamp = chain.pending_timestamp chain.mine(deltatime=5) diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index ec49a34c0f..2c31d22381 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -1,7 +1,7 @@ import os import re from pathlib import Path -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Any, Callable, Optional, Union import pytest from pydantic import ValidationError @@ -11,8 +11,12 @@ from ape.exceptions import ConfigError from ape.managers.config import CONFIG_FILE_NAME, merge_configs from ape.utils.os import create_tempdir -from ape_ethereum.ecosystem import EthereumConfig, NetworkConfig -from ape_networks import CustomNetwork +from ape_cache.config import CacheConfig +from ape_compile.config import Config as CompileConfig +from ape_ethereum.ecosystem import EthereumConfig, ForkedNetworkConfig, NetworkConfig +from ape_networks.config import CustomNetwork +from ape_node.provider import EthereumNetworkConfig, EthereumNodeConfig +from ape_test.config import CoverageReportsConfig, GasConfig, GasExclusion from tests.functional.conftest import PROJECT_WITH_LONG_CONTRACTS_FOLDER if TYPE_CHECKING: @@ -129,6 +133,73 @@ def test_model_validate_path_contracts_folder(): assert cfg.contracts_folder == str(path) +def test_model_validate_handles_environment_variables(): + def run_test(cls: Callable, attr: str, name: str, value: str, expected: Any = None): + expected = expected if expected is not None else value + before: str | None = os.environ.get(name) + os.environ[name] = value + try: + instance = cls() + assert getattr(instance, attr) == expected + finally: + if before is not None: + os.environ[name] = before + else: + os.environ.pop(name, None) + + # Test different config classes. + run_test(ApeConfig, "contracts_folder", "APE_CONTRACTS_FOLDER", "3465220869b2") + run_test(CacheConfig, "size", "APE_CACHE_SIZE", "8627", 8627) + run_test( + CompileConfig, "include_dependencies", "APE_COMPILE_INCLUDE_DEPENDENCIES", "true", True + ) + run_test( + ForkedNetworkConfig, "upstream_provider", "APE_ETHEREUM_UPSTREAM_PROVIDER", "411236f13659" + ) + run_test( + NetworkConfig, "required_confirmations", "APE_ETHEREUM_REQUIRED_CONFIRMATIONS", "6498", 6498 + ) + run_test(EthereumNetworkConfig, "mainnet", "APE_NODE_MAINNET", '{"a":"b"}', {"a": "b"}) + run_test(EthereumNodeConfig, "executable", "APE_NODE_EXECUTABLE", "40613177e494") + run_test(CoverageReportsConfig, "terminal", "APE_TEST_TERMINAL", "false", False) + run_test(GasConfig, "reports", "APE_TEST_REPORTS", '["terminal"]', ["terminal"]) + run_test(GasExclusion, "method_name", "APE_TEST_METHOD_NAME", "32aa54e3c5d2") + + # Assert that union types are handled. + run_test(NetworkConfig, "gas_limit", "APE_ETHEREUM_GAS_LIMIT", "0", 0) + run_test(NetworkConfig, "gas_limit", "APE_ETHEREUM_GAS_LIMIT", "0x100", 0x100) + run_test(NetworkConfig, "gas_limit", "APE_ETHEREUM_GAS_LIMIT", "auto") + run_test(NetworkConfig, "gas_limit", "APE_ETHEREUM_GAS_LIMIT", "max") + with pytest.raises(ValidationError, match=r"Value error, Invalid gas limit"): + run_test(NetworkConfig, "gas_limit", "APE_ETHEREUM_GAS_LIMIT", "something") + + # Assert that various bool variants are parsed correctly. 
+ for bool_val in ("0", "False", "fALSE", "FALSE"): + run_test(NetworkConfig, "is_mainnet", "APE_ETHEREUM_IS_MAINNET", bool_val, False) + + for bool_val in ("1", "True", "tRUE", "TRUE"): + run_test(NetworkConfig, "is_mainnet", "APE_ETHEREUM_IS_MAINNET", bool_val, True) + + # We expect a failure when there's a type mismatch. + with pytest.raises( + ValidationError, + match=r"Input should be a valid boolean, unable to interpret input", + ): + run_test(NetworkConfig, "is_mainnet", "APE_ETHEREUM_IS_MAINNET", "not a boolean", False) + + with pytest.raises( + ValidationError, + match=r"Input should be a valid integer, unable to parse string as an integer", + ): + run_test( + NetworkConfig, + "required_confirmations", + "APE_ETHEREUM_REQUIRED_CONFIRMATIONS", + "not a number", + 42, + ) + + @pytest.mark.parametrize( "file", ("ape-config.yml", "ape-config.yaml", "ape-config.json", "pyproject.toml") ) @@ -152,7 +223,7 @@ def test_validate_file(file): assert "Excl*.json" in actual.compile.exclude -def test_validate_file_expands_env_vars(): +def test_validate_file_expands_environment_variables(): secret = "mycontractssecretfolder" env_var_name = "APE_TEST_CONFIG_SECRET_CONTRACTS_FOLDER" os.environ[env_var_name] = secret @@ -196,6 +267,16 @@ def test_validate_file_shows_linenos_handles_lists(): assert "-->4" in str(err.value) +def test_validate_file_uses_project_name(): + name = "apexampledapp" + with create_tempdir() as temp_dir: + file = temp_dir / "pyproject.toml" + content = f'[project]\nname = "{name}"\n' + file.write_text(content) + cfg = ApeConfig.validate_file(file) + assert cfg.name == name + + def test_deployments(networks_connected_to_tester, owner, vyper_contract_container, project): _ = networks_connected_to_tester # Connection needs to lookup config. diff --git a/tests/functional/test_contracts_cache.py b/tests/functional/test_contracts_cache.py index 3b1c01a4a7..0342080322 100644 --- a/tests/functional/test_contracts_cache.py +++ b/tests/functional/test_contracts_cache.py @@ -120,6 +120,45 @@ def test_instance_at_use_abi(chain, solidity_fallback_contract, owner): assert instance2.contract_type.abi == instance.contract_type.abi +def test_instance_at_provide_proxy(mocker, chain, vyper_contract_instance, owner): + address = vyper_contract_instance.address + container = _make_minimal_proxy(address=address.lower()) + proxy = container.deploy(sender=owner) + proxy_info = chain.contracts.proxy_infos[proxy.address] + + del chain.contracts[proxy.address] + + proxy_detection_spy = mocker.spy(chain.contracts.proxy_infos, "get_type") + + with pytest.raises(ContractNotFoundError): + # This just fails because we deleted it from the cache so Ape no + # longer knows what the contract type is. That is fine for this test! + chain.contracts.instance_at(proxy.address, proxy_info=proxy_info) + + # The real test: we check the spy to ensure we never attempted to look up + # the proxy info for the given address to `instance_at()`. + for call in proxy_detection_spy.call_args_list: + for arg in call[0]: + assert proxy.address != arg + + +def test_instance_at_skip_proxy(mocker, chain, vyper_contract_instance, owner): + address = vyper_contract_instance.address + del chain.contracts[address] + proxy_detection_spy = mocker.spy(chain.contracts.proxy_infos, "get_type") + + with pytest.raises(ContractNotFoundError): + # This just fails because we deleted it from the cache so Ape no + # longer knows what the contract type is. That is fine for this test! 
+ chain.contracts.instance_at(address, detect_proxy=False) + + # The real test: we check the spy to ensure we never attempted to look up + # the proxy info for the given address to `instance_at()`. + for call in proxy_detection_spy.call_args_list: + for arg in call[0]: + assert address != arg + + def test_cache_deployment_live_network( chain, vyper_contract_instance, diff --git a/tests/functional/test_coverage.py b/tests/functional/test_coverage.py index ce9ae2a35d..63aa237b26 100644 --- a/tests/functional/test_coverage.py +++ b/tests/functional/test_coverage.py @@ -255,7 +255,7 @@ def init_profile(source_cov, src): try: # Hack in our mock compiler. - _ = compilers.registered_compilers # Ensure cache is exists. + _ = compilers.registered_compilers # Ensure cache exists. compilers.__dict__["registered_compilers"][mock_compiler.ext] = mock_compiler # Ensure our coverage tracker is using our new tmp project w/ the new src diff --git a/tests/functional/test_dependencies.py b/tests/functional/test_dependencies.py index a89a7451e9..58f7987ceb 100644 --- a/tests/functional/test_dependencies.py +++ b/tests/functional/test_dependencies.py @@ -1,4 +1,5 @@ import json +import os import shutil from pathlib import Path @@ -611,6 +612,24 @@ def test_fetch_ref(self, mock_client): "ApeWorX", "ApeNotAThing", path, branch="3.0.0" ) + def test_fetch_existing_destination_with_read_only_files(self, mock_client): + """ + Show it handles when the destination contains read-only files already + """ + dependency = GithubDependency(github="ApeWorX/ApeNotAThing", ref="3.0.0", name="apetestdep") + dependency._github_client = mock_client + + with create_tempdir() as path: + readonly_file = path / "readme.txt" + readonly_file.write_text("readme!") + + # NOTE: This only makes a difference on Windows. If using a UNIX system, + # rmtree still deletes readonly files regardless. Windows is more restrictive. + os.chmod(readonly_file, 0o444) # Read-only permissions + + dependency.fetch(path) + assert not readonly_file.is_file() + class TestPythonDependency: @pytest.fixture(scope="class", params=("site_package", "python", "pypi")) diff --git a/tests/functional/test_network_api.py b/tests/functional/test_network_api.py index 75caa24bac..a54de1ad11 100644 --- a/tests/functional/test_network_api.py +++ b/tests/functional/test_network_api.py @@ -237,6 +237,11 @@ def test_providers_custom_network(project, custom_networks_config_dict, ethereum network = ethereum.apenet actual = network.providers assert "node" in actual + node = actual["node"] + # NOTE: There was a bug where sometimes it would use the GethDev class + # and sometimes it would use the Node class. Node is what we want. + assert "Node" in repr(node.func) + assert "GethDev" not in repr(node.func) def test_providers_custom_non_fork_network_does_not_use_fork_provider( diff --git a/tests/functional/test_project.py b/tests/functional/test_project.py index 40482d0329..6d70e4e047 100644 --- a/tests/functional/test_project.py +++ b/tests/functional/test_project.py @@ -181,6 +181,7 @@ def test_isolate_in_tempdir_does_not_alter_sources(project): # First, create a bad source. with project.temp_config(contracts_folder="build"): new_src = project.contracts_folder / "newsource.json" + new_src.parent.mkdir(exist_ok=True, parents=True) new_src.write_text("this is not json, oops") project.sources.refresh() # Only need to be called when run with other tests. 
diff --git a/tests/functional/test_provider.py b/tests/functional/test_provider.py index 57b6b6d5e1..0876172d05 100644 --- a/tests/functional/test_provider.py +++ b/tests/functional/test_provider.py @@ -105,6 +105,35 @@ def test_chain_id_is_cached(eth_tester_provider): eth_tester_provider._web3 = web3 # Undo +def test_chain_id_from_ethereum_base_provider_is_cached(mock_web3, ethereum, eth_tester_provider): + """ + Simulated chain ID from a plugin (using base-ethereum class) to ensure is + also cached. + """ + + def make_request(rpc, arguments): + if rpc == "eth_chainId": + return {"result": 11155111} # Sepolia + + return eth_tester_provider.make_request(rpc, arguments) + + mock_web3.provider.make_request.side_effect = make_request + + class PluginProvider(Web3Provider): + def connect(self): + return + + def disconnect(self): + return + + provider = PluginProvider(name="sim", network=ethereum.sepolia) + provider._web3 = mock_web3 + assert provider.chain_id == 11155111 + # Unset to web3 to prove it does not check it again (else it would fail). + provider._web3 = None + assert provider.chain_id == 11155111 + + def test_chain_id_when_disconnected(eth_tester_provider): eth_tester_provider.disconnect() try: @@ -116,6 +145,11 @@ def test_chain_id_when_disconnected(eth_tester_provider): eth_tester_provider.connect() +def test_chain_id_adhoc(networks): + with networks.parse_network_choice("https://www.shibrpc.com") as bor: + assert bor.chain_id == 109 + + def test_get_receipt_not_exists_with_timeout(eth_tester_provider): unknown_txn = "0x053cba5c12172654d894f66d5670bab6215517a94189a9ffc09bc40a589ec04d" expected = ( @@ -658,3 +692,32 @@ def test_update_settings_invalidates_snapshots(eth_tester_provider, chain): assert snapshot in chain._snapshots[eth_tester_provider.chain_id] eth_tester_provider.update_settings({}) assert snapshot not in chain._snapshots[eth_tester_provider.chain_id] + + +def test_connect_uses_cached_chain_id(mocker, mock_web3, ethereum, eth_tester_provider): + class PluginProvider(EthereumNodeProvider): + pass + + web3_factory_patch = mocker.patch("ape_ethereum.provider._create_web3") + web3_factory_patch.return_value = mock_web3 + + class ChainIDTracker: + call_count = 0 + + def make_request(self, rpc, args): + if rpc == "eth_chainId": + self.call_count += 1 + return {"result": "0xaa36a7"} # Sepolia + + return eth_tester_provider.make_request(rpc, args) + + chain_id_tracker = ChainIDTracker() + mock_web3.provider.make_request.side_effect = chain_id_tracker.make_request + + provider = PluginProvider(name="node", network=ethereum.sepolia) + provider.connect() + assert chain_id_tracker.call_count == 1 + provider.disconnect() + provider.connect() + # It is still cached from the previous connection. + assert chain_id_tracker.call_count == 1 diff --git a/tests/functional/test_proxy.py b/tests/functional/test_proxy.py index 3a8bd6bd7f..74168a5b3e 100644 --- a/tests/functional/test_proxy.py +++ b/tests/functional/test_proxy.py @@ -5,12 +5,22 @@ """ -def test_minimal_proxy(ethereum, minimal_proxy, chain): +def test_minimal_proxy(ethereum, minimal_proxy_container, chain, owner): + placeholder = "0xBEbeBeBEbeBebeBeBEBEbebEBeBeBebeBeBebebe" + if placeholder in chain.contracts: + del chain.contracts[placeholder] + + minimal_proxy = owner.deploy(minimal_proxy_container, sender=owner) + chain.provider.network.__dict__["explorer"] = None # Ensure no explorer, messes up test. 
actual = ethereum.get_proxy_info(minimal_proxy.address) assert actual is not None assert actual.type == ProxyType.Minimal # It is the placeholder value still. - assert actual.target == "0xBEbeBeBEbeBebeBeBEBEbebEBeBeBebeBeBebebe" + assert actual.target == placeholder + # Show getting the contract using the proxy address. contract = chain.contracts.instance_at(minimal_proxy.address) - assert contract.contract_type.abi == [] # No target ABIs; no proxy ABIs either. + abi = contract.contract_type.abi + if isinstance(abi, list): + assert abi == [] + # else: is messed up from other test (xdist).