diff --git a/.gitignore b/.gitignore index 050f6bf..175abbb 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ /global_prefs_override_backup.xml /.idea/ /venv/ +/.git-blame-ignore-revs scratch.py /user_config.py /tests/__pycache__ diff --git a/config.py b/config.py index da18f25..fce43de 100644 --- a/config.py +++ b/config.py @@ -73,6 +73,7 @@ def TEMP_FUNCTION(): DUMP_PROJECT_WEIGHTS: bool = False # Dump weights assigned to projects DUMP_PROJECT_PRIORITY: bool = False # Dump weights adjusted after considering current and past crunching time DUMP_RAC_MAG_RATIOS: bool = False # Dump the RAC:MAG ratios from each Gridcoin project +DUMP_DATABASE:bool = False # Dump the DATABASE # how many decimal places to round each stat to which is printed in the output table ROUNDING_DICT = { "MAGPERCREDIT": 5, @@ -101,7 +102,7 @@ def TEMP_FUNCTION(): PRICE_CHECK_INTERVAL = 1440 # how often to check GRC price in minutes, minimum delay of 60 minutes between checks. Default is 1440 (24 hrs) LOG_LEVEL = "WARNING" # Options are: 'DEBUG','INFO','WARNING','ERROR','NONE', default is 'WARNING' MAX_LOGFILE_SIZE_IN_MB = 10 # Default: 10 -ROLLING_WEIGHT_WINDOW = 60 # Use stats up to x days old for calculating intended weights vs actual crunch time, Default: 60. Note that "benchmarking" is applied to total time, not windowed time. Benchmarking will take 1% of ALL crunching time across ALL time history. This enables you set smaller "windows" and get faster reaction to weight changes without over-doing benchmarking. +ROLLING_WEIGHT_WINDOW = 90 # Use stats up to x days old for calculating intended weights vs actual crunch time, Default: 90. Note that "benchmarking" is applied to total time, not windowed time. Benchmarking will take 1% of ALL crunching time across ALL time history. This enables you set smaller "windows" and get faster reaction to weight changes without over-doing benchmarking. # BENCHMARKING SETTINGS: # Benchmarking is needed to determine profitability of a project. 
It is strongly suggested you keep these settings as they are, they are sane defaults. diff --git a/main.py b/main.py index 8f9819a..f5b674f 100644 --- a/main.py +++ b/main.py @@ -1,4 +1,5 @@ # DO NOT EDIT THIS FILE, EDIT USER_CONFIG.PY INSTEAD +# SPDX-License-Identifier: AGPL-3.0-only try: from math import floor, ceil import copy @@ -26,11 +27,11 @@ from typing import List, Union, Dict, Tuple, Set, Any import sys, signal - # this is needed for some async stuff + # This is needed for some async stuff import nest_asyncio nest_asyncio.apply() - # ignore deprecation warnings in Windows + # Ignore deprecation warnings in Windows import warnings except Exception as e: print( @@ -53,7 +54,9 @@ BOINC_PORT: int = 31416 BOINC_USERNAME: Union[str, None] = None BOINC_PASSWORD: Union[str, None] = None -MIN_RECHECK_TIME: int = 30 # minimum time in minutes before re-asking a project for work who previously said they were out +# Minimum time in minutes before re-asking a project for work who previously said +# they were out +MIN_RECHECK_TIME: int = 30 ABORT_UNSTARTED_TASKS: bool = False RECALCULATE_STATS_INTERVAL: int = 60 PRICE_CHECK_INTERVAL: int = 720 @@ -86,34 +89,35 @@ False # Dump weights adjusted after considering current and past crunching time ) DUMP_RAC_MAG_RATIOS: bool = False # Dump the RAC:MAG ratios from each Gridcoin project +DUMP_DATABASE: bool = False # Dump the DATABASE DEV_FEE_MODE: str = "CRUNCH" # valid values: CRUNCH|SIDESTAKE CRUNCHING_FOR_DEV: bool = False -DEV_EXIT_TEST: bool = False # only used for testing +DEV_EXIT_TEST: bool = False # Only used for testing # Some globals we need. 
I try to have all globals be ALL CAPS FORCE_DEV_MODE = ( - False # used for debugging purposes to force crunching under dev account + False # Used for debugging purposes to force crunching under dev account ) BOINC_PROJECT_NAMES = {} DATABASE = {} DATABASE[ "TABLE_SLEEP_REASON" -] = "" # sleep reason printed in table, must be reset at script start +] = "" # Sleep reason printed in table, must be reset at script start DATABASE[ "TABLE_STATUS" -] = "" # info status printed in table, must be reset at script start +] = "" # Info status printed in table, must be reset at script start SCRIPTED_RUN: bool = False SKIP_TABLE_UPDATES: bool = False HOST_COST_PER_HOUR = (HOST_POWER_USAGE / 1000) * LOCAL_KWH LAST_KNOWN_CPU_MODE = None LAST_KNOWN_GPU_MODE = None -LOOKUP_URL_TO_DATABASE = {} # lookup table for uppered URLS -> canonical URLs. +LOOKUP_URL_TO_DATABASE = {} # Lookup table for uppered URLS -> canonical URLs. LOOKUP_URL_TO_BOINC = ( {} -) # lookup table for uppered URLs -> BOINC urls. Note the key is NOT the canonical url, just an uppered URL for performance reasons. +) # Lookup table for uppered URLs -> BOINC urls. Note the key is NOT the canonical url, just an uppered URL for performance reasons. LOOKUP_URL_TO_BOINC_DEV = ( {} -) # lookup table for uppered URLs -> BOINC urls for dev client. Note the key is NOT the canonical url, just an uppered URL for performance reasons. +) # Lookup table for uppered URLs -> BOINC urls for dev client. Note the key is NOT the canonical url, just an uppered URL for performance reasons. 
ATTACHED_PROJECT_SET = set() ATTACHED_PROJECT_SET_DEV = set() COMBINED_STATS = {} @@ -122,36 +126,38 @@ TESTING: bool = False PRINT_URL_LOOKUP_TABLE: Dict[ str, str -] = {} # used to convert urls for printing to table -MAG_RATIO_SOURCE: Union[str, None] = None # VALID VALUES: WALLET|WEB +] = {} # Used to convert urls for printing to table +MAG_RATIO_SOURCE: Union[str, None] = None # Valid values: WALLET|WEB CHECK_SIDESTAKE_RESULTS = False loop = asyncio.get_event_loop() -# Translates BOINC's CPU and GPU Mode replies into English. Note difference between keys integer vs string. +# Translates BOINC's CPU and GPU Mode replies into English. Note difference between +# keys integer vs string. CPU_MODE_DICT = {1: "always", 2: "auto", 3: "never"} GPU_MODE_DICT = {"1": "always", "2": "auto", "3": "never"} ROUNDING_DICT = { "MAGPERCREDIT": 5, "AVGMAGPERHOUR": 3, } -DEV_BOINC_PASSWORD = "" # this is only used for printing to table, not used elsewhere +DEV_BOINC_PASSWORD = "" # This is only used for printing to table, not used elsewhere DEV_LOOP_RUNNING = False SAVE_STATS_DB = ( {} -) # keeps cache of saved stats databases so we don't write more often than we need to -# Dictionary for places we query in format key=url, value=Tuple[nickname,regex]. Note they all must match group 2 +) # Keeps cache of saved stats databases so we don't write more often than we need too +# Dictionary for places we query in format key=url, value=Tuple[nickname,regex]. 
+# Note they all must match group 2 PRICE_URL_DICT: Dict[str, Tuple[str, Union[str, re.Pattern]]] = { "https://coinmarketcap.com/currencies/gridcoin/": ( "coinmarketcap.com", - '("low24h":)(\d*.\d*)', + r'("low24h":)(\d*.\d*)', ), "https://finance.yahoo.com/quote/GRC-USD/": ( "yahoo.com", - '(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)', + r'(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)', ), "https://www.coingecko.com/en/coins/gridcoin-research": ( "coingecko", re.compile( - '(data-coin-id="243" data-coin-symbol="grc" data-target="price.price">\$)(\d*\.\d*)()', + r'(data-coin-id="243" data-coin-symbol="grc" data-target="price.price">\$)(\d*\.\d*)()', flags=re.MULTILINE | re.IGNORECASE, ), ), @@ -173,9 +179,9 @@ def resolve_url_database(url: str) -> str: uppered = uppered.replace("HTTP://", "") if uppered.startswith( "WWW." - ): # this is needed as WWW. may legitimately exist in a url outside of the starting portion + ): # This is needed as WWW. may legitimately exist in a url outside of the starting portion uppered = uppered.replace("WWW.", "") - if uppered.endswith("/"): # remove trailing slashes + if uppered.endswith("/"): # Remove trailing slashes uppered = uppered[:-1] if "WORLDCOMMUNITYGRID.ORG/BOINC" in uppered: uppered = "WORLDCOMMUNITYGRID.ORG" @@ -183,19 +189,19 @@ def resolve_url_database(url: str) -> str: return uppered -# import user settings from config +# Import user settings from config try: from config import * except Exception as e: print("Error opening config.py, using defaults! 
Error is: {}".format(e)) -# import addl user settings from user_config +# Import addl user settings from user_config if os.path.isfile("user_config.py"): try: - from user_config import * # you can ignore an unresolved reference error here in pycharm since user is expected to create this file + from user_config import * # You can ignore an unresolved reference error here in pycharm since user is expected to create this file import user_config except Exception as e: print("Error opening user_config.py, using defaults! Error is: {}".format(e)) -# verify all imports are upper-cased +# Verify all imports are upper-cased for variable in dir(config): if variable.startswith("__"): continue @@ -215,7 +221,7 @@ def resolve_url_database(url: str) -> str: ) print(error) quit() -# setup logging +# Setup logging log = logging.getLogger() if LOG_LEVEL == "NONE": log.addHandler(logging.NullHandler()) @@ -240,7 +246,7 @@ def resolve_url_database(url: str) -> str: ) ) -# canonicalize URLs given to us by user +# Canonicalize URLs given to us by user old_preferred_projects = copy.deepcopy(PREFERRED_PROJECTS) PREFERRED_PROJECTS = {} for url, amount in old_preferred_projects.items(): @@ -251,7 +257,7 @@ def resolve_url_database(url: str) -> str: canonicalized = resolve_url_database(url) IGNORED_PROJECTS.append(canonicalized) -# if user has no preferred projects, their % of crunching should be 0 +# If user has no preferred projects, their % of crunching should be 0 if len(PREFERRED_PROJECTS) == 0: preferred_projects_percent: float = 0 @@ -268,7 +274,7 @@ def resolve_url_database(url: str) -> str: elif FOUND_PLATFORM == "Darwin": BOINC_DATA_DIR = os.path.join("/Library/Application Support/BOINC Data/") else: - BOINC_DATA_DIR = "C:\ProgramData\BOINC\\" + BOINC_DATA_DIR = "C:\\ProgramData\\BOINC\\" if not GRIDCOIN_DATA_DIR: if FOUND_PLATFORM == "Linux": GRIDCOIN_DATA_DIR = os.path.join(Path.home(), ".GridcoinResearch/") @@ -278,13 +284,24 @@ def resolve_url_database(url: str) -> str: ) 
else: GRIDCOIN_DATA_DIR = os.path.join( - Path.home(), "AppData\Roaming\GridcoinResearch\\" + Path.home(), "AppData\\Roaming\\GridcoinResearch\\" ) class GridcoinClientConnection: - """ - A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently quite barebones. + """Allows connecting to a Gridcoin wallet and issuing RPC commands. + + A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently + quite barebones. + + Attributes: + config_file: + ip_address: + rpc_port: + rpc_user: + rpc_password: + retries: + retry_delay: """ def __init__( @@ -297,7 +314,18 @@ def __init__( retries: int = 3, retry_delay: int = 1, ): - self.configfile = config_file # absolute path to the client config file + """Initializes the instance based on the connection attributes. + + Attributes: + config_file: + ip_address: + rpc_port: + rpc_user: + rpc_password: + retries: int = 3, + retry_delay: int = 1, + """ + self.configfile = config_file # Absolute path to the client config file self.ipaddress = ip_address self.rpc_port = rpc_port self.rpcuser = rpc_user @@ -308,8 +336,18 @@ def __init__( def run_command( self, command: str, arguments: List[Union[str, bool]] = None ) -> Union[dict, None]: - """ - Runs a command, returns dict of json or None if error connecting to wallet + """Send command to local Gridcoin wallet + + Sends specified Gridcoin command to the Gridcoin wallet instance and + retrieves result of the command execution. + + Args: + command: + arguments: + + Returns: + Response from command execution as a dictionary of json, or None if + an error was encountered while connecting to the Gridcoin wallet instance. """ if not arguments: arguments = [] @@ -341,8 +379,13 @@ def run_command( return None def get_approved_project_urls(self) -> List[str]: - """ - :return: A list of UPPERCASED project URLs using gridcoin command listprojects + """Retrieves list of projects approved for Gridcoin. 
+ + Retrieves the list of projects from the local Gridcoin wallet that are + approved for earning Gridcoin. + + Returns: + A list of UPPERCASED project URLs using gridcoin command listprojects """ return_list = [] all_projects = self.run_command("listprojects") @@ -352,21 +395,37 @@ def get_approved_project_urls(self) -> List[str]: class BoincClientConnection: - """ - A simple class for grepping BOINC config files etc. Doesn't do any RPC communication. This class and any - usage of it should be wrapped in try/except clauses as it does not do any error handling internally. + """Access to BOINC client configuration files. + + A simple class for grepping BOINC config files etc. Doesn't do any RPC communication + + Note: Usage of it should be wrapped in try/except clauses as it does not + do any error handling internally. + + Attributes: + config_dir: """ def __init__(self, config_dir: str = None): + """Initializes the instance using the Gridcoin wallet configuration location. + + Args: + config_dir: + """ if config_dir is None: self.config_dir = "/var/lib/boinc-client" else: - self.config_dir = config_dir # absolute path to the client config dir + self.config_dir = config_dir # Absolute path to the client config dir def get_project_list(self) -> List[str]: - """ - :return: List of project URLs. This is all of them known, not just ones which are attached. - Note that some attached projects may not be on this list, as they are not included in BOINC by default. + """Retrieve the list of projects supported by the BOINC client + + Constructs a list of all projects known by the BOINC client. This may include + more projects than those currently attached to the BOINC client. This may also + not include some projects currently attached, if they are projects not included + with BOINC by default. + + Returns: List of project URLs. 
""" project_list_file = os.path.join(self.config_dir, "all_projects_list.xml") return_list = [] @@ -449,7 +508,7 @@ def resolve_url_boinc_rpc( if not known_boinc_projects: known_boinc_projects = ALL_PROJECT_URLS - # check quick lookup tables first + # Check quick lookup tables first if dev_mode: if original_uppered in LOOKUP_URL_TO_BOINC_DEV: return LOOKUP_URL_TO_BOINC_DEV[original_uppered] @@ -457,7 +516,7 @@ def resolve_url_boinc_rpc( if original_uppered in LOOKUP_URL_TO_BOINC: return LOOKUP_URL_TO_BOINC[original_uppered] - # do full lookup if that doesn't work + # Do full lookup if that doesn't work uppered = original_uppered.replace("HTTPS://WWW.", "") uppered = uppered.replace("HTTP://WWW.", "") uppered = uppered.replace("HTTPS://", "") @@ -499,17 +558,27 @@ def resolve_url_list_to_database(url_list: List[str]) -> List[str]: def shutdown_dev_client(quiet: bool = False) -> None: - new_loop = ( - asyncio.get_event_loop() - ) # this is needed in case this function is called while main loop is still waiting for an RPC command etc + """Shutdown developer BOINC client. + + Sends RPC quit command to running dev BOINC client. + + Args: + quiet: + + Raises: + Exception: An error occured shutting down the dev BOINC client. 
+ """ + # This is needed in case this function is called while main loop is still + # waiting for an RPC command etc + new_loop = asyncio.get_event_loop() log.info("Attempting to shut down dev client at safe_exit...") try: dev_rpc_client = new_loop.run_until_complete( setup_connection(BOINC_IP, DEV_BOINC_PASSWORD, port=DEV_RPC_PORT) - ) # setup dev BOINC RPC connection + ) # Setup dev BOINC RPC connection authorize_response = new_loop.run_until_complete( dev_rpc_client.authorize(DEV_BOINC_PASSWORD) - ) # authorize dev RPC connection + ) # Authorize dev RPC connection shutdown_response = new_loop.run_until_complete( run_rpc_command(dev_rpc_client, "quit") ) @@ -518,9 +587,14 @@ def shutdown_dev_client(quiet: bool = False) -> None: def safe_exit(arg1, arg2) -> None: - """ - Function to safely exit tool by saving database, restoring original user preferences, and quitting dev BOINC client. - arg1/2 required by the signal handler library, but aren't used for anything inside this function + """Safely exit Find The Mag. + + Safely exit tool by saving database, restoring original user preferences, + and quitting dev BOINC client. + + Args: arg1 and arg2: + Required by the signal handler library, + but aren't used for anything inside this function """ print_and_log( "Program exiting gracefully. 
Please be patient this may take a few minutes", @@ -537,23 +611,23 @@ def safe_exit(arg1, arg2) -> None: new_loop = ( asyncio.get_event_loop() - ) # this is needed in case this function is called while main loop is still waiting for an RPC command etc + ) # This is needed in case this function is called while main loop is still waiting for an RPC command etc # Shutdown developer BOINC client, if running if ( not should_crunch_for_dev(False) and CRUNCHING_FOR_DEV or DEV_EXIT_TEST - ): # if we are crunching for dev and won't start crunching again on next run + ): # If we are crunching for dev and won't start crunching again on next run new_loop.run_until_complete(dev_cleanup(rpc_client=None)) shutdown_dev_client() - # restore crunching settings pre-dev-mode + # Restore crunching settings pre-dev-mode if CRUNCHING_FOR_DEV: try: rpc_client = new_loop.run_until_complete( setup_connection(BOINC_IP, BOINC_PASSWORD, port=BOINC_PORT) - ) # setup dev BOINC RPC connection + ) # Setup dev BOINC RPC connection authorize_response = new_loop.run_until_complete( rpc_client.authorize(BOINC_PASSWORD) - ) # authorize dev RPC connection + ) # Authorize dev RPC connection new_loop.run_until_complete( run_rpc_command(rpc_client, "set_gpu_mode", LAST_KNOWN_GPU_MODE) ) @@ -624,9 +698,16 @@ async def get_stats_helper(rpc_client: libs.pyboinc.rpc_client) -> list: async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list: - """ + """List of active, waiting, or paused BOINC tasks. + Return list of tasks from BOINC client which are not completed/failed. These can be active tasks, tasks waiting to be started, or paused tasks. + + Args: + rpc_client: + + Returns: + List of BOINC tasks. 
""" # Known task states # 2: Active @@ -649,14 +730,26 @@ async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list: async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool: - """ - Returns True is boinc is crunching, false if not or unable to determine + """Check if BOINC is actively crunching tasks. + + Queries BOINC client as to crunching status. Returns True is BOINC client + is crunching, false otherwise. + + Args: + rpc_client: + + Returns: + True if crunching, or False if not crunching or unsure. + + Raises: + Exception: An error occured attempting to check the BOINC client crunching status. """ try: reply = await run_rpc_command(rpc_client, "get_cc_status") task_suspend_reason = int(reply["task_suspend_reason"]) if task_suspend_reason != 0: - # These are documented at https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt + # These are documented at + # https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt log.debug( "Determined BOINC client is not crunching task_suspend_reason: {}".format( task_suspend_reason @@ -685,8 +778,17 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool: async def setup_connection( boinc_ip: str = BOINC_IP, boinc_password: str = BOINC_PASSWORD, port: int = 31416 ) -> Union[libs.pyboinc.rpc_client.RPCClient, None]: - """ + """Create BOINC RPC client connection. + Sets up a BOINC RPC client connection + + Args: + boinc_ip: + boinc_password: + port: + + Returns: + """ rpc_client = None if not boinc_ip: @@ -696,8 +798,15 @@ async def setup_connection( def temp_check() -> bool: - """ - Returns True if we should keep crunching based on temperature or have issues measuring temp, False otherwise + """Checks if temperature is within acceptable limit. 
+ + Confirms if we should keep crunching based on temperature, or not. + + Returns: + True if we should keep crunching, False otherwise. + + Raises: + Exception: An error occurred attempting to read the temperature. """ if not ENABLE_TEMP_CONTROL: return True @@ -743,17 +852,32 @@ def temp_check() -> bool: def update_fetch( update_text: str = None, current_ver: float = None ) -> Tuple[bool, bool, Union[str, None]]: - """ - Check for updates. Return True and string to print if updates found, False otherwise - @update_text: used for testing purposes - @current_ver: added for testing purposes - @return: If update is available, if it is a security update, a string to print + """Check if FindTheMag updates are available. + + Check with FindTheMag repository on GitHub whether or not an update is + available. If available, inform the user and provide some information. + + Update checks are performed no more often than once per week. Check times are + stored in the database for future reference. + + Args: + update_text: Used for testing purposes. Default: None + current_ver: Added for testing purposes. Default: None + + Returns: + A tuple consisting of: + A bool, set to True if an update is available. + A bool, set to True if the update is a security update. + A string containing update related information. + + Raises: + Exception: An error occurred when attempting to parse the retrieved update file. """ update_return = False return_string = "" security_update_return = False - # added for testing purposes + # Added for testing purposes if update_text: resp = update_text else: @@ -830,8 +954,9 @@ def update_fetch( def update_check() -> None: - """ - Check for updates to the FindTheMag tool + """Check if FindTheMag updates are available. + + Checks for updates to the FindTheMag tool and logs information on any updates found. 
""" available, security, print_me = update_fetch() if available: @@ -843,6 +968,24 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]: Gets average GRC price from three online sources. Returns None if unable to determine @sample_text: Used for testing. Just a "view source" of all pages added together """ + """Retrieve current average Gridcoin price. + + Calculates the average GRC price based on values from three online sources. + + Note: Retrieving the prices is dependent on the target website formatting. If the + source website changes significantly, retrieval may fail until the relevant + search pattern in updated. + + Args: + sample_text: Used for testing. + Typicaly a "view source" of all pages added together. + + Returns: + Average GCR price in decimal, or None if unable to determine price. + + Raises: + Exception: An error occurred accessing an online GRC price source. + """ import requests as req found_prices = [] @@ -883,9 +1026,20 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]: def get_approved_project_urls_web(query_result: str = None) -> Dict[str, str]: - """ - Gets current whitelist from Gridcoinstats - @query_result: used for testing + """List of projects currently witelised by Gridcoin. + + Gets current whitelist from the Gridcoinstats website. Limits fetching + from website to once every 24 hours through caching list in database. + + Args: + query_result: Used for testing. + + Returns: + A dictionary mapping base URLs to project names. + + Raises: + Exception: An error occurred fetching stats data from the website. + Exception: An error occurred parsing data from the source website. 
""" # Check if cache is available if "GSPROJECTLIST" in DATABASE and "GSRESOLVERDICT" in DATABASE: @@ -968,8 +1122,19 @@ def stuck_xfer(xfer: dict) -> bool: def xfers_happening(xfer_list: list) -> bool: - """ - Returns True if any active xfers are happening, false if none are happening, if only stalled xfers exist, or if unable to determine + """Confirms whether or not the BOINC client has any active transfers. + + Checks list of transfers for any that are active. + + Args: + xfer_list: List of transfers. + + Returns: + True if any active xfers are happening, False if none are happening, or + if only stalled xfers exist, or if unable to determine. + + Raises: + Exception: An error occurred parsing entry in transfer list. """ # Known statuses: # 0 = Active @@ -991,13 +1156,21 @@ def xfers_happening(xfer_list: list) -> bool: def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None: - """ + """Wait on BOINC client to finish all pending transfers. + Wait for BOINC to finish all pending xfers, return None when done + + Args: + rpc_client: Connection to BOINC client instance. + + Raises: + Exception: An error occurred attempting to communicate with the BOINC client. """ max_loops = 30 current_loops = 0 - loop_wait_in_seconds = 30 # wait this long between loops - # Every ten seconds we will request the list of file transfers from BOINC until there are none left + loop_wait_in_seconds = 30 # Wait this long between loops + # Every ten seconds we will request the list of file transfers from BOINC until + # there are none left. while current_loops < max_loops: current_loops += 1 # Ask BOINC for a list of file transfers @@ -1018,7 +1191,7 @@ def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None: sleep(loop_wait_in_seconds) continue if isinstance(allow_response, str): - cleaned_response = re.sub("\s*", "", allow_response) + cleaned_response = re.sub(r"\s*", "", allow_response) if cleaned_response == "": # There are no transfers, yay! 
return if xfers_happening(allow_response): @@ -1030,9 +1203,21 @@ def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None: def get_gridcoin_config_parameters(gridcoin_dir: str) -> Dict[str, str]: - """ - :param gridcoin_dir: Absolute path to a gridcoin config directory - :return: All config parameters found, preferring those in the json file to the conf. Note that sidestakes become a list as there may be multiple + """Retrieve Gridcoin wallet configuration. + + Parses Gridcoin configuration .json and .conf file for configuration parameters. + Preference is given to those in the json file over those in the conf file. + + Note that sidestakes become a list as there may be multiple. + + Args: + gridcoin_dir: Absolute path to a gridcoin config directory. + + Returns: + A dictionary of all config parameters found. + + Raises: + Exception: An error occurred while parsing the config file. """ return_dict = dict() dupes = {} @@ -1102,12 +1287,17 @@ def get_gridcoin_config_parameters(gridcoin_dir: str) -> Dict[str, str]: def check_sidestake( config_params: Dict[str, Union[str, List[str]]], address: str, minval: float ) -> bool: - """ + """Confirms whether or not the given address is being adequately sidestaked. + + Checks if a given address is being sidestaked to or not. Returns False if value < minval - :param config_params: config_params from get_gridcoin_config_parameters - :param address: address to check - :param minval: minimum value to pass check - :return: True or False + + Args: + config_params: config_params from get_gridcoin_config_parameters + address: address to check + minval: minimum value to pass check + + Returns: + True if given address is sidestaked for more than the given minimum. """ if "enablesidestaking" not in config_params: return False @@ -1125,10 +1315,17 @@ def check_sidestake( def project_url_from_stats_file(statsfilename: str) -> str: + """Guess a project URL using stats file name. + + Guess a project URL from the name of a stats file. 
+ + Args: + statsfilename: + + Returns: + URL for project associated with stats file, or stats file name if URL unknown. """ - Guess a project url from the name of a stats file - """ - # remove extraneous information from name + # Remove extraneous information from name statsfilename = statsfilename.replace("job_log_", "") statsfilename = statsfilename.replace(".txt", "") statsfilename = statsfilename.replace("_", "/") @@ -1136,8 +1333,16 @@ def project_url_from_stats_file(statsfilename: str) -> str: def project_url_from_credit_history_file(filename: str) -> str: - """ - Guess a project url from credit history file name + """Guess a project URL using credit history file name + + Guess a project URL from credit history file name. + + Args: + filename: + + Returns: + URL for project associated with stats file, or credit history + file name if URL unknown. """ filename = filename.replace("statistics_", "") filename = filename.replace(".xml", "") @@ -1148,22 +1353,33 @@ def project_url_from_credit_history_file(filename: str) -> str: def stat_file_to_list( stat_file_abs_path: str = None, content: str = None ) -> List[Dict[str, str]]: - """ - Turns a BOINC job log into list of dicts we can use, each dict is a task. Dicts have keys below: - STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE + """Retrieve a list of tasks and related stats from BOINC client log file. + + Turns a BOINC job log into list of dictionaries we can use, each dictionary + is a task. + Dictionaries have the following keys: + STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE + Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero. All values and keys in dicts are strings. 
- @content: added for testing - """ - """ - BOINC's job log format is: + BOINC's job log format is: + [ue] Estimated runtime BOINC Client estimate (seconds) + [ct] CPU time Measured CPU runtime at completion (seconds) + [fe] Estimated FLOPs count From project (integer) + [nm] Task name From project + [et] Elapsed time Wallclock runtime at completion (seconds) + + Args: + stat_file_abs_path: BOINC client statistics log file with absolute path + content: Added for testing purposes. + + Returns: + List dictionaries, each a BOINC task with statistics. -[ue] Estimated runtime BOINC Client estimate (seconds) -[ct] CPU time Measured CPU runtime at completion (seconds) -[fe] Estimated FLOPs count From project (integer) -[nm] Task name From project -[et] Elapsed time Wallclock runtime at completion (seconds) + Raises: + Exception: An error occurred when attempting to read a BOINC job log file. + Exception: An error occurred when attempting to parse a BOINC job log file. """ stats_list = [] try: @@ -1226,9 +1442,27 @@ async def run_rpc_command( arg2: Union[str, None] = None, arg2_val: Union[str, None] = None, ) -> Union[str, Dict[Any, Any], List[Any]]: - """ - Runs command on BOINC client via RPC. Has try/except and retries, returns None if unsuccessful + """Send command to BOINC client via RPC + + Runs command on BOINC client via RPC Example: run_rpc_command(rpc_client,'project_nomorework','http://project.com/project') + + Attempts to communicate with the BOINC client multiple times based on internal + parameters. + + Args: + rpc_client: Connection to BOINC client instance. + command: Command to be executed by the BOINC client. + arg1: Optional parameter for BOINC command. + arg1_val: Value for optional parameter. + arg2: Optional parameter for BOINC command. + arg2_val: Value for optional parameter. + + Returns: + Response from BOINC client, or None if unsuccessful. + + Raises: + Exception: An error occurred attempting to communicated with the BOINC client. 
""" max_retries = 3 retry_wait = 5 @@ -1269,10 +1503,24 @@ async def run_rpc_command( def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str, str]]: - """ - Turns a BOINC credit history file into list of dicts we can use. Dicts have keys below: + """Retrieve BOINC credit history + + Turns a BOINC credit history file into list of dictionaries we can use. + + Dictionaries have keys below: TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC + Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero. + + Args: + credithistoryfileabspath: Filename with absolute path. + + Returns: + List of dicionaries with the following keys: + TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC + + Raises: + Exception: An error occurred attempting to read and parse the credit history file. """ statslist = [] try: @@ -1519,16 +1767,28 @@ async def boinc_client_to_stats( def config_files_to_stats( config_dir_abs_path: str, ) -> Dict[str, Dict[str, Union[int, float, Dict[str, Union[float, str]]]]]: - """ - :param config_dir_abs_path: Absolute path to BOINC data directory - :return: Dict of stats, or empty dict if encounters errors + """Extract BOINC statistics from all available log and stats files. + + Identifies all job log and statistics files in the specified directory. Extracts + all stats from found files and constructs dictionaries of them. + + Args: + config_dir_abs_path: Absolute path to BOINC data directory. + + Returns: + Dictionary of statistics in format COMBINED_STATS_EXAMPLE in main.py, or + an empty dictionary if unable to retrieve a list of statistics files. + + Raises: + Exception: An error occurred retrieving list of statistics files. + Exception: An error occurred parsing credit history files. 
""" stats_files: List[str] = [] credit_history_files: List[str] = [] return_stats = {} template_dict = {"CREDIT_HISTORY": {}, "WU_HISTORY": {}, "COMPILED_STATS": {}} - # find files to search through, add them to lists + # Find files to search through, add them to lists try: for file in os.listdir(config_dir_abs_path): if "job_log" in file: @@ -1559,7 +1819,7 @@ def config_files_to_stats( project_url = resolve_url_database(project_url) credithistorylist = credit_history_file_to_list(credit_history_file) - # add info from credit history files + # Add info from credit history files for index, entry in enumerate(credithistorylist): try: # print('In credit_history_file for ' + project_url) @@ -1596,7 +1856,7 @@ def config_files_to_stats( credit_history[date]["CREDITAWARDED"] += delta_credits except Exception as e: log.error("Error parsing credit history files: {}".format(e)) - # find averages + # Find averages found_averages = calculate_credit_averages(return_stats) for url, stats_dict in found_averages.items(): combine_dicts(return_stats[url]["COMPILED_STATS"], stats_dict) @@ -1609,15 +1869,25 @@ def add_mag_to_combined_stats( approved_projects: List[str], preferred_projects: List[str], ) -> Tuple[dict, List[str]]: - """ - Add magnitude to combined_stats dict. Adds in dict and returns it as well. - :param combined_stats: COMBINED_STATS from main.py - :param mag_ratios: mag ratios returned from get_project_mag_ratios. A dict with project URL as key and mag ratio as value - :return: COMBINED_STATS w/ mag ratios added to it, list of projects which are being crunched but not on approved projects list + """Adds magnitude ratios to combined statistics + + Args: + combined_stats: COMBINED_STATS from main.py. + mag_ratios: Magnitude ratios returned from get_project_mag_ratios. 
+ A dictionary with project URL as key and magnitude ratio as value + approved_projects: + preferred_projects: + + Returns: A tuple consisting of: + COMBINED_STATS with magnitude ratios added to it, + list of projects which are being crunched but not on approved projects list. """ unapproved_list = [] + if not mag_ratios: + log.error('In add_mag_to_combined_ratios but mag_ratios is empty. Setting all mag ratios to zero.') + mag_ratios={} for project_url, project_stats in combined_stats.items(): - found_mag_ratio = mag_ratios.get(project_url) + found_mag_ratio = mag_ratios.get(project_url,0) if not found_mag_ratio: if project_url not in approved_projects: if project_url not in preferred_projects: @@ -1670,13 +1940,18 @@ def get_most_mag_efficient_projects( percentdiff: int = 10, quiet: bool = False, ) -> List[str]: - """ - Given combinedstats, return most mag efficient project(s). This is the #1 most efficient project and any other projects which are within percentdiff of that number. - If no project found, return empty list - Ignores ignored projects - :param combinedstats: combinedstats dict - :param percentdiff: Maximum percent diff - :return: List of project URLs or empty list if none found + """Determines most magnitude efficient project(s). + + Given combinedstats, determines most mag efficient project(s). This is the #1 + most efficient project and any other projects which are within percentdiff of + that number. + + Args: + combinedstats: combinedstats dict + percentdiff: Maximum percent diff + + Returns: + List of project URLs, or empty list if none are found. """ return_list = [] highest_project = get_first_non_ignored_project( @@ -1750,8 +2025,25 @@ def get_most_mag_efficient_projects( def sidestake_prompt( check_sidestake_results: bool, check_type: str, address: str ) -> None: - """ - A function to interactively ask user if they want to setup a sidestake, sets up a sidestake if they say yes + """Enable sidestaking if approved by user. 
+ + If sidestaking has not been enabled for the specified check_type, then prompt the user + for enabling sidestaking, and enable in the Gridcoin wallet for the specified address + and the entered percentage. + + Args: + check_sidestake_results: + True - sidestaking is currently enabled. + False - sidestaking currently not enabled. + check_type: + 'FOUNDATION' - sidestaking to the Gridcoin foundation. + 'DEVELOPER' - sidestaking to the FTM developer. + address: Gridcoin address of the check_type. + + Raises: + Exception: An error occurred while parsing the user's entered answer. + Exception: An error occurred attempting to access the Gridcoin wallet + configuration file. """ # If user is sidestaking, skip rest of this function if check_sidestake_results: @@ -1818,20 +2110,37 @@ def get_project_mag_ratios( response: dict = None, grc_projects: Union[Dict[str, str], None] = None, ) -> Union[Dict[str, float], None]: - """ - Returns project mag ratios or None if issues - :param grc_client: Should only be None if testing - :param lookback_period: number of superblocks to look back to determine average - :param response: Added for testing purposes - :param grc_projects: Output of listprojects command on wallet, should usually be None unless testing - :return: Dictionary w/ key as project URL and value as project mag ratio (mag per unit of RAC) + """Retrieve magnitude to RAC ratios for each project from Gridcoin client. + + Calculate the ratio of magnitude to RAC for each project the Gridcoin client + is aware of. Look back the number of specified superblocks for calculating the + average. + + A cache of the results is maintained and used if the Gridcoin client is unavailable. + + Args: + grc_client: Connection to Gridcoin client. If testing, set to None. + lookback_period: Number of superblocks to look back to determine average. + response: Used for testing purposes. + grc_projects: Set to None, unless for testing purposes. 
When testing + This is the output of the 'listprojects' command run on the Gridcoin client. + + Returns: + A dictionary with the key as project URL and value as project magnitude ratio + (mag per unit of RAC). + A value of None is returned in the event of an exception and no cached data. + + Raises: + Exception: An error occurred attempting to communicate with the Gridcoin client. """ global PROJECT_MAG_RATIOS_CACHE projects = {} return_dict = None try: if not response: - command_result = grc_client.run_command("superblocks", [lookback_period, True]) + command_result = grc_client.run_command( + "superblocks", [lookback_period, True] + ) response = command_result if not response: raise ConnectionError("Issues w superblocks command") @@ -1862,11 +2171,19 @@ def get_project_mag_ratios( def project_url_to_name_boinc(url: str, project_names: dict = None): - """ - Same as project_url_to_name except returns names for parsing BOINC logs - @param url: URL of a BOINC project - @param project_names: project names db from BOINC - @return: human-readable project name + """Attempt to convert specified project URL to the project name. + + This function is the same as project_url_to_name, except it returns names for + parsing BOINC logs. + + Args: + url: URL of desired BOINC project. + project_names: Dictionary of project names with the key as the project URL, + from the BOINC client database.. + + Returns: + The human-readable project name associated with the specified URL, or + the converted specified URL if the project is not found. """ if not project_names: project_names = BOINC_PROJECT_NAMES @@ -1878,11 +2195,19 @@ def project_url_to_name_boinc(url: str, project_names: dict = None): def project_url_to_name(url: str, project_names: Dict[str, str] = None): - """ - Low importance function only used when printing table. Don't use for anything else! 
- @param url: URL of a BOINC project - @param project_names: project names db from BOINC - @return: human-readable project name + """Attempt to convert specified project URL to the project name. + + This function is of low importance and must only be used when printing the table. + Do NOT USE for any other purpose. + + Args: + url: URL of desired BOINC project. + project_names: Dictionary of project names with the key as the project URL, + from the BOINC client database. + + Returns: + The human-readable project name associated with the specified URL, or + the converted specified URL if the project is not found. """ if not project_names: project_names = BOINC_PROJECT_NAMES @@ -1898,9 +2223,24 @@ def project_url_to_name(url: str, project_names: Dict[str, str] = None): def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str: - """ - Return left-aligned string with a total len of X and min_padding (extra space on right side) of min_pad, cutting off string if needed - If min_pad==1, it looks like this 'yourstring ' + """Left-aligns specified string using given length and padding. + + Constructs a string of length total_len with yourstring left-aligned and + padded with spaces on the right. Padding includes at least min_pad spaces, + cutting off yourstring if required. + + Example: ("examplestring", 15, 1) will create a string that looks like + this: 'examplestring '. + + Returns: + Left-aligned string of total_len with min_pad padding of spaces on the + right of the text. + + TODO: + Confirm that returned string should be shorter than total_len based on + the value of min_pad, or should the length always be total_len. + Example ("yourstring",15,1) returns 'yourstring ' where the length + is actually 14 instead of 15. 
""" if len(yourstring) >= total_len - min_pad: yourstring = yourstring[0 : total_len - (min_pad)] @@ -1910,9 +2250,27 @@ def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str: def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str: - """ - Return center-aligned string with a total len of X and min_padding (extra space on right & left side) of min_pad, cutting off string if needed - If min_pad==1, it looks like this ' yourstring '. If padding can't be equal on both sides, add +1 to right side + """Center-aligns specified string using given length and padding. + + Constructs a string of length total_len with yourstring center-aligned and + padded with spaces on the left and right. Padding includes at least min_pad + spaces, truncating yourstring if required. + + If the padding can not be equal on both sides, then an additional +1 padding is + added to the right side. + + Example: ("examplestring", 15, 1) will create a string that looks like + this: ' examplestring '. + + Returns: + Center-aligned string of total_len with min_pad padding of spaces on the + left and right of the text. + + TODO: + Confirm that returned string should be shorter than total_len based on + the value of min_pad, or should the length always be total_len. + Example ("yourstring",15,1) returns ' yourstring ' where the length + is actually 14 instead of 15. """ total_min_pad = min_pad * 2 room_for_string = total_len - total_min_pad @@ -1930,13 +2288,27 @@ def print_table( sleep_reason: str = DATABASE["TABLE_SLEEP_REASON"], status: str = DATABASE["TABLE_STATUS"], dev_status: bool = False, + clear: bool = False, ): + """Outputs to console a text based table with current status and statistics. + + This is the main display of the program. It is refreshed automatically at set + intervals. Statistics are displayed for each project as well as general information + regarding the performance of FTM. + + Args: + table_dict: Dictionary of project statistics. 
+ sortby: The table column attribute to sort the table rows by. + sleep_reason: Reason to sleep. + status: Most recent BOINC client status. + dev_status: Whether or not crunching is being done for the FTM developer. + """ if len(table_dict) == 0: print( "No projects have any assigned credit yet, a pretty table will appear here once you have earned some credit." ) # fmt: off - print(""" + print(r""" WNNXXXKKXW WNK0OkkxxkkkkdoK WX0kxdolx0XNNWWWNkoK @@ -2174,16 +2546,16 @@ def generate_stats( final_project_weights = {} dev_project_weights = {} # Canonicalize PREFERRED_PROJECTS list - to_del=[] + to_del = [] for url in preferred_projects.keys(): weight = preferred_projects[url] canonicalized = resolve_url_database(url) - if canonicalized!=url: + if canonicalized != url: to_del.append(url) preferred_projects[canonicalized] = weight for url in to_del: del preferred_projects[url] - # ignore unattached projects if requested + # Ignore unattached projects if requested if ignore_unattached: for project in approved_project_urls: boincified_url = resolve_url_boinc_rpc(project) @@ -2228,11 +2600,11 @@ def generate_stats( total_preferred_weight = (PREFERRED_PROJECTS_PERCENT / 100) * 1000 total_mining_weight = 1000 - total_preferred_weight total_mining_weight_remaining = total_mining_weight - # assign weight of 1 to all projects which didn't make the cut + # Assign weight of 1 to all projects which didn't make the cut for project_url in approved_project_urls: preferred_extract = preferred_projects.get(project_url) if preferred_extract: - continue # exclude preferred projects + continue # Exclude preferred projects if project_url in ignored_projects: final_project_weights[project_url] = 0 dev_project_weights[project_url] = 0 @@ -2247,7 +2619,7 @@ def generate_stats( continue if project_url not in most_efficient_projects or total_tasks < 10: weak_stats.append(project_url) - # assign weight of one to all project without enough stats + # Assign weight of one to all project without 
enough stats for project_url in weak_stats: final_project_weights[project_url] = 1 total_mining_weight_remaining -= 1 @@ -2330,7 +2702,7 @@ async def dev_cleanup(rpc_client: libs.pyboinc.rpc_client = None) -> None: try: rpc_client = loop.run_until_complete( setup_connection(BOINC_IP, DEV_BOINC_PASSWORD, port=DEV_RPC_PORT) - ) # setup dev BOINC RPC connection + ) # Setup dev BOINC RPC connection except Exception as e: log.error( "Asked to connect to dev client in dev_cleanup but unable to: {}".format( @@ -2406,7 +2778,7 @@ async def kill_all_unstarted_tasks( b = ET.SubElement(req, "name") b.text = name response = await rpc_client._request(req) - parsed = parse_generic(response) # returns True if successful + parsed = parse_generic(response) # Returns True if successful a = "21" else: # print('Keeping task {}'.format(task)) @@ -2431,7 +2803,7 @@ async def nnt_all_projects(rpc_client: libs.pyboinc.rpc_client) -> None: a = ET.SubElement(req, "project_url") a.text = project response = await rpc_client._request(req) - parsed = parse_generic(response) # returns True if successful + parsed = parse_generic(response) # Returns True if successful except Exception as e: log.error("Error NNTing all projects: {}".format(e)) @@ -2486,9 +2858,14 @@ def cache_full(project_name: str, messages) -> bool: if uppered_project not in str(message).upper(): continue difference = datetime.datetime.now() - message["time"] - if difference.seconds > 60 * 5: # if message is > 5 min old, skip + if difference.seconds > 60 * 5: # If message is > 5 min old, skip continue uppered_message_body = message["body"].upper() + if ( + """NOT REQUESTING TASKS: "NO NEW TASKS" REQUESTED VIA MANAGER""" + in uppered_message_body + ): + continue if uppered_project == message["project"].upper(): if ( "CPU: JOB CACHE FULL" in uppered_message_body @@ -2499,7 +2876,7 @@ def cache_full(project_name: str, messages) -> bool: log.debug("CPU cache appears full {}".format(message["body"])) if "NOT REQUESTING TASKS: DON'T 
NEED".upper() in uppered_message_body: if "GPU" not in message["body"].upper(): - gpu_full = True # if no GPU, GPU cache is always full + gpu_full = True # If no GPU, GPU cache is always full if ( "CPU: JOB CACHE FULL" in uppered_message_body or "NOT REQUESTING TASKS: DON'T NEED (JOB CACHE FULL)" @@ -2526,7 +2903,8 @@ def cache_full(project_name: str, messages) -> bool: else: if ( not gpu_full - ): # if GPU is not mentioned in log, this would always happen so using this to stop erroneous messages + ): # If GPU is not mentioned in log, this would always + # happen so using this to stop erroneous messages log.debug( "GPU cache appears not full {}".format(message["body"]) ) @@ -2555,9 +2933,9 @@ async def check_log_entries( message_count = int(parse_generic(msg_count_response)) req = ET.Element("get_messages") a = ET.SubElement(req, "seqno") - a.text = str(message_count - 50) # get ten most recent messages + a.text = str(message_count - 50) # Get ten most recent messages messages_response = await rpc_client._request(req) - messages = parse_generic(messages_response) # returns True if successful + messages = parse_generic(messages_response) # Returns True if successful if cache_full(project_name, messages): return True return False @@ -2571,7 +2949,8 @@ def project_backoff(project_name: str, messages) -> bool: Returns TRUE if project should be backed off. False otherwise or if unable to determine """ # Phrases which indicate project SHOULD be backed off - # removed 'project requested delay' from positive phrases because projects always provide this, even if work was provided! + # - removed 'project requested delay' from positive phrases because + # projects always provide this, even if work was provided! 
positive_phrases = [ "PROJECT HAS NO TASKS AVAILABLE", "SCHEDULER REQUEST FAILED", @@ -2609,7 +2988,7 @@ def project_backoff(project_name: str, messages) -> bool: if uppered_project not in uppered_message: continue difference = datetime.datetime.now() - message["time"] - if difference.seconds > 60 * 5: # if message is > 5 min old, skip + if difference.seconds > 60 * 5: # If message is > 5 min old, skip continue if backoff_ignore_message(message, ignore_phrases): continue @@ -2672,12 +3051,12 @@ async def check_log_entries_for_backoff( message_count = int(parse_generic(msg_count_response)) req = ET.Element("get_messages") a = ET.SubElement(req, "seqno") - a.text = str(message_count - 50) # get ten most recent messages + a.text = str(message_count - 50) # Get ten most recent messages messages_response = await rpc_client._request(req) - messages = parse_generic(messages_response) # returns True if successful + messages = parse_generic(messages_response) # Returns True if successful if project_name.upper() == "GPUGRID.NET": project_name = ( - "GPUGRID" # fix for log entries which show up under different name + "GPUGRID" # Fix for log entries which show up under different name ) return project_backoff(project_name, messages) except Exception as e: @@ -2698,13 +3077,13 @@ async def get_all_projects(rpc_client: libs.pyboinc.rpc_client) -> Dict[str, str messages_response = await rpc_client._request(req) project_status_reply = parse_generic( messages_response - ) # returns True if successful + ) # Returns True if successful project_names = {} for project in project_status_reply: project_names[project["url"]] = project["name"] project_names[ "https://gene.disi.unitn.it/test/" - ] = "TN-Grid" # added bc BOINC client does not list this project for some reason + ] = "TN-Grid" # Added bc BOINC client does not list this project for some reason return project_names @@ -2719,7 +3098,8 @@ async def get_attached_projects( found_projects.append(project.master_url) if isinstance( 
project.project_name, bool - ): # this happens if project is "attached" but unable to communicate w project due to it being down or some other issue + ): # This happens if project is "attached" but unable to communicate + # with the project due to it being down or some other issue project_names[project.master_url] = project.master_url else: project_names[project.master_url] = project.project_name @@ -2758,20 +3138,20 @@ async def prefs_check( : global_prefs : for testing only : disk usage : for testing only """ - # authorize BOINC client + # Authorize BOINC client authorize_response = await rpc_client.authorize() - # get prefs + # Get prefs return_val = True if not global_prefs: req = ET.Element("get_global_prefs_working") response = await rpc_client._request(req) - parsed = parse_generic(response) # returns True if successful + parsed = parse_generic(response) # Returns True if successful global_prefs = parsed if not disk_usage: - # get actual disk usage + # Get actual disk usage req = ET.Element("get_disk_usage") response = await rpc_client._request(req) - usage = parse_generic(response) # returns True if successful + usage = parse_generic(response) # Returns True if successful disk_usage = usage max_gb = int(float(global_prefs.get("disk_max_used_gb", 0))) used_max_gb = int(int(disk_usage["d_allowed"]) / 1024 / 1024 / 1024) @@ -2829,7 +3209,7 @@ def get_highest_priority_project( if not attached_projects: attached_projects = [] priority_dict = {} - # calculate total time from stats + # Calculate total time from stats total_xday_time = 0 total_time = 0 for found_key, projectstats in combined_stats.items(): @@ -2860,7 +3240,7 @@ def get_highest_priority_project( else: if ( weight == 1 - ): # benchmarking projects should be over ALL time not just recent time + ): # Benchmarking projects should be over ALL time not just recent time existing_time = combined_stats_extract["COMPILED_STATS"][ "TOTALWALLTIME" ] @@ -2921,7 +3301,7 @@ def 
get_project_mag_ratios_from_response( if i == 0: projects[project_name] = [] else: - continue # skip projects which are on greylist + continue # Skip projects which are on greylist projects[project_name].append(project_stats["rac"]) for project_name, project_racs in projects.items(): average_rac = sum(project_racs) / len(project_racs) @@ -2964,11 +3344,21 @@ def get_project_mag_ratios_from_url( return None try: loaded_json = json.loads(resp.text) + if not loaded_json: + raise Exception + if len(loaded_json) == 0: + raise Exception response = get_project_mag_ratios_from_response( loaded_json, lookback_period, project_resolver_dict ) except Exception as e: log.error("E in get_project_mag_ratios_from_url:{}".format(e)) + if len(PROJECT_MAG_RATIOS_CACHE) > 0: + print_and_log( + "Error communicating with gridcoinstats for magnitude info, using cached data", + "ERROR", + ) + return PROJECT_MAG_RATIOS_CACHE return None else: return response @@ -2983,10 +3373,12 @@ def profitability_check( combined_stats: dict, ) -> bool: """ - Returns True if crunching is profitable right now. False otherwise. + Returns True if crunching is profitable right now. False if otherwise or unable to determine. """ if not grc_sell_price: grc_sell_price = 0.00 + if not isinstance(grc_price,float) and not isinstance(grc_price,int): + return False combined_stats_extract = combined_stats.get(project) if not combined_stats_extract: log.error( @@ -3104,13 +3496,20 @@ def benchmark_check( def actual_save_stats(database: Any, path: str = None) -> None: + """ + Save a JSON database file. 
Normally saves to given path.txt unless the path is "stats" + in which case it saves to stats.json + """ + if path: + if path == "stats": + path = "stats.json" try: if not path: - with open("stats.json", "w") as fp: + with open(path + ".txt", "w") as fp: json.dump(database, fp, default=json_default) SAVE_STATS_DB["DATABASE"] = DATABASE else: - with open(path + ".txt", "w") as fp: + with open(path, "w") as fp: json.dump(database, fp, default=json_default) SAVE_STATS_DB[path] = database finally: @@ -3118,21 +3517,25 @@ def actual_save_stats(database: Any, path: str = None) -> None: def save_stats(database: Any, path: str = None) -> None: + """ + Caching function to save a database. If the database + has changed, save it, otherwise don't. + """ + if not path: + path = "stats" try: - if not path: - if "DATABASE" in SAVE_STATS_DB: - if database != SAVE_STATS_DB["DATABASE"]: - actual_save_stats(database, path) - else: + if path in SAVE_STATS_DB: + if SAVE_STATS_DB[path] != database: + log.debug("Saving DB {}".format(path)) actual_save_stats(database, path) - else: - if path in SAVE_STATS_DB: - if SAVE_STATS_DB[path] != database: - actual_save_stats(database, path) else: - actual_save_stats(database, path) + log.debug("Skipping save of DB {}".format(path)) + else: + log.debug("Saving DB bc not in SAVE_STATS_DB {}".format(path)) + actual_save_stats(database, path) except Exception as e: log.error("Error saving db {}{}".format(path, e)) + SAVE_STATS_DB[path] = copy.deepcopy(database) def custom_sleep(sleep_time: float, boinc_rpc_client, dev_loop: bool = False): @@ -3150,7 +3553,7 @@ def custom_sleep(sleep_time: float, boinc_rpc_client, dev_loop: bool = False): DATABASE["DEVTIMETOTAL"] += 1 else: DATABASE["FTMTOTAL"] += 1 - # save database every ten minutes or at end of routine + # Save database every ten minutes or at end of routine if str(elapsed).endswith("0") or elapsed + 1 >= sleep_time: save_stats(DATABASE) elapsed += 1 @@ -3201,7 +3604,7 @@ def setup_dev_boinc() -> 
str: """ Do initial setup of and start dev boinc client. Returns RPC password. Returns 'ERROR' if unable to start BOINC """ - # check if dev BOINC directory exists, create if it doesn't + # Check if dev BOINC directory exists, create if it doesn't dev_path = os.path.abspath("DEVACCOUNT") boinc_executable = "/usr/bin/boinc" if "WINDOWS" in FOUND_PLATFORM.upper(): @@ -3211,7 +3614,7 @@ def setup_dev_boinc() -> str: if not os.path.exists("DEVACCOUNT"): os.mkdir(dev_path) - # update settings to match user settings from main BOINC install + # Update settings to match user settings from main BOINC install global_settings_path = os.path.join(BOINC_DATA_DIR, "global_prefs.xml") override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml") override_dest_path = os.path.join( @@ -3343,12 +3746,14 @@ def update_table( status: str = None, dev_status: bool = False, dev_loop: bool = False, + clear: bool = True, ): """ Function to update table printed to user. :param status = Most recent status "waiting for xfers, starting crunching on x, etc" """ - # don't update table in dev loop because all our variables reference dev install not main one + # Don't update table in dev loop because all our variables reference + # dev install, not main one if dev_loop or SKIP_TABLE_UPDATES: return if not sleep_reason: @@ -3370,7 +3775,8 @@ def update_table( } ignore_list = ["MAGPERCREDIT"] # generate table to print pretty - os.system("cls" if os.name == "nt" else "clear") # clear terminal + if clear: + os.system("cls" if os.name == "nt" else "clear") # clear terminal table_dict = {} for project_url, stats_dict in COMBINED_STATS.items(): table_dict[project_url] = {} @@ -3400,6 +3806,7 @@ def update_table( sleep_reason=sleep_reason, status=status, dev_status=dev_status, + clear=clear, ) @@ -3413,12 +3820,14 @@ def boinc_loop( :param client_rpc_client client BOINC rpc client, as it must be accessed in dev mode and kept in suspend :param time How long to crunch for. 
Only used by dev mode at the moment """ - # if we are not passed this variable, it means we are not crunching for dev, so we fallback to global BOINC rpc + # If we are not passed this variable, it means we are not crunching for dev, + # so we fallback to global BOINC rpc if not client_rpc_client: client_rpc_client = rpc_client existing_cpu_mode = None existing_gpu_mode = None - # these variables are referenced outside the loop (or in recursive calls of the loop) so should be made global + # These variables are referenced outside the loop + # (or in recursive calls of the loop) so should be made global global COMBINED_STATS global COMBINED_STATS_DEV global FINAL_PROJECT_WEIGHTS @@ -3445,6 +3854,8 @@ def boinc_loop( CRUNCHING_FOR_DEV = False if mode not in DATABASE: DATABASE[mode] = {} + if DUMP_DATABASE: + save_stats(DATABASE, "DATABASE_DUMP") # Note yoyo@home does not support weak auth so it can't be added here # URLs must be in canonicalized database format @@ -3468,7 +3879,8 @@ def boinc_loop( while True: discrepancy = owed_to_dev() - # If we have done sufficient crunching in dev mode, exit dev loop. Closing dev client is done after exiting loop. + # If we have done sufficient crunching in dev mode, exit dev loop. + # Closing dev client is done after exiting loop. 
if discrepancy < 1 and not FORCE_DEV_MODE and dev_loop: return None @@ -3479,12 +3891,12 @@ def boinc_loop( authorize_response = loop.run_until_complete(rpc_client.authorize()) temp_project_list, BOINC_PROJECT_NAMES = loop.run_until_complete( get_attached_projects(rpc_client) - ) # we need to re-fetch this as it's different for dev and client + ) # We need to re-fetch this as it's different for dev and client if mode == "DEV": ATTACHED_PROJECT_SET_DEV.update(temp_project_list) else: ATTACHED_PROJECT_SET.update(temp_project_list) - # update ALL_BOINC_PROJECTS if we find any new names + # Update ALL_BOINC_PROJECTS if we find any new names for url, project_name in BOINC_PROJECT_NAMES.items(): if url not in ALL_BOINC_PROJECTS: ALL_BOINC_PROJECTS[url] = project_name @@ -3506,7 +3918,7 @@ def boinc_loop( ) if ( (abs(mag_fetch_delta.days) * 24 * 60) + (abs(mag_fetch_delta.seconds) / 60) - ) > 1442: # only re-check mag once a day: + ) > 1442: # Only re-check mag once a day: if MAG_RATIO_SOURCE == "WALLET": MAG_RATIOS = get_project_mag_ratios(grc_client, LOOKBACK_PERIOD) log.debug( @@ -3520,11 +3932,12 @@ def boinc_loop( if ( (abs(stats_calc_delta.days) * 24 * 60) + (abs(stats_calc_delta.seconds) / 60) - ) > RECALCULATE_STATS_INTERVAL: # only re-calculate stats every x minutes + ) > RECALCULATE_STATS_INTERVAL: # Only re-calculate stats every x minutes log.debug("Calculating stats..") DATABASE["STATSLASTCALCULATED"] = datetime.datetime.now() COMBINED_STATS = config_files_to_stats(BOINC_DATA_DIR) - # total_time = combined_stats_to_total_time(COMBINED_STATS) # Not sure what this line did but commented out, we'll see if anything breaks + # Not sure what this line did but commented out, we'll see if anything breaks + # total_time = combined_stats_to_total_time(COMBINED_STATS) if dev_loop: ( COMBINED_STATS_DEV, @@ -3577,7 +3990,7 @@ def boinc_loop( log.debug( "Highest priority projects are: " + str(highest_priority_projects) ) - # print some pretty stats + # Print some pretty 
stats update_table(dev_loop=dev_loop) log.info( @@ -3601,7 +4014,8 @@ def boinc_loop( DATABASE["GRCPRICE"] = grc_price else: grc_price = DATABASE["GRCPRICE"] - # Check profitability of all projects, if none profitable (and user doesn't want unprofitable crunching), sleep for 1hr + # Check profitability of all projects, if none profitable + # (and user doesn't want unprofitable crunching), sleep for 1hr if ONLY_BOINC_IF_PROFITABLE and not dev_loop: profitability_list = [] for project in highest_priority_projects: @@ -3640,7 +4054,8 @@ def boinc_loop( sleep(60 * 60) continue - # If we have enabled temperature control, verify that crunching is allowed at current temp + # If we have enabled temperature control, verify that crunching is + # allowed at current temp if ENABLE_TEMP_CONTROL: # Get BOINC's starting CPU and GPU modes existing_mode_info = loop.run_until_complete( @@ -3675,7 +4090,8 @@ def boinc_loop( if not temp_check(): while True: # Keep sleeping until we pass a temp check log.debug("Sleeping due to temperature") - # Put BOINC into sleep mode, automatically reverting if script closes unexpectedly + # Put BOINC into sleep mode, automatically reverting if + # script closes unexpectedly sleep_interval = str(int(((60 * TEMP_SLEEP_TIME) + 60))) loop.run_until_complete( run_rpc_command( @@ -3712,7 +4128,8 @@ def boinc_loop( if dev_boinc_password == "ERROR": log.error("Error setting up crunching to developer account") else: - # setup dev RPC connection, it may take a few tries while we wait for it to come online + # Setup dev RPC connection, it may take a few tries while we + # wait for it to come online tries = 1 tries_max = 5 dev_rpc_client = None @@ -3722,10 +4139,10 @@ def boinc_loop( setup_connection( BOINC_IP, DEV_BOINC_PASSWORD, port=DEV_RPC_PORT ) - ) # setup dev BOINC RPC connection + ) # Setup dev BOINC RPC connection authorize_response = loop.run_until_complete( dev_rpc_client.authorize(DEV_BOINC_PASSWORD) - ) # authorize dev RPC connection + ) # 
Authorize dev RPC connection if not dev_rpc_client: raise Exception("Error connecting to boinc dev client") except Exception as e: @@ -3739,9 +4156,12 @@ def boinc_loop( if tries > tries_max: log.error("Giving up on connecting to BOINC dev client") if dev_rpc_client: - # Set main BOINC to suspend until we're done crunching in dev mode. It will automatically re-enable itself in 100x the time if nothing is done - # This allows for non-graceful exits of this script to not brick client's BOINC and considerations that dev account may not be crunching full time if client - # is actively using computer. + # Set main BOINC to suspend until we're done crunching in dev mode. + # It will automatically re-enable itself in 100x the time if nothing + # is done. + # This allows for non-graceful exits of this script to not brick + # client's BOINC and considerations that dev account may not be + # crunching full time if client is actively using computer. existing_mode_info = loop.run_until_complete( run_rpc_command(rpc_client, "get_cc_status") ) @@ -3770,7 +4190,7 @@ def boinc_loop( existing_gpu_mode = LAST_KNOWN_GPU_MODE if ( existing_cpu_mode and existing_gpu_mode - ): # we can't do this if we don't know what mode to revert back to + ): # We can't do this if we don't know what mode to revert back to discrepancy = owed_to_dev() timeout = make_discrepancy_timeout(discrepancy) loop.run_until_complete( @@ -3802,12 +4222,12 @@ def boinc_loop( rpc_client=dev_rpc_client, client_rpc_client=rpc_client, time=DATABASE["DEVTIMECOUNTER"], - ) # run the BOINC loop :) + ) # Run the BOINC loop :) loop.run_until_complete(dev_cleanup(dev_rpc_client)) log.debug("dev_cleanup_called it appears boinc_loop ended") update_table(dev_loop=dev_loop) DEV_LOOP_RUNNING = False - # re-enable client BOINC + # Re-enable client BOINC loop.run_until_complete( run_rpc_command(rpc_client, "set_gpu_mode", existing_gpu_mode) ) @@ -3817,12 +4237,12 @@ def boinc_loop( else: log.error("Unable to start dev mode due to 
unknown last mode") - # loop through each project in order of priority and request new tasks if not backed off - # stopping looping if cache becomes full + # Loop through each project in order of priority and request new tasks if + # not backed off, stopping looping if cache becomes full dont_nnt = None if dev_loop: project_loop = DEV_PROJECT_WEIGHTS - # re-up suspend on main client + # Re-up suspend on main client timeout = make_discrepancy_timeout(discrepancy) loop.run_until_complete( run_rpc_command( @@ -3880,7 +4300,8 @@ ) ) continue - # If user has set to only mine highest mag project if profitable and it's not profitable or in benchmarking mode, skip + # If user has set to only mine highest mag project if profitable and + # it's not profitable or in benchmarking mode, skip if ( ONLY_MINE_IF_PROFITABLE and not profitability_result @@ -3901,7 +4322,8 @@ continue if database_url not in DATABASE[mode]: DATABASE[mode][database_url] = {} - # skip checking project if we have a backoff counter going and it hasn't been long enough + # Skip checking project if we have a backoff counter going and it + # hasn't been long enough last_project_check: datetime.datetime = DATABASE[mode][database_url].get( "LAST_CHECKED", datetime.datetime(1997, 6, 21, 18, 25, 30) ) @@ -3914,7 +4336,7 @@ DATABASE[ "TABLE_STATUS" ] = "Skipping {} due to backoff period...".format( - {highest_priority_project} + highest_priority_project ) update_table(dev_loop=dev_loop) log.debug( @@ -3931,18 +4353,20 @@ log.info("Waiting for any xfers to complete...") dl_response = wait_till_no_xfers( rpc_client - ) # wait until all network activity has concluded - # if in dev_loop, attach to project if needed + ) # Wait until all network activity has concluded + # If in dev_loop, attach to project if needed if dev_loop: get_project_list = loop.run_until_complete( run_rpc_command(rpc_client, "get_project_status") ) - # on first run, there is no 
project list + # On first run, there is no project list if isinstance(get_project_list, list): + # Convert to simple list of strings so we can check if + # project URL is in list converted_project_list = project_list_to_project_list( get_project_list - ) # convert to simple list of strings so we can check if project URL is in list + ) else: log.warning( "Dev BOINC shows empty project list, this is normal on first run" @@ -3953,7 +4377,7 @@ def boinc_loop( resolve_url_boinc_rpc(highest_priority_project, dev_mode=dev_loop) not in converted_project_list ): - # yoyo will never be in project dict due to not supporting weak auth + # Yoyo will never be in project dict due to not supporting weak auth converted_dev_project_url = resolve_url_boinc_rpc( highest_priority_project, dev_mode=dev_loop ) @@ -3976,21 +4400,21 @@ def boinc_loop( arg2="authenticator", arg2_val=DEV_PROJECT_DICT[database_url], ) - ) # update project + ) # Update project sleep(60) # give it a chance to finish attaching ( temp_project_list, BOINC_PROJECT_NAMES, ) = loop.run_until_complete( get_attached_projects(rpc_client) - ) # we need to re-fetch this as it's now changed + ) # We need to re-fetch this as it's now changed ATTACHED_PROJECT_SET.update(temp_project_list) boincified_url = resolve_url_boinc_rpc( highest_priority_project, dev_mode=dev_loop - ) # this may have changed, so check + ) # This may have changed, so check if ( len(ATTACHED_PROJECT_SET) == 0 - ): # using this as a proxy for "failed attach" + ): # Using this as a proxy for "failed attach" log.error( "Appears to fail to attach to {}".format(boincified_url) ) @@ -4013,19 +4437,20 @@ def boinc_loop( run_rpc_command( rpc_client, "project_update", "project_url", boincified_url ) - ) # update project + ) # Update project log.debug( "Requesting work from {} added to debug no new tasks bug" + str(boincified_url) ) log.debug("Update response is {}".format(update_response)) - sleep( - 15 - ) # give BOINC time to update w project, I don't know a 
less hacky way to do this, suggestions are welcome + # Give BOINC time to update w project, I don't know a less hacky way to + # do this, suggestions are welcome + sleep(15) DATABASE[mode][database_url]["LAST_CHECKED"] = datetime.datetime.now() - # check if project should be backed off. If so, back it off. + # Check if project should be backed off. If so, back it off. # This is an exponentially increasing backoff with a maximum time of 1 day - # Projects are backed off if they request it, if they are unresponsive/down, or if no work is available + # Projects are backed off if they request it, if they are + # unresponsive/down, or if no work is available backoff_response = loop.run_until_complete( check_log_entries_for_backoff(rpc_client, project_name=project_name) ) @@ -4041,9 +4466,9 @@ def boinc_loop( log.debug("Waiting for any xfers to complete...") dl_response = wait_till_no_xfers( rpc_client - ) # wait until all network activity has concluded + ) # Wait until all network activity has concluded - # re-NNT all projects + # Re-NNT all projects nnt_response = loop.run_until_complete( nnt_all_projects(rpc_client) ) # NNT all projects @@ -4061,7 +4486,8 @@ def boinc_loop( break # Allow highest priority project to be non-NNTd. 
- # This enables BOINC to fetch work if it's needed before our sleep period elapses + # This enables BOINC to fetch work if it's needed before our + # sleep period elapses dont_nnt = resolve_url_database(project_loop[0]) allow_this_project = resolve_url_boinc_rpc(dont_nnt, dev_mode=dev_loop) allow_response = loop.run_until_complete( @@ -4069,9 +4495,8 @@ def boinc_loop( rpc_client, "project_allowmorework", "project_url", allow_this_project ) ) - custom_sleep( - 30, rpc_client, dev_loop=dev_loop - ) # There's no reason to loop through all projects more than once every 30 minutes + # There's no reason to loop through all projects more than once every 30 minutes + custom_sleep(30, rpc_client, dev_loop=dev_loop) def print_and_log(msg: str, log_level: str) -> None: @@ -4102,7 +4527,7 @@ def create_default_database() -> Dict[str, Any]: if __name__ == "__main__": - wallet_running = True # switches to false if we have issues connecting + wallet_running = True # Switches to false if we have issues connecting # Verify we are in appropriate python environment python_major = sys.version_info.major @@ -4125,10 +4550,14 @@ def create_default_database() -> Dict[str, Any]: del python_minor del python_major log.debug("Python version {}".format(platform.python_version())) + # These must be declared early in case the user ctrl+Cs the script. + # This way, safe_exit can use these paths + override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml") + override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml") - shutdown_dev_client( - quiet=True - ) # shut down dev client is it's running. This is useful if program shuts down unexpectedly + # Shut down dev client is it's running. 
This is useful if program shuts + # down unexpectedly + shutdown_dev_client(quiet=True) # Load long-term stats if os.path.exists("stats.json"): @@ -4156,6 +4585,7 @@ def create_default_database() -> Dict[str, Any]: DATABASE = create_default_database() save_stats(DATABASE) else: + log.warning("No stats file found, making new one...") DATABASE = create_default_database() save_stats(DATABASE) @@ -4194,7 +4624,8 @@ def create_default_database() -> Dict[str, Any]: # } # }, # } - # check that directories exist + + # Check that directories exist log.info("Guessing BOINC data dir is " + str(BOINC_DATA_DIR)) if not os.path.isdir(BOINC_DATA_DIR): print_and_log( @@ -4211,8 +4642,6 @@ ) input("Press enter to continue or CTRL+C to quit") wallet_running = False - override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml") - override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml") try: os.access(override_path, os.W_OK) @@ -4230,7 +4659,7 @@ if not SCRIPTED_RUN: input("Press enter to continue") - # auto-detect password for BOINC RPC if it exists and user didn't know + # Auto-detect password for BOINC RPC if it exists and user didn't know it # BOINC on Windows automatically generates an RPC password auth_location = os.path.join(BOINC_DATA_DIR, "gui_rpc_auth.cfg") if not BOINC_PASSWORD: @@ -4364,16 +4793,18 @@ try: rpc_client = loop.run_until_complete( setup_connection(BOINC_IP, BOINC_PASSWORD, BOINC_PORT) - ) # setup BOINC RPC connection + ) # Set up BOINC RPC connection except Exception as e: print_and_log("Error: Unable to connect to BOINC client, quitting now", "ERROR") quit() if not rpc_client: print_and_log("Error: Unable to connect to BOINC client, quitting now", "ERROR") quit() + # Get project list from BOINC client directly. 
This is needed for + # correct capitalization temp_project_set, temp_project_names = loop.run_until_complete( get_attached_projects(rpc_client) - ) # get project list from BOINC client directly. This is needed for correct capitalization + ) if not temp_project_set or not temp_project_names: print_and_log( "Error connecting to BOINC client, unable to get project list.", "ERROR" @@ -4390,14 +4821,12 @@ def create_default_database() -> Dict[str, Any]: # Get project list from Gridcoin wallet and/or gridcoinstats, check sidestakes foundation_address = "bc3NA8e8E3EoTL1qhRmeprbjWcmuoZ26A2" developer_address = "RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K" - MAG_RATIOS = ( - {} - ) # added to prevent pycharm "may be undefined". Can't be though because the app quits if it can't be found try: grc_client = GridcoinClientConnection( rpc_user=rpc_user, rpc_port=rpc_port, rpc_password=gridcoin_rpc_password ) - source_urls = grc_client.get_approved_project_urls() # this is just to test if the client is connectable + # Test if the client is connectable + source_urls = grc_client.get_approved_project_urls() wait_till_synced(grc_client) source_urls = grc_client.get_approved_project_urls() log.debug("Got source_urls from wallet: {}".format(source_urls)) @@ -4481,13 +4910,13 @@ def create_default_database() -> Dict[str, Any]: mag_ratios=MAG_RATIOS, ) log.debug("Printing pretty stats...") - # calculate starting efficiency stats + # Calculate starting efficiency stats if "STARTMAGHR" not in DATABASE: DATABASE["STARTMAGHR"] = get_avg_mag_hr(COMBINED_STATS) else: original_avg_mag_hr = DATABASE["STARTMAGHR"] current_avg_mag_hr = get_avg_mag_hr(COMBINED_STATS) - # generate table to print pretty + # Generate table to print pretty table_dict = {} for project_url, stats_dict in COMBINED_STATS.items(): table_dict[project_url] = {} @@ -4500,7 +4929,7 @@ def create_default_database() -> Dict[str, Any]: if len(table_dict) > 0: print("SOME PRETTY STATS JUST FOR YOU, SORTED BY AVG GRC/DAY") priority_results = 
{} - update_table() + update_table(clear=False) del priority_results # this is only created temporarily as update_table expects it else: print( @@ -4601,10 +5030,11 @@ def create_default_database() -> Dict[str, Any]: priority_results = {} highest_priority_project = "" highest_priority_projects = [] - DATABASE["STATSLASTCALCULATED"] = datetime.datetime( - 1997, 3, 3 - ) # force calculation of stats at first run since they are not cached in DB - # While we don't have enough tasks, continue cycling through project list and updating. If we have cycled through all projects, get_highest_priority_project will stall to prevent requesting too often + # Force calculation of stats at first run since they are not cached in DB + DATABASE["STATSLASTCALCULATED"] = datetime.datetime(1997, 3, 3) + # While we don't have enough tasks, continue cycling through project list and + # updating. If we have cycled through all projects, get_highest_priority_project + # will stall to prevent requesting too often boinc_loop(False, rpc_client) # Restore user prefs safe_exit(None, None) diff --git a/updates.txt b/updates.txt index 4a67a4a..3b86804 100644 --- a/updates.txt +++ b/updates.txt @@ -5,4 +5,5 @@ 2.1,0,Update is strongly suggested fixes several major bugs in project handling 2.2,1,FindTheMag critical security update please see Github for more info 2.3,0,Various usability improvements and crash fixes -3.0,0,Massive improvements in stability and new config file format \ No newline at end of file +3.0,0,Massive improvements in stability and new config file format +3.1,0,Stability improvements to reduce crashes \ No newline at end of file