From 188928be6f4437115d12c2af0aadcf78fc3f3c0d Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Tue, 16 May 2023 21:42:34 -0700
Subject: [PATCH 01/23] Add .git-blame-ignore-revs to .gitignore.
.git-blame-ignore-revs is used to filter out Black style modifications from
git blame
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index 2859cff..f914ab1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,4 +6,5 @@
/global_prefs_override_backup.xml
/.idea/
/venv/
+/.git-blame-ignore-revs
scratch.py
From 66db6c3b55e24122e3a374235e47af852c5807dc Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Tue, 16 May 2023 21:57:54 -0700
Subject: [PATCH 02/23] Initial Black style application to main.py
---
main.py | 3937 +++++++++++++++++++++++++++++++++++--------------------
1 file changed, 2541 insertions(+), 1396 deletions(-)
diff --git a/main.py b/main.py
index 0c8a3b8..beae69c 100644
--- a/main.py
+++ b/main.py
@@ -1,6 +1,6 @@
# DO NOT EDIT THIS FILE, EDIT CONFIG.PY INSTEAD
from itertools import chain
-from math import floor,ceil
+from math import floor, ceil
import copy
import shlex
import shutil
@@ -24,188 +24,227 @@
import requests
from requests.auth import HTTPBasicAuth
from typing import List, Union, Dict, Tuple, Any
-import sys,signal
+import sys, signal
# ignore deprecation warnings in Windows
import warnings
-warnings.filterwarnings('ignore',category=DeprecationWarning)
+
+warnings.filterwarnings("ignore", category=DeprecationWarning)
# Set default settings for all vars
-preferred_projects_percent:float=80
-preferred_projects:Dict[str, int]={}
-ignored_projects:List[str] = ['https://foldingathome.div72.xyz/']
-boinc_data_dir:Union[str,None]=None
-gridcoin_data_dir:Union[str,None]=None
-control_boinc:bool=False
-boinc_ip:str='127.0.0.1'
-boinc_port:int=31416
-boinc_username:Union[str,None]=None
-boinc_password:Union[str,None]=None
-min_recheck_time:int=30 # minimum time in minutes before re-asking a project for work who previously said they were out
-abort_unstarted_tasks:bool=False
-recalculate_stats_interval:int=60
-price_check_interval:int=720
-local_kwh:float=0.1542
-grc_sell_price:Union[float,None]=None
-exchange_fee:float=0.00
-only_BOINC_if_profitable:bool=False
-only_mine_if_profitable:bool=False
-host_power_usage:float=70
-min_profit_per_hour:float=0
-benchmarking_minimum_wus:float=5
-benchmarking_minimum_time:float=10
-benchmarking_delay_in_days:float=160
-skip_benchmarking:bool=False
-dev_fee:float=.05
-VERSION=2.2
-DEV_RPC_PORT=31418
-log_level='WARNING'
-start_temp:int=65
-stop_temp:int=75
-temp_command=None
-enable_temp_control=True # Enable controlling BOINC based on temp. Default: False
-temp_sleep_time=10
-temp_regex=r'\d*'
-max_logfile_size_in_mb=10
-rolling_weight_window=60
-lookback_period=30
+preferred_projects_percent: float = 80
+preferred_projects: Dict[str, int] = {}
+ignored_projects: List[str] = ["https://foldingathome.div72.xyz/"]
+boinc_data_dir: Union[str, None] = None
+gridcoin_data_dir: Union[str, None] = None
+control_boinc: bool = False
+boinc_ip: str = "127.0.0.1"
+boinc_port: int = 31416
+boinc_username: Union[str, None] = None
+boinc_password: Union[str, None] = None
+min_recheck_time: int = 30 # minimum time in minutes before re-asking a project for work who previously said they were out
+abort_unstarted_tasks: bool = False
+recalculate_stats_interval: int = 60
+price_check_interval: int = 720
+local_kwh: float = 0.1542
+grc_sell_price: Union[float, None] = None
+exchange_fee: float = 0.00
+only_BOINC_if_profitable: bool = False
+only_mine_if_profitable: bool = False
+host_power_usage: float = 70
+min_profit_per_hour: float = 0
+benchmarking_minimum_wus: float = 5
+benchmarking_minimum_time: float = 10
+benchmarking_delay_in_days: float = 160
+skip_benchmarking: bool = False
+dev_fee: float = 0.05
+VERSION = 2.2
+DEV_RPC_PORT = 31418
+log_level = "WARNING"
+start_temp: int = 65
+stop_temp: int = 75
+temp_command = None
+enable_temp_control = True # Enable controlling BOINC based on temp. Default: False
+temp_sleep_time = 10
+temp_regex = r"\d*"
+max_logfile_size_in_mb = 10
+rolling_weight_window = 60
+lookback_period = 30
# Some globals we need. I try to have all globals be ALL CAPS
-FORCE_DEV_MODE=False # used for debugging purposes to force crunching under dev account
-BOINC_PROJECT_NAMES={}
-DATABASE={}
-DATABASE['TABLE_SLEEP_REASON']= '' # sleep reason printed in table, must be reset at script start
-DATABASE['TABLE_STATUS']='' # info status printed in table, must be reset at script start
-SCRIPTED_RUN:bool=False
-SKIP_TABLE_UPDATES:bool=False
-HOST_COST_PER_HOUR = ( host_power_usage / 1000 ) * local_kwh
+FORCE_DEV_MODE = (
+ False # used for debugging purposes to force crunching under dev account
+)
+BOINC_PROJECT_NAMES = {}
+DATABASE = {}
+DATABASE[
+ "TABLE_SLEEP_REASON"
+] = "" # sleep reason printed in table, must be reset at script start
+DATABASE[
+ "TABLE_STATUS"
+] = "" # info status printed in table, must be reset at script start
+SCRIPTED_RUN: bool = False
+SKIP_TABLE_UPDATES: bool = False
+HOST_COST_PER_HOUR = (host_power_usage / 1000) * local_kwh
# Translates BOINC's CPU and GPU Mode replies into English. Note difference between keys integer vs string.
-CPU_MODE_DICT = {
- 1: 'always',
- 2: 'auto',
- 3: 'never'
-}
-GPU_MODE_DICT = {
- '1': 'always',
- '2': 'auto',
- '3': 'never'
-}
-DEV_BOINC_PASSWORD='' # this is only used for printing to table, not used elsewhere
-DEV_LOOP_RUNNING=False
+CPU_MODE_DICT = {1: "always", 2: "auto", 3: "never"}
+GPU_MODE_DICT = {"1": "always", "2": "auto", "3": "never"}
+DEV_BOINC_PASSWORD = "" # this is only used for printing to table, not used elsewhere
+DEV_LOOP_RUNNING = False
# import user settings from config
try:
from config import *
except Exception as e:
- print('Error opening config.py, using defaults! Error is: {}'.format(e))
+ print("Error opening config.py, using defaults! Error is: {}".format(e))
# if user has no preferred projects, their % of crunching should be 0
-if len(preferred_projects)==0:
- preferred_projects_percent:float=0
+if len(preferred_projects) == 0:
+ preferred_projects_percent: float = 0
# setup logging
log = logging.getLogger()
-if log_level=='NONE':
+if log_level == "NONE":
log.addHandler(logging.NullHandler())
else:
- handler = logging.handlers.RotatingFileHandler(os.environ.get("LOGFILE", "debug.log"),
- maxBytes=max_logfile_size_in_mb * 1024 * 1024, backupCount=1)
+ handler = logging.handlers.RotatingFileHandler(
+ os.environ.get("LOGFILE", "debug.log"),
+ maxBytes=max_logfile_size_in_mb * 1024 * 1024,
+ backupCount=1,
+ )
log.setLevel(os.environ.get("LOGLEVEL", log_level))
formatter = logging.Formatter(logging.BASIC_FORMAT)
handler.setFormatter(formatter)
log.addHandler(handler)
+
class GridcoinClientConnection:
"""
A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently quite barebones.
"""
- def __init__(self, config_file:str=None, ip_address:str='127.0.0.1', rpc_port:str='9876', rpc_user:str=None, rpc_password:str=None,):
- self.configfile=config_file #absolute path to the client config file
- self.ipaddress=ip_address
- self.rpc_port=rpc_port
- self.rpcuser=rpc_user
- self.rpcpassword=rpc_password
- def run_command(self,command:str,arguments:List[Union[str,bool]]=None)->dict:
+
+ def __init__(
+ self,
+ config_file: str = None,
+ ip_address: str = "127.0.0.1",
+ rpc_port: str = "9876",
+ rpc_user: str = None,
+ rpc_password: str = None,
+ ):
+ self.configfile = config_file # absolute path to the client config file
+ self.ipaddress = ip_address
+ self.rpc_port = rpc_port
+ self.rpcuser = rpc_user
+ self.rpcpassword = rpc_password
+
+ def run_command(
+ self, command: str, arguments: List[Union[str, bool]] = None
+ ) -> dict:
if not arguments:
- arguments=[]
- credentials=None
- url='http://' + self.ipaddress +':' + self.rpc_port + '/'
- headers = {'content-type': 'application/json'}
+ arguments = []
+ credentials = None
+ url = "http://" + self.ipaddress + ":" + self.rpc_port + "/"
+ headers = {"content-type": "application/json"}
payload = {
"method": command,
"params": arguments,
"jsonrpc": "2.0",
"id": 0,
}
- jsonpayload=json.dumps(payload,default=json_default)
+ jsonpayload = json.dumps(payload, default=json_default)
if self.rpcuser or self.rpcpassword:
- credentials=HTTPBasicAuth(self.rpcuser, self.rpcpassword)
+ credentials = HTTPBasicAuth(self.rpcuser, self.rpcpassword)
response = requests.post(
- url, data=jsonpayload, headers=headers, auth=credentials)
+ url, data=jsonpayload, headers=headers, auth=credentials
+ )
return response.json()
- def get_approved_project_urls(self)->List[str]:
+
+ def get_approved_project_urls(self) -> List[str]:
"""
:return: A list of UPPERCASED project URLs using gridcoin command listprojects
"""
- return_list=[]
- all_projects=self.run_command('listprojects')
- for projectname,project in all_projects['result'].items():
- return_list.append(project['base_url'].upper())
+ return_list = []
+ all_projects = self.run_command("listprojects")
+ for projectname, project in all_projects["result"].items():
+ return_list.append(project["base_url"].upper())
return return_list
- def project_name_to_url(self,searchname:str)->Union[str,None]:
+
+ def project_name_to_url(self, searchname: str) -> Union[str, None]:
"""
Convert a project name into its project url, then UPPERCASE it
"""
- all_projects = self.run_command('listprojects')
- for found_project_name, project_dict in all_projects['result'].items():
- if found_project_name.upper()==searchname.upper():
- return project_dict['base_url'].upper()
+ all_projects = self.run_command("listprojects")
+ for found_project_name, project_dict in all_projects["result"].items():
+ if found_project_name.upper() == searchname.upper():
+ return project_dict["base_url"].upper()
return None
+
+
class BoincClientConnection:
"""
A simple class for grepping BOINC config files etc. Doesn't do any RPC communication
"""
- def __init__(self, config_dir:str=None, ip_address:str='127.0.0.1', port:str='9876', rpc_user:str=boinc_username, rpc_password:str=None):
+
+ def __init__(
+ self,
+ config_dir: str = None,
+ ip_address: str = "127.0.0.1",
+ port: str = "9876",
+ rpc_user: str = boinc_username,
+ rpc_password: str = None,
+ ):
if config_dir is None:
- self.config_dir='/var/lib/boinc-client'
+ self.config_dir = "/var/lib/boinc-client"
else:
- self.config_dir=config_dir # absolute path to the client config dir
- self.ip_address=ip_address
- self.port=port
- self.rpc_user=rpc_user
- self.rpc_password=rpc_password
- def get_project_list(self)->List[str]:
+ self.config_dir = config_dir # absolute path to the client config dir
+ self.ip_address = ip_address
+ self.port = port
+ self.rpc_user = rpc_user
+ self.rpc_password = rpc_password
+
+ def get_project_list(self) -> List[str]:
"""
:return: UPPERCASED list of project URLs. This is all of them, not just ones which are attached
"""
- project_list_file=os.path.join(self.config_dir,'all_projects_list.xml')
- return_list=[]
- with open(project_list_file, mode='r', encoding='ASCII', errors='ignore') as f:
+ project_list_file = os.path.join(self.config_dir, "all_projects_list.xml")
+ return_list = []
+ with open(project_list_file, mode="r", encoding="ASCII", errors="ignore") as f:
parsed = xmltodict.parse(f.read())
- for project in parsed['projects']['project']:
- return_list.append(project['url'].upper())
+ for project in parsed["projects"]["project"]:
+ return_list.append(project["url"].upper())
return return_list
-def shutdown_dev_client(quiet:bool=False)->None:
+
+
+def shutdown_dev_client(quiet: bool = False) -> None:
exit_loop = asyncio.get_event_loop()
- log.info('Attempting to shut down dev client at safe_exit...')
+ log.info("Attempting to shut down dev client at safe_exit...")
try:
dev_rpc_client = exit_loop.run_until_complete(
- setup_connection(boinc_ip, boinc_password, port=DEV_RPC_PORT)) # setup dev BOINC RPC connection
- authorize_response = exit_loop.run_until_complete(dev_rpc_client.authorize()) # authorize dev RPC connection
- shutdown_response = exit_loop.run_until_complete(run_rpc_command(dev_rpc_client, 'quit'))
+ setup_connection(boinc_ip, boinc_password, port=DEV_RPC_PORT)
+ ) # setup dev BOINC RPC connection
+ authorize_response = exit_loop.run_until_complete(
+ dev_rpc_client.authorize()
+ ) # authorize dev RPC connection
+ shutdown_response = exit_loop.run_until_complete(
+ run_rpc_command(dev_rpc_client, "quit")
+ )
except Exception as e:
- log.error('Error shutting down dev client {}'.format(e))
-def safe_exit(arg1,arg2)->None:
+ log.error("Error shutting down dev client {}".format(e))
+
+
+def safe_exit(arg1, arg2) -> None:
"""
Function to safely exit tool by saving database, restoring original user preferences, and quitting dev BOINC client.
arg1/2 required by the signal handler library, but aren't used for anything inside this function
"""
- new_loop = asyncio.get_event_loop() # this is needed in case this function is called while main loop is still waiting for an RPC command etc
- print_and_log("Program exiting gracefully",'INFO')
+ new_loop = (
+ asyncio.get_event_loop()
+ ) # this is needed in case this function is called while main loop is still waiting for an RPC command etc
+ print_and_log("Program exiting gracefully", "INFO")
# Backup most recent database save then save database to json file
- log.debug('Saving database')
- shutil.copy('stats.json','stats.json.backup')
+ log.debug("Saving database")
+ shutil.copy("stats.json", "stats.json.backup")
save_stats(DATABASE)
# If BOINC control is not enabled, we can skip the rest of these steps
@@ -214,209 +253,262 @@ def safe_exit(arg1,arg2)->None:
# Restore original BOINC preferences
if os.path.exists(override_dest_path):
- print('Restoring original preferences...')
- log.debug('Restoring original preferences...')
+ print("Restoring original preferences...")
+ log.debug("Restoring original preferences...")
try:
- shutil.copy(override_dest_path,override_path)
+ shutil.copy(override_dest_path, override_path)
except PermissionError as e:
- print('Permission error restoring original BOINC preferences {}'.format(e))
- log.error('Permission error restoring original BOINC preferences {}'.format(e))
- print('Be sure you have permission to edit this file')
- print("Linux users try 'sudo usermod -aG boinc your_username_here' to fix this error".format(override_path))
- print('Note that you will need to restart your machine for these changes to take effect')
+ print("Permission error restoring original BOINC preferences {}".format(e))
+ log.error(
+ "Permission error restoring original BOINC preferences {}".format(e)
+ )
+ print("Be sure you have permission to edit this file")
+ print(
+ "Linux users try 'sudo usermod -aG boinc your_username_here' to fix this error".format(
+ override_path
+ )
+ )
+ print(
+ "Note that you will need to restart your machine for these changes to take effect"
+ )
except Exception as e:
- print('Error restoring original BOINC preferences {}'.format(e))
- log.error('Error restoring original BOINC preferences {}'.format(e))
- print('Be sure you have permission to edit this file')
+ print("Error restoring original BOINC preferences {}".format(e))
+ log.error("Error restoring original BOINC preferences {}".format(e))
+ print("Be sure you have permission to edit this file")
+ print(
+ "Linux users try 'sudo usermod -aG boinc your_username_here' to fix this error".format(
+ override_path
+ )
+ )
print(
- "Linux users try 'sudo usermod -aG boinc your_username_here' to fix this error".format(override_path))
- print('Note that you will need to restart your machine for these changes to take effect')
+ "Note that you will need to restart your machine for these changes to take effect"
+ )
else:
os.remove(override_dest_path)
# Shutdown developer BOINC client, if running
shutdown_dev_client()
quit()
-async def get_task_list(rpc_client:libs.pyboinc.rpc_client)->list:
+
+
+async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list:
"""
Return list of tasks from BOINC client which are not completed/failed. These
can be active tasks, tasks waiting to be started, or paused tasks.
"""
# Known task states
# 2: Active
- return_value=[]
- reply = await run_rpc_command(rpc_client,'get_results')
- if isinstance(reply,str):
- log.info('BOINC appears to have no tasks...')
+ return_value = []
+ reply = await run_rpc_command(rpc_client, "get_results")
+ if isinstance(reply, str):
+ log.info("BOINC appears to have no tasks...")
return return_value
for task in reply:
- if task['state'] in [2]:
+ if task["state"] in [2]:
return_value.append(task)
else:
- log.warning('Warning: Found unknown task state {}: {}'.format(task['state'],task))
+ log.warning(
+ "Warning: Found unknown task state {}: {}".format(task["state"], task)
+ )
return return_value
-async def is_boinc_crunching(rpc_client:libs.pyboinc.rpc_client)->bool:
+
+
+async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
"""
Returns True is boinc is crunching, false otherwise
"""
- reply = await run_rpc_command(rpc_client, 'get_cc_status')
- task_suspend_reason=int(reply['task_suspend_reason'])
- if task_suspend_reason !=0:
+ reply = await run_rpc_command(rpc_client, "get_cc_status")
+ task_suspend_reason = int(reply["task_suspend_reason"])
+ if task_suspend_reason != 0:
# These are documented at https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt
- log.debug('Determined BOINC client is not crunching task_suspend_reason: {}'.format(task_suspend_reason))
+ log.debug(
+ "Determined BOINC client is not crunching task_suspend_reason: {}".format(
+ task_suspend_reason
+ )
+ )
return False
- if task_suspend_reason==0:
- log.debug('Determined BOINC client is crunching task_suspend_reason: {}'.format(task_suspend_reason))
+ if task_suspend_reason == 0:
+ log.debug(
+ "Determined BOINC client is crunching task_suspend_reason: {}".format(
+ task_suspend_reason
+ )
+ )
return True
- log.warning('Unable to determine if BOINC is crunching or not, assuming not.')
+ log.warning("Unable to determine if BOINC is crunching or not, assuming not.")
return False
-async def setup_connection(boinc_ip:str=boinc_ip,boinc_password:str=boinc_password,port:int=31416)->libs.pyboinc.rpc_client:
+
+
+async def setup_connection(
+ boinc_ip: str = boinc_ip, boinc_password: str = boinc_password, port: int = 31416
+) -> libs.pyboinc.rpc_client:
"""
Sets up a BOINC RPC client connection
"""
rpc_client = await init_rpc_client(boinc_ip, boinc_password, port=port)
return rpc_client
-def temp_check()->bool:
+
+
+def temp_check() -> bool:
"""
Returns True if we should keep crunching based on temperature, False otherwise
"""
if not enable_temp_control:
return True
- text=''
+ text = ""
if temp_url:
import requests as req
+
try:
- text=req.get(temp_url).text
+ text = req.get(temp_url).text
except Exception as e:
- print('Error checking temp: {}'.format(e))
- log.error('Error checking temp: {}'.format(e))
+ print("Error checking temp: {}".format(e))
+ log.error("Error checking temp: {}".format(e))
return True
elif temp_command:
- command=shlex.split(temp_command)
+ command = shlex.split(temp_command)
try:
- text=subprocess.check_output(command)
+ text = subprocess.check_output(command)
except Exception as e:
- print('Error checking temp: {}'.format(e))
- log.error('Error checking temp: {}'.format(e))
+ print("Error checking temp: {}".format(e))
+ log.error("Error checking temp: {}".format(e))
return True
- command_output=config.temp_function()
- match=None
+ command_output = config.temp_function()
+ match = None
if command_output:
- text=str(command_output)
- pattern=re.compile(temp_regex)
+ text = str(command_output)
+ pattern = re.compile(temp_regex)
match = re.search(pattern, text)
if match:
- found_temp=int(match.group(0))
- log.debug('Found temp {}'.format(found_temp))
+ found_temp = int(match.group(0))
+ log.debug("Found temp {}".format(found_temp))
if found_temp > stop_temp or found_temp < start_temp:
return False
else:
- print('No temps found!')
- log.error('No temps found!')
+ print("No temps found!")
+ log.error("No temps found!")
return True
return True
-
-def update_check()->None:
+def update_check() -> None:
"""
Check for updates to the FindTheMag tool
"""
# If we've checked for updates in the last week, ignore
- delta=datetime.datetime.now()-DATABASE.get('LASTUPDATECHECK',datetime.datetime(1997,3,3))
- if abs(delta.days)<7:
+ delta = datetime.datetime.now() - DATABASE.get(
+ "LASTUPDATECHECK", datetime.datetime(1997, 3, 3)
+ )
+ if abs(delta.days) < 7:
return
import requests as req
- headers=req.utils.default_headers()
- headers.update( {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36',
- })
+
+ headers = req.utils.default_headers()
+ headers.update(
+ {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
+ }
+ )
# Get update status from Github
- url='https://raw.githubusercontent.com/makeasnek/FindTheMag2/main/updates.txt'
+ url = "https://raw.githubusercontent.com/makeasnek/FindTheMag2/main/updates.txt"
try:
- resp = req.get(url,headers=headers).text
+ resp = req.get(url, headers=headers).text
except Exception as e:
- DATABASE['TABLE_STATUS']='Error checking for updates {}'.format(e)
- log.error('Error checking for updates {}'.format(e))
+ DATABASE["TABLE_STATUS"] = "Error checking for updates {}".format(e)
+ log.error("Error checking for updates {}".format(e))
return
- if 'UPDATE FILE FOR FINDTHEMAG DO NOT DELETE THIS LINE' not in resp:
- DATABASE['TABLE_STATUS']='Error checking for updates invalid update file'
- log.error('Error checking for updates invalid update file')
+ if "UPDATE FILE FOR FINDTHEMAG DO NOT DELETE THIS LINE" not in resp:
+ DATABASE["TABLE_STATUS"] = "Error checking for updates invalid update file"
+ log.error("Error checking for updates invalid update file")
return None
for line in resp.splitlines():
- if line.startswith('#'):
+ if line.startswith("#"):
continue
- if line=='':
+ if line == "":
continue
- if ',' not in line:
+ if "," not in line:
continue
- split=line.split(',')
- version=float(split[0])
- if split[1]=='1':
- security=True
+ split = line.split(",")
+ version = float(split[0])
+ if split[1] == "1":
+ security = True
else:
- security=False
- notes=split[2]
- if version>VERSION:
+ security = False
+ notes = split[2]
+ if version > VERSION:
if security:
- security_text='This is an important security update.'
+ security_text = "This is an important security update."
else:
- security_text=''
- print('There is an updated version of this tool available ({}). {} Major changes include: {} '.format(version,security_text,notes))
+ security_text = ""
+ print(
+ "There is an updated version of this tool available ({}). {} Major changes include: {} ".format(
+ version, security_text, notes
+ )
+ )
log.info(
- 'There is an updated version of this tool available ({}). {} Major changes include: {} '.format(version,
- security_text,
- notes))
- DATABASE['LASTUPDATECHECK']=datetime.datetime.now()
-def get_grc_price()->float:
+ "There is an updated version of this tool available ({}). {} Major changes include: {} ".format(
+ version, security_text, notes
+ )
+ )
+ DATABASE["LASTUPDATECHECK"] = datetime.datetime.now()
+
+
+def get_grc_price() -> float:
"""
Gets average GRC price from three online sources.
"""
import requests as req
- found_prices=[]
- headers=req.utils.default_headers()
- headers.update( {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36',
- })
+
+ found_prices = []
+ headers = req.utils.default_headers()
+ headers.update(
+ {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
+ }
+ )
# Get price from coinmarketcap
- url='https://coinmarketcap.com/currencies/gridcoin/'
- regex=re.compile('(
\$)(\d*\.\d*)()')
- resp=''
+ url = "https://coinmarketcap.com/currencies/gridcoin/"
+ regex = re.compile('(
\$)(\d*\.\d*)()')
+ resp = ""
try:
- resp = req.get(url,headers=headers).text
+ resp = req.get(url, headers=headers).text
except Exception as e:
pass
- regex_result=re.search(regex, resp)
+ regex_result = re.search(regex, resp)
if regex_result:
answer = float(regex_result.group(2))
- log.info('Found GRC price of {} from coinmarketcap'.format(answer))
+ log.info("Found GRC price of {} from coinmarketcap".format(answer))
found_prices.append(answer)
else:
- DATABASE['TABLE_STATUS']='Error getting info from coinmarketcap'
- print('Error getting info from coinmarketcap')
+ DATABASE["TABLE_STATUS"] = "Error getting info from coinmarketcap"
+ print("Error getting info from coinmarketcap")
# Get price from Yahoo
- url = 'https://finance.yahoo.com/quote/GRC-USD/'
- regex = re.compile('(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)')
- resp=''
+ url = "https://finance.yahoo.com/quote/GRC-USD/"
+ regex = re.compile(
+ '(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)'
+ )
+ resp = ""
try:
- resp = req.get(url,headers=headers).text
+ resp = req.get(url, headers=headers).text
except Exception as e:
pass
regex_result = re.search(regex, resp)
if regex_result:
- answer=float(regex_result.group(2))
- log.info('Found GRC price of {} from Yahoo'.format(answer))
+ answer = float(regex_result.group(2))
+ log.info("Found GRC price of {} from Yahoo".format(answer))
found_prices.append(answer)
else:
- DATABASE['TABLE_STATUS']='Error getting info from Yahoo'
- print('Error getting info from Yahoo')
+ DATABASE["TABLE_STATUS"] = "Error getting info from Yahoo"
+ print("Error getting info from Yahoo")
# Get price from coingecko
- url = 'https://www.coingecko.com/en/coins/gridcoin-research'
- regex = re.compile('(data-coin-symbol="grc" data-target="price.price">\$)(\d*.\d*)',flags=re.MULTILINE|re.IGNORECASE)
- resp = ''
+ url = "https://www.coingecko.com/en/coins/gridcoin-research"
+ regex = re.compile(
+ '(data-coin-symbol="grc" data-target="price.price">\$)(\d*.\d*)',
+ flags=re.MULTILINE | re.IGNORECASE,
+ )
+ resp = ""
try:
resp = req.get(url, headers=headers).text
except Exception as e:
@@ -424,160 +516,203 @@ def get_grc_price()->float:
regex_result = re.search(regex, resp)
if regex_result:
answer = float(regex_result.group(2))
- log.info('Found GRC price of {} from coingecko'.format(answer))
+ log.info("Found GRC price of {} from coingecko".format(answer))
found_prices.append(answer)
else:
- DATABASE['TABLE_STATUS']='Error getting info from coingecko'
- log.error('Error getting info from coingecko')
+ DATABASE["TABLE_STATUS"] = "Error getting info from coingecko"
+ log.error("Error getting info from coingecko")
# Return average of all found prices
- if len(found_prices)>0:
- DATABASE['TABLE_STATUS'] = 'Found GRC price {}'.format(sum(found_prices)/len(found_prices))
- return(sum(found_prices)/len(found_prices))
+ if len(found_prices) > 0:
+ DATABASE["TABLE_STATUS"] = "Found GRC price {}".format(
+ sum(found_prices) / len(found_prices)
+ )
+ return sum(found_prices) / len(found_prices)
else:
- DATABASE['TABLE_STATUS'] = 'Unable to find GRC price'
+ DATABASE["TABLE_STATUS"] = "Unable to find GRC price"
return 0
-def get_approved_project_urls_web()->Tuple[List[str],Dict[str,str]]:
+
+
+def get_approved_project_urls_web() -> Tuple[List[str], Dict[str, str]]:
"""
Gets current whitelist from Gridcoinstats
"""
# Return cached version if we have it and requested it < 24 hrs ago
- delta=datetime.datetime.now()-DATABASE.get('LASTGRIDCOINSTATSPROJECTCHECK',datetime.datetime(1993,3,3))
- if abs(delta.days)<1 and 'GSPROJECTLIST' in DATABASE and 'GSRESOLVERDICT' in DATABASE:
- log.debug('Returning cached version of gridcoinstats data')
- return DATABASE['GSPROJECTLIST'],DATABASE['GSRESOLVERDICT']
+ delta = datetime.datetime.now() - DATABASE.get(
+ "LASTGRIDCOINSTATSPROJECTCHECK", datetime.datetime(1993, 3, 3)
+ )
+ if (
+ abs(delta.days) < 1
+ and "GSPROJECTLIST" in DATABASE
+ and "GSRESOLVERDICT" in DATABASE
+ ):
+ log.debug("Returning cached version of gridcoinstats data")
+ return DATABASE["GSPROJECTLIST"], DATABASE["GSRESOLVERDICT"]
# Otherwise, request it
import json
- url='https://www.gridcoinstats.eu/API/simpleQuery.php?q=listprojects'
+
+ url = "https://www.gridcoinstats.eu/API/simpleQuery.php?q=listprojects"
import requests as req
+
resp = req.get(url)
- if 'BOINC' not in resp.text.upper():
- print('Error fetching magnitude stats from {}'.format(url))
- log.error('Error fetching magnitude stats from {}'.format(url))
- if 'GSPROJECTLIST' in DATABASE and 'GSRESOLVERDICT' in DATABASE:
- log.debug('Returning cached magnitude stats')
- return DATABASE['GSPROJECTLIST'], DATABASE['GSRESOLVERDICT']
+ if "BOINC" not in resp.text.upper():
+ print("Error fetching magnitude stats from {}".format(url))
+ log.error("Error fetching magnitude stats from {}".format(url))
+ if "GSPROJECTLIST" in DATABASE and "GSRESOLVERDICT" in DATABASE:
+ log.debug("Returning cached magnitude stats")
+ return DATABASE["GSPROJECTLIST"], DATABASE["GSRESOLVERDICT"]
else:
- log.debug('Exiting safely')
- safe_exit(None,None)
- return_list:List[str]= []
- project_resolver_dict:Dict[str,str]={}
- loaded_json={}
+ log.debug("Exiting safely")
+ safe_exit(None, None)
+ return_list: List[str] = []
+ project_resolver_dict: Dict[str, str] = {}
+ loaded_json = {}
try:
- loaded_json=json.loads(resp.text)
+ loaded_json = json.loads(resp.text)
except Exception as e:
- log.error('Error parsing data from Gridcoinstats {}'.format(e))
- if 'GSPROJECTLIST' in DATABASE and 'GSRESOLVERDICT' in DATABASE:
- log.error('Returning old gridcoinstats data'.format(e))
- return DATABASE['GSPROJECTLIST'], DATABASE['GSRESOLVERDICT']
+ log.error("Error parsing data from Gridcoinstats {}".format(e))
+ if "GSPROJECTLIST" in DATABASE and "GSRESOLVERDICT" in DATABASE:
+            log.error("Returning old gridcoinstats data {}".format(e))
+ return DATABASE["GSPROJECTLIST"], DATABASE["GSRESOLVERDICT"]
else:
- print('Unable to continue...')
- safe_exit(None,None)
+ print("Unable to continue...")
+ safe_exit(None, None)
for projectname, project in loaded_json.items():
- if 'WORLDCOMMUNITYGRID.ORG/BOINC' in project['base_url'].upper():
- return_list.append(project['base_url'].upper().replace('/BOINC/',''))
+ if "WORLDCOMMUNITYGRID.ORG/BOINC" in project["base_url"].upper():
+ return_list.append(project["base_url"].upper().replace("/BOINC/", ""))
else:
- return_list.append(project['base_url'].upper())
- project_resolver_dict[projectname]=project['base_url']
- DATABASE['LASTGRIDCOINSTATSPROJECTCHECK']=datetime.datetime.now()
- DATABASE['GSPROJECTLIST']=return_list
- DATABASE['GSRESOLVERDICT']=project_resolver_dict
- return return_list,project_resolver_dict
-def wait_till_no_xfers(rpc_client:libs.pyboinc.rpc_client)->None:
+ return_list.append(project["base_url"].upper())
+ project_resolver_dict[projectname] = project["base_url"]
+ DATABASE["LASTGRIDCOINSTATSPROJECTCHECK"] = datetime.datetime.now()
+ DATABASE["GSPROJECTLIST"] = return_list
+ DATABASE["GSRESOLVERDICT"] = project_resolver_dict
+ return return_list, project_resolver_dict
+
+
+def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None:
"""
Wait for BOINC to finish all pending xfers, return None when done
"""
- max_loops=30
- current_loops=0
- loop_wait_in_seconds=30 # wait this long between loops
- def xfers_happening(xfer_list:list)->bool:
+ max_loops = 30
+ current_loops = 0
+ loop_wait_in_seconds = 30 # wait this long between loops
+
+ def xfers_happening(xfer_list: list) -> bool:
"""
Returns True if any active xfers are happening, false if none are happening or if only stalled xfers exist
"""
# Known statuses:
# 0 = Active
- if isinstance(xfer_list,str):
+ if isinstance(xfer_list, str):
return False
for xfer in xfer_list:
- if str(xfer['status'])=='0':
- if 'persistent_file_xfer' in xfer:
- if float(xfer['persistent_file_xfer'].get('num_retries',0))>1:
- continue # assume xfers with multiple retries are stalled
+ if str(xfer["status"]) == "0":
+ if "persistent_file_xfer" in xfer:
+ if float(xfer["persistent_file_xfer"].get("num_retries", 0)) > 1:
+ continue # assume xfers with multiple retries are stalled
return True
else:
- log.warning('Found xfer with unknown status: ' + str(xfer))
+ log.warning("Found xfer with unknown status: " + str(xfer))
return False
+
# Every ten seconds we will request the list of file transfers from BOINC until there are none left
- while current_loops<max_loops:
-def get_config_parameters(gridcoin_dir:str)->Dict[str, str]:
+def get_config_parameters(gridcoin_dir: str) -> Dict[str, str]:
"""
:param gridcoin_dir: Absolute path to a gridcoin config directory
:return: All config parameters found, preferring those in the json file to the conf. Note that sidestakes become a list as there may be multiple
"""
- return_dict=dict()
- if 'gridcoinsettings.json' in os.listdir(gridcoin_dir):
- with open(os.path.join(gridcoin_dir,'gridcoinsettings.json')) as json_file:
- config_dict=json.load(json_file)
- if 'rpcuser' in config_dict:
- return_dict['rpc_user']=config_dict['rpcuser']
- if 'rpcpass' in config_dict:
- return_dict['rpc_pass']=config_dict['rpcpass']
- if 'rpcport' in config_dict:
- return_dict['rpc_port']=config_dict['rpcport']
- if 'gridcoinresearch.conf' in os.listdir(gridcoin_dir):
- with open(os.path.join(gridcoin_dir,'gridcoinresearch.conf')) as f:
+ return_dict = dict()
+ if "gridcoinsettings.json" in os.listdir(gridcoin_dir):
+ with open(os.path.join(gridcoin_dir, "gridcoinsettings.json")) as json_file:
+ config_dict = json.load(json_file)
+ if "rpcuser" in config_dict:
+ return_dict["rpc_user"] = config_dict["rpcuser"]
+ if "rpcpass" in config_dict:
+ return_dict["rpc_pass"] = config_dict["rpcpass"]
+ if "rpcport" in config_dict:
+ return_dict["rpc_port"] = config_dict["rpcport"]
+ if "gridcoinresearch.conf" in os.listdir(gridcoin_dir):
+ with open(os.path.join(gridcoin_dir, "gridcoinresearch.conf")) as f:
for line in f:
- if line.startswith('#'):
+ if line.startswith("#"):
continue
- if line.strip()=="":
+ if line.strip() == "":
continue
try:
- key=line.split('=')[0]
- value=line.split('=')[1].replace('\n','')
- if '#' in value:
- value=value.split('#')[0]
- value=value.strip()
+ key = line.split("=")[0]
+ value = line.split("=")[1].replace("\n", "")
+ if "#" in value:
+ value = value.split("#")[0]
+ value = value.strip()
except Exception as e:
- log.error('Warning: Error parsing line from config file, ignoring: {} error was {}' .format(line, e))
+ log.error(
+ "Warning: Error parsing line from config file, ignoring: {} error was {}".format(
+ line, e
+ )
+ )
continue
- if key=='addnode':
+ if key == "addnode":
continue
- if key=='sidestake':
- if 'sidestake' not in return_dict:
- return_dict['sidestake']=[]
- return_dict['sidestake'].append(value)
+ if key == "sidestake":
+ if "sidestake" not in return_dict:
+ return_dict["sidestake"] = []
+ return_dict["sidestake"].append(value)
continue
if key in return_dict:
- print('Warning: multiple values found for '+key+' in gridcoin config file at '+os.path.join(gridcoin_dir,'gridcoinresearch.conf')+' using the first one we found')
- log.warning('Warning: multiple values found for ' + key + ' in gridcoin config file at ' + os.path.join(
- gridcoin_dir, 'gridcoinresearch.conf') + ' using the first one we found')
+ print(
+ "Warning: multiple values found for "
+ + key
+ + " in gridcoin config file at "
+ + os.path.join(gridcoin_dir, "gridcoinresearch.conf")
+ + " using the first one we found"
+ )
+ log.warning(
+ "Warning: multiple values found for "
+ + key
+ + " in gridcoin config file at "
+ + os.path.join(gridcoin_dir, "gridcoinresearch.conf")
+ + " using the first one we found"
+ )
continue
- return_dict[key]=value
+ return_dict[key] = value
return return_dict
-def check_sidestake(config_params:Dict[str,Union[str,List[str]]],address:str,minval:float)->bool:
+
+def check_sidestake(
+ config_params: Dict[str, Union[str, List[str]]], address: str, minval: float
+) -> bool:
"""
Checks if a given address is being sidestaked to or not. Returns False if value < minval
:param config_params: config_params from get_config_parameters
@@ -585,28 +720,34 @@ def check_sidestake(config_params:Dict[str,Union[str,List[str]]],address:str,min
:param minval: minimum value to pass check
:return: True or False
"""
- if 'enablesidestaking' not in config_params:
+ if "enablesidestaking" not in config_params:
return False
- if 'sidestake' not in config_params:
+ if "sidestake" not in config_params:
return False
- if config_params['enablesidestaking']!='1':
+ if config_params["enablesidestaking"] != "1":
return False
- for sidestake in config_params['sidestake']:
- found_address=sidestake.split(',')[0]
- found_value=float(sidestake.split(',')[1])
- if found_address==address:
- if found_value>=minval:
+ for sidestake in config_params["sidestake"]:
+ found_address = sidestake.split(",")[0]
+ found_value = float(sidestake.split(",")[1])
+ if found_address == address:
+ if found_value >= minval:
return True
return False
-def projecturlfromstatsfile(statsfilename: str,all_project_urls:List[str],approved_project_urls:List[str],boinc_projects_list:List[str]) -> str:
+
+def projecturlfromstatsfile(
+ statsfilename: str,
+ all_project_urls: List[str],
+ approved_project_urls: List[str],
+ boinc_projects_list: List[str],
+) -> str:
"""
Guess a project url from the name of a stats file
"""
# remove extraneous information from name
- statsfilename = statsfilename.replace('job_log_', '')
- statsfilename = statsfilename.split('_')[0]
- statsfilename = statsfilename.replace('.txt', '')
+ statsfilename = statsfilename.replace("job_log_", "")
+ statsfilename = statsfilename.split("_")[0]
+ statsfilename = statsfilename.replace(".txt", "")
# check if name is in any known URLs
for knownurl in approved_project_urls:
@@ -617,19 +758,34 @@ def projecturlfromstatsfile(statsfilename: str,all_project_urls:List[str],approv
return knownurl
for knownurl in boinc_projects_list:
if statsfilename.upper() in knownurl.upper():
- return knownurl.upper() # we have to upper these as they are not uppered by default
- print('WARNING: Found stats file ' + statsfilename+' but unable to find URL for it, perhaps it is not the BOINC client\'s list of projects?')
+ return (
+ knownurl.upper()
+ ) # we have to upper these as they are not uppered by default
+ print(
+ "WARNING: Found stats file "
+ + statsfilename
+ + " but unable to find URL for it, perhaps it is not the BOINC client's list of projects?"
+ )
log.warning(
- 'WARNING: Found stats file ' + statsfilename + ' but unable to find URL for it, perhaps it is not the BOINC client\'s list of projects?')
+ "WARNING: Found stats file "
+ + statsfilename
+ + " but unable to find URL for it, perhaps it is not the BOINC client's list of projects?"
+ )
return statsfilename
-def project_url_from_credit_history_file(filename: str, approved_project_urls: List[str],
- all_project_urls: List[str],boinc_projects_list:List[str]) -> str:
+
+
+def project_url_from_credit_history_file(
+ filename: str,
+ approved_project_urls: List[str],
+ all_project_urls: List[str],
+ boinc_projects_list: List[str],
+) -> str:
"""
Guess a project url from credit history file name
"""
- filename = filename.replace('statistics_', '')
- filename = filename.replace('.xml', '')
- filename = filename.split('_')[0]
+ filename = filename.replace("statistics_", "")
+ filename = filename.replace(".xml", "")
+ filename = filename.split("_")[0]
for knownurl in approved_project_urls:
if filename.upper() in knownurl:
return knownurl
@@ -638,99 +794,162 @@ def project_url_from_credit_history_file(filename: str, approved_project_urls: L
return knownurl
for knownurl in boinc_projects_list:
if filename.upper() in knownurl.upper():
- return knownurl.upper() # have to upper as this list is not uppered
- print('WARNING: Found credit history file ' + filename+' but unable to find URL for it, perhaps it is not in the BOINC client\'s list of projects?')
+ return knownurl.upper() # have to upper as this list is not uppered
+ print(
+ "WARNING: Found credit history file "
+ + filename
+ + " but unable to find URL for it, perhaps it is not in the BOINC client's list of projects?"
+ )
log.error(
- 'WARNING: Found credit history file ' + filename + ' but unable to find URL for it, perhaps it is not in the BOINC client\'s list of projects?')
+ "WARNING: Found credit history file "
+ + filename
+ + " but unable to find URL for it, perhaps it is not in the BOINC client's list of projects?"
+ )
return filename
def stat_file_to_list(stat_file_abs_path: str) -> List[Dict[str, str]]:
"""
- Turns a BOINC job log into list of dicts we can use, each dict is a task. Dicts have keys below:
- STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE
- Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
- All values and keys in dicts are strings.
+ Turns a BOINC job log into list of dicts we can use, each dict is a task. Dicts have keys below:
+ STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE
+ Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
+ All values and keys in dicts are strings.
- BOINC's job log format is:
+ BOINC's job log format is:
-[ue] Estimated runtime BOINC Client estimate (seconds)
-[ct] CPU time Measured CPU runtime at completion (seconds)
-[fe] Estimated FLOPs count From project (integer)
-[nm] Task name From project
-[et] Elapsed time Wallclock runtime at completion (seconds)
+ [ue] Estimated runtime BOINC Client estimate (seconds)
+ [ct] CPU time Measured CPU runtime at completion (seconds)
+ [fe] Estimated FLOPs count From project (integer)
+ [nm] Task name From project
+ [et] Elapsed time Wallclock runtime at completion (seconds)
"""
stats_list = []
try:
- with open(stat_file_abs_path, mode='r', errors='ignore') as f:
+ with open(stat_file_abs_path, mode="r", errors="ignore") as f:
for log_entry in f:
- #log.debug('Found logentry '+str(log_entry))
- match=None
+ # log.debug('Found logentry '+str(log_entry))
+ match = None
try:
- match = re.search(r'(\d*)( ue )([\d\.]*)( ct )([\d\.]*)( fe )(\d*)( nm )(\S*)( et )([\d\.]*)( es )(\d)',log_entry)
+ match = re.search(
+ r"(\d*)( ue )([\d\.]*)( ct )([\d\.]*)( fe )(\d*)( nm )(\S*)( et )([\d\.]*)( es )(\d)",
+ log_entry,
+ )
except Exception as e:
print(
- 'Error reading BOINC job log at ' + stat_file_abs_path + ' maybe it\'s corrupt? Line: error: '.format(log_entry,e))
+ "Error reading BOINC job log at "
+ + stat_file_abs_path
+                + " maybe it's corrupt? Line: {} error: {}".format(log_entry, e)
+ )
log.error(
- 'Error reading BOINC job log at ' + stat_file_abs_path + ' maybe it\'s corrupt? Line: error: '.format(
- log_entry, e))
+ "Error reading BOINC job log at "
+ + stat_file_abs_path
+                + " maybe it's corrupt? Line: {} error: {}".format(log_entry, e)
+ )
if not match:
- print('Encountered log entry in unknown format: ' + log_entry)
- log.error('Encountered log entry in unknown format: ' + log_entry)
+ print("Encountered log entry in unknown format: " + log_entry)
+ log.error("Encountered log entry in unknown format: " + log_entry)
continue
stats = dict()
- stats['STARTTIME'] = match.group(1)
- stats['ESTTIME'] = match.group(3)
- stats['CPUTIME'] = match.group(5)
- stats['ESTIMATEDFLOPS'] = match.group(7)
- stats['TASKNAME'] = match.group(9)
- stats['WALLTIME'] = match.group(11)
- stats['EXITCODE'] = match.group(13)
+ stats["STARTTIME"] = match.group(1)
+ stats["ESTTIME"] = match.group(3)
+ stats["CPUTIME"] = match.group(5)
+ stats["ESTIMATEDFLOPS"] = match.group(7)
+ stats["TASKNAME"] = match.group(9)
+ stats["WALLTIME"] = match.group(11)
+ stats["EXITCODE"] = match.group(13)
stats_list.append(stats)
return stats_list
except Exception as e:
- print('Error reading BOINC job log at '+stat_file_abs_path+' maybe it\'s corrupt? '+str(e))
- log.error('Error reading BOINC job log at ' + stat_file_abs_path + ' maybe it\'s corrupt? ' + str(e))
+ print(
+ "Error reading BOINC job log at "
+ + stat_file_abs_path
+ + " maybe it's corrupt? "
+ + str(e)
+ )
+ log.error(
+ "Error reading BOINC job log at "
+ + stat_file_abs_path
+ + " maybe it's corrupt? "
+ + str(e)
+ )
return []
-def resolve_boinc_url_new(url:str):
- '''
+
+
+def resolve_boinc_url_new(url: str):
+ """
Note: Using resolve_boinc_url_new instead to use get to pass to BOINC, this is for other purposes.
Given URL, find BOINC's version with appropriate capitalization. If unable to find, print warning and return input
Prior to a specific BOINC version, RPC calls require capitalization to match identically.
- '''
- cleaned_search_url = url.upper().replace('HTTPS://', '').replace('HTTP://', '').replace('WWW.', '')
- cleaned_search_url = cleaned_search_url.replace('WORLDCOMMUNITYGRID.ORG/BOINC', 'WORLDCOMMUNITYGRID.ORG')
- if cleaned_search_url.endswith('/'):
+ """
+ cleaned_search_url = (
+ url.upper().replace("HTTPS://", "").replace("HTTP://", "").replace("WWW.", "")
+ )
+ cleaned_search_url = cleaned_search_url.replace(
+ "WORLDCOMMUNITYGRID.ORG/BOINC", "WORLDCOMMUNITYGRID.ORG"
+ )
+ if cleaned_search_url.endswith("/"):
cleaned_search_url = cleaned_search_url[:-1]
- for found_url in chain(BOINC_PROJECT_LIST,ALL_BOINC_PROJECTS.keys()):
- cleaned_found_url = found_url.upper().replace('HTTPS://', '').replace('HTTP://', '').replace('WWW.', '')
- if cleaned_search_url == cleaned_found_url or cleaned_search_url in cleaned_found_url:
+ for found_url in chain(BOINC_PROJECT_LIST, ALL_BOINC_PROJECTS.keys()):
+ cleaned_found_url = (
+ found_url.upper()
+ .replace("HTTPS://", "")
+ .replace("HTTP://", "")
+ .replace("WWW.", "")
+ )
+ if (
+ cleaned_search_url == cleaned_found_url
+ or cleaned_search_url in cleaned_found_url
+ ):
return found_url
return url
-def resolve_boinc_url(url:str,boinc_url_list:List[str]):
- '''
+
+
+def resolve_boinc_url(url: str, boinc_url_list: List[str]):
+ """
Note: Using resolve_boinc_url_new instead to use get to pass to BOINC, this is for other purposes.
Given URL, find BOINC's version with appropriate capitalization. If unable to find, print warning and return input
Prior to a specific BOINC version, RPC calls require capitalization to match identically.
- '''
- cleaned_search_url=url.upper().replace('HTTPS://','').replace('HTTP://','').replace('WWW.','')
- cleaned_search_url=cleaned_search_url.replace('WORLDCOMMUNITYGRID.ORG/BOINC','WORLDCOMMUNITYGRID.ORG')
- if cleaned_search_url.endswith('/'):
- cleaned_search_url=cleaned_search_url[:-1]
- for found_url in chain(ALL_BOINC_PROJECTS.keys(),BOINC_PROJECT_LIST):
- cleaned_found_url=found_url.upper().replace('HTTPS://','').replace('HTTP://','').replace('WWW.','')
- if cleaned_search_url==cleaned_found_url or cleaned_search_url in cleaned_found_url:
+ """
+ cleaned_search_url = (
+ url.upper().replace("HTTPS://", "").replace("HTTP://", "").replace("WWW.", "")
+ )
+ cleaned_search_url = cleaned_search_url.replace(
+ "WORLDCOMMUNITYGRID.ORG/BOINC", "WORLDCOMMUNITYGRID.ORG"
+ )
+ if cleaned_search_url.endswith("/"):
+ cleaned_search_url = cleaned_search_url[:-1]
+ for found_url in chain(ALL_BOINC_PROJECTS.keys(), BOINC_PROJECT_LIST):
+ cleaned_found_url = (
+ found_url.upper()
+ .replace("HTTPS://", "")
+ .replace("HTTP://", "")
+ .replace("WWW.", "")
+ )
+ if (
+ cleaned_search_url == cleaned_found_url
+ or cleaned_search_url in cleaned_found_url
+ ):
return found_url
return url
-async def run_rpc_command(rpc_client:libs.pyboinc.rpc_client,command:str,arg1:Union[str,None]=None,arg1_val:Union[str,None]=None,arg2:Union[str,None]=None,arg2_val:Union[str,None]=None)->Union[str,Dict[Any,Any]]:
+
+async def run_rpc_command(
+ rpc_client: libs.pyboinc.rpc_client,
+ command: str,
+ arg1: Union[str, None] = None,
+ arg1_val: Union[str, None] = None,
+ arg2: Union[str, None] = None,
+ arg2_val: Union[str, None] = None,
+) -> Union[str, Dict[Any, Any]]:
"""
Runs command on BOINC client via RPC
Example: run_rpc_command(rpc_client,'project_nomorework','http://project.com/project')
"""
- full_command='{} {} {} {}'.format(command,arg1,arg1_val,arg2,arg2_val) # added for debugging purposes
- log.debug('Running BOINC rpc request '+full_command)
+    full_command = "{} {} {} {} {}".format(
+ command, arg1, arg1_val, arg2, arg2_val
+ ) # added for debugging purposes
+ log.debug("Running BOINC rpc request " + full_command)
req = ET.Element(command)
if arg1 is not None:
a = ET.SubElement(req, arg1)
@@ -743,9 +962,11 @@ async def run_rpc_command(rpc_client:libs.pyboinc.rpc_client,command:str,arg1:Un
response = await rpc_client._request(req)
parsed = parse_generic(response)
if not str(parsed):
- print('Warning: Error w RPC command {}: {}'.format(full_command,parsed))
- log.error('Warning: Error w RPC command {}: {}'.format(full_command, parsed))
+ print("Warning: Error w RPC command {}: {}".format(full_command, parsed))
+ log.error("Warning: Error w RPC command {}: {}".format(full_command, parsed))
return parsed
+
+
def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str, str]]:
"""
Turns a BOINC credit history file into list of dicts we can use. Dicts have keys below:
@@ -753,158 +974,239 @@ def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str,
Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
"""
statslist = []
- with open(credithistoryfileabspath, mode='r', encoding='ASCII', errors='ignore') as f:
+ with open(
+ credithistoryfileabspath, mode="r", encoding="ASCII", errors="ignore"
+ ) as f:
parsed = xmltodict.parse(f.read())
- for logentry in parsed.get('project_statistics', {}).get('daily_statistics', []):
+ for logentry in parsed.get("project_statistics", {}).get(
+ "daily_statistics", []
+ ):
stats = {}
if not isinstance(logentry, dict):
continue
- stats['TIME'] = logentry['day']
- stats['USERTOTALCREDIT'] = logentry['user_total_credit']
- stats['USERRAC'] = logentry['user_expavg_credit']
- stats['HOSTTOTALCREDIT'] = logentry['host_total_credit']
- stats['HOSTRAC'] = logentry['host_expavg_credit']
+ stats["TIME"] = logentry["day"]
+ stats["USERTOTALCREDIT"] = logentry["user_total_credit"]
+ stats["USERRAC"] = logentry["user_expavg_credit"]
+ stats["HOSTTOTALCREDIT"] = logentry["host_total_credit"]
+ stats["HOSTRAC"] = logentry["host_expavg_credit"]
statslist.append(stats)
return statslist
-def config_files_to_stats(config_dir_abs_path: str) -> Dict[str, Dict[str, Union[int, float, Dict[str, Union[float, str]]]]]:
+
+def config_files_to_stats(
+ config_dir_abs_path: str,
+) -> Dict[str, Dict[str, Union[int, float, Dict[str, Union[float, str]]]]]:
"""
:param config_dir_abs_path: Absolute path to BOINC data directory
:return: Dict of stats in format COMBINEDSTATSEXAMPLE in main.py
"""
- stats_files:List[str] = []
- credit_history_files:List[str] = []
+ stats_files: List[str] = []
+ credit_history_files: List[str] = []
return_stats = {}
# find files to search through, add them to lists
for file in os.listdir(config_dir_abs_path):
- if 'job_log' in file:
+ if "job_log" in file:
stats_files.append(os.path.join(config_dir_abs_path, file))
- if file.startswith('statistics_') and file.endswith('.xml'):
+ if file.startswith("statistics_") and file.endswith(".xml"):
credit_history_files.append(os.path.join(config_dir_abs_path, file))
- log.debug('Found stats_files: ' + str(stats_files))
- log.debug('Found historical credit info files at: ' + str(credit_history_files))
+ log.debug("Found stats_files: " + str(stats_files))
+ log.debug("Found historical credit info files at: " + str(credit_history_files))
# Process stats files
for statsfile in stats_files:
- project_url = projecturlfromstatsfile(os.path.basename(statsfile),ALL_PROJECT_URLS,approved_project_urls=APPROVED_PROJECT_URLS,boinc_projects_list=BOINC_PROJECT_LIST)
+ project_url = projecturlfromstatsfile(
+ os.path.basename(statsfile),
+ ALL_PROJECT_URLS,
+ approved_project_urls=APPROVED_PROJECT_URLS,
+ boinc_projects_list=BOINC_PROJECT_LIST,
+ )
stat_list = stat_file_to_list(statsfile)
- log.debug('In statsfile for '+project_url)
+ log.debug("In statsfile for " + project_url)
# Compute the first and last date in the stats file. Currently not used but does work
- startdate = str(datetime.datetime.fromtimestamp(float(stat_list[0]['STARTTIME'])).strftime('%m-%d-%Y'))
+ startdate = str(
+ datetime.datetime.fromtimestamp(float(stat_list[0]["STARTTIME"])).strftime(
+ "%m-%d-%Y"
+ )
+ )
lastdate = str(
- datetime.datetime.fromtimestamp(float(stat_list[len(stat_list) - 1]['STARTTIME'])).strftime('%m-%d-%Y'))
- log.debug('Start date is '+startdate)
+ datetime.datetime.fromtimestamp(
+ float(stat_list[len(stat_list) - 1]["STARTTIME"])
+ ).strftime("%m-%d-%Y")
+ )
+ log.debug("Start date is " + startdate)
if project_url not in return_stats:
- return_stats[project_url] = {'CREDIT_HISTORY': {}, 'WU_HISTORY': {}, 'COMPILED_STATS': {}}
- wu_history = return_stats[project_url]['WU_HISTORY']
+ return_stats[project_url] = {
+ "CREDIT_HISTORY": {},
+ "WU_HISTORY": {},
+ "COMPILED_STATS": {},
+ }
+ wu_history = return_stats[project_url]["WU_HISTORY"]
for wu in stat_list:
- date = str(datetime.datetime.fromtimestamp(float(wu['STARTTIME'])).strftime('%m-%d-%Y'))
+ date = str(
+ datetime.datetime.fromtimestamp(float(wu["STARTTIME"])).strftime(
+ "%m-%d-%Y"
+ )
+ )
if date not in wu_history:
- wu_history[date] = {'TOTALWUS': 0, 'total_wall_time': 0, 'total_cpu_time': 0}
- wu_history[date]['TOTALWUS'] += 1
- wu_history[date]['total_wall_time'] += float(wu['WALLTIME'])
- wu_history[date]['total_cpu_time'] += float(wu['CPUTIME'])
+ wu_history[date] = {
+ "TOTALWUS": 0,
+ "total_wall_time": 0,
+ "total_cpu_time": 0,
+ }
+ wu_history[date]["TOTALWUS"] += 1
+ wu_history[date]["total_wall_time"] += float(wu["WALLTIME"])
+ wu_history[date]["total_cpu_time"] += float(wu["CPUTIME"])
# process credit logs
for credit_history_file in credit_history_files:
- project_url = project_url_from_credit_history_file(os.path.basename(credit_history_file), APPROVED_PROJECT_URLS,
- ALL_PROJECT_URLS,boinc_projects_list=BOINC_PROJECT_LIST)
+ project_url = project_url_from_credit_history_file(
+ os.path.basename(credit_history_file),
+ APPROVED_PROJECT_URLS,
+ ALL_PROJECT_URLS,
+ boinc_projects_list=BOINC_PROJECT_LIST,
+ )
credithistorylist = credit_history_file_to_list(credit_history_file)
if len(credithistorylist) > 0:
# print('In credit_history_file for ' + project_url)
- startdate = str(datetime.datetime.fromtimestamp(float(credithistorylist[0]['TIME'])).strftime('%m-%d-%Y'))
+ startdate = str(
+ datetime.datetime.fromtimestamp(
+ float(credithistorylist[0]["TIME"])
+ ).strftime("%m-%d-%Y")
+ )
lastdate = str(
- datetime.datetime.fromtimestamp(float(credithistorylist[len(credithistorylist) - 1]['TIME'])).strftime(
- '%m-%d-%Y'))
+ datetime.datetime.fromtimestamp(
+ float(credithistorylist[len(credithistorylist) - 1]["TIME"])
+ ).strftime("%m-%d-%Y")
+ )
for index, entry in enumerate(credithistorylist):
- if index == len(credithistorylist) - 1: # Skip the last entry as it's already calculated at the previous entry
+ if (
+ index == len(credithistorylist) - 1
+ ): # Skip the last entry as it's already calculated at the previous entry
continue
next_entry = credithistorylist[index + 1]
- current_time = float(entry['TIME'])
- delta_credits = float(next_entry['HOSTTOTALCREDIT']) - float(entry['HOSTTOTALCREDIT'])
+ current_time = float(entry["TIME"])
+ delta_credits = float(next_entry["HOSTTOTALCREDIT"]) - float(
+ entry["HOSTTOTALCREDIT"]
+ )
# Add found info to combined average stats
- date = str(datetime.datetime.fromtimestamp(float(current_time)).strftime('%m-%d-%Y'))
+ date = str(
+ datetime.datetime.fromtimestamp(float(current_time)).strftime(
+ "%m-%d-%Y"
+ )
+ )
if project_url not in return_stats:
- return_stats[project_url] = {'CREDIT_HISTORY': {}, 'WU_HISTORY': {}, 'COMPILED_STATS': {}}
- if 'CREDIT_HISTORY' not in return_stats[project_url]:
- return_stats[project_url]['CREDIT_HISTORY'] = {}
- credit_history = return_stats[project_url]['CREDIT_HISTORY']
- if 'COMPILED STATS' not in return_stats[project_url]:
- return_stats[project_url]['COMPILED_STATS'] = {}
+ return_stats[project_url] = {
+ "CREDIT_HISTORY": {},
+ "WU_HISTORY": {},
+ "COMPILED_STATS": {},
+ }
+ if "CREDIT_HISTORY" not in return_stats[project_url]:
+ return_stats[project_url]["CREDIT_HISTORY"] = {}
+ credit_history = return_stats[project_url]["CREDIT_HISTORY"]
+            if "COMPILED_STATS" not in return_stats[project_url]:
+ return_stats[project_url]["COMPILED_STATS"] = {}
if date not in credit_history:
credit_history[date] = {}
- if 'CREDITAWARDED' not in credit_history[date]:
+ if "CREDITAWARDED" not in credit_history[date]:
credit_history[date]["CREDITAWARDED"] = 0
- credit_history[date]['CREDITAWARDED'] += delta_credits
+ credit_history[date]["CREDITAWARDED"] += delta_credits
# find averages
for project_url, parent_dict in return_stats.items():
total_wus = 0
total_credit = 0
total_cpu_time = 0
total_wall_time = 0
- x_day_wall_time=0
- for date, credit_history in parent_dict['CREDIT_HISTORY'].items():
- total_credit += credit_history['CREDITAWARDED']
- for date, wu_history in parent_dict['WU_HISTORY'].items():
- total_wus += wu_history['TOTALWUS']
- total_wall_time += wu_history['total_wall_time']
- split_date=date.split('-')
- datetimed_date=datetime.datetime(year=int(split_date[2]),month=int(split_date[0]),day=int(split_date[1]))
- time_ago=datetime.datetime.now()-datetimed_date
- days_ago=time_ago.days
- if days_ago<=rolling_weight_window:
- x_day_wall_time+=wu_history['total_wall_time']
- total_cpu_time += wu_history['total_cpu_time']
+ x_day_wall_time = 0
+ for date, credit_history in parent_dict["CREDIT_HISTORY"].items():
+ total_credit += credit_history["CREDITAWARDED"]
+ for date, wu_history in parent_dict["WU_HISTORY"].items():
+ total_wus += wu_history["TOTALWUS"]
+ total_wall_time += wu_history["total_wall_time"]
+ split_date = date.split("-")
+ datetimed_date = datetime.datetime(
+ year=int(split_date[2]),
+ month=int(split_date[0]),
+ day=int(split_date[1]),
+ )
+ time_ago = datetime.datetime.now() - datetimed_date
+ days_ago = time_ago.days
+ if days_ago <= rolling_weight_window:
+ x_day_wall_time += wu_history["total_wall_time"]
+ total_cpu_time += wu_history["total_cpu_time"]
if total_wus == 0:
avg_wall_time = 0
avg_cpu_time = 0
avg_credit_per_task = 0
credits_per_hour = 0
else:
- total_cpu_time=total_cpu_time/60/60 # convert to hours
- total_wall_time=total_wall_time/60/60 #convert to hours
+ total_cpu_time = total_cpu_time / 60 / 60 # convert to hours
+ total_wall_time = total_wall_time / 60 / 60 # convert to hours
x_day_wall_time = x_day_wall_time / 60 / 60 # convert to hours
avg_wall_time = total_wall_time / total_wus
avg_cpu_time = total_cpu_time / total_wus
avg_credit_per_task = total_credit / total_wus
- credits_per_hour = (total_credit / (total_wall_time))
- parent_dict['COMPILED_STATS']['TOTALCREDIT'] = total_credit
- parent_dict['COMPILED_STATS']['AVGWALLTIME'] = avg_wall_time
- parent_dict['COMPILED_STATS']['AVGCPUTIME'] = avg_cpu_time
- parent_dict['COMPILED_STATS']['AVGCREDITPERTASK'] = avg_credit_per_task
- parent_dict['COMPILED_STATS']['TOTALTASKS'] = total_wus
- parent_dict['COMPILED_STATS']['TOTALWALLTIME'] = total_wall_time
- parent_dict['COMPILED_STATS']['TOTALCPUTIME'] = total_cpu_time
- parent_dict['COMPILED_STATS']['AVGCREDITPERHOUR'] = credits_per_hour
- parent_dict['COMPILED_STATS']['XDAYWALLTIME'] = x_day_wall_time
- log.debug('For project {} this host has crunched {} WUs for {} total credit with an average of {} credits per WU. {} hours were spent on these WUs for {} credit/hr'.format(project_url.lower(), total_wus, round(total_credit,2), round(avg_credit_per_task,2), round((total_wall_time),2),round(credits_per_hour,2)))
+ credits_per_hour = total_credit / (total_wall_time)
+ parent_dict["COMPILED_STATS"]["TOTALCREDIT"] = total_credit
+ parent_dict["COMPILED_STATS"]["AVGWALLTIME"] = avg_wall_time
+ parent_dict["COMPILED_STATS"]["AVGCPUTIME"] = avg_cpu_time
+ parent_dict["COMPILED_STATS"]["AVGCREDITPERTASK"] = avg_credit_per_task
+ parent_dict["COMPILED_STATS"]["TOTALTASKS"] = total_wus
+ parent_dict["COMPILED_STATS"]["TOTALWALLTIME"] = total_wall_time
+ parent_dict["COMPILED_STATS"]["TOTALCPUTIME"] = total_cpu_time
+ parent_dict["COMPILED_STATS"]["AVGCREDITPERHOUR"] = credits_per_hour
+ parent_dict["COMPILED_STATS"]["XDAYWALLTIME"] = x_day_wall_time
+ log.debug(
+ "For project {} this host has crunched {} WUs for {} total credit with an average of {} credits per WU. {} hours were spent on these WUs for {} credit/hr".format(
+ project_url.lower(),
+ total_wus,
+ round(total_credit, 2),
+ round(avg_credit_per_task, 2),
+ round((total_wall_time), 2),
+ round(credits_per_hour, 2),
+ )
+ )
return return_stats
-def add_mag_to_combined_stats(combined_stats: dict, mag_ratios: Dict[str, float], approved_projects: List[str],preferred_projects:List[str] ) -> Tuple[dict,List[str]]:
+
+def add_mag_to_combined_stats(
+ combined_stats: dict,
+ mag_ratios: Dict[str, float],
+ approved_projects: List[str],
+ preferred_projects: List[str],
+) -> Tuple[dict, List[str]]:
"""
:param combined_stats: combined_stats from main.py
:param mag_ratios: mag ratios returned from get_project_mag_ratios. A dict with project URL as key and mag ratio as value
:return: combined_stats w/ mag ratios added to us, list of projects which are being crunched but not on approved projects list
"""
- unapproved_list=[]
+ unapproved_list = []
for project_url, project_stats in combined_stats.items():
- found_mag_ratio = get_project_from_dict(project_url, mag_ratios,'searching mag_ratios')
+ found_mag_ratio = get_project_from_dict(
+ project_url, mag_ratios, "searching mag_ratios"
+ )
if not found_mag_ratio:
if project_url not in approved_projects:
if project_url not in preferred_projects:
unapproved_list.append(project_url.lower())
- project_stats['COMPILED_STATS']['AVGMAGPERHOUR'] = 0
- project_stats['COMPILED_STATS']['MAGPERCREDIT'] = 0
+ project_stats["COMPILED_STATS"]["AVGMAGPERHOUR"] = 0
+ project_stats["COMPILED_STATS"]["MAGPERCREDIT"] = 0
continue
avg_credit_per_hour = 0
- if 'AVGCREDITPERHOUR' in project_stats['COMPILED_STATS']:
- avg_credit_per_hour = project_stats['COMPILED_STATS']['AVGCREDITPERHOUR']
- project_stats['COMPILED_STATS']['AVGMAGPERHOUR'] = avg_credit_per_hour * found_mag_ratio
- project_stats['COMPILED_STATS']['MAGPERCREDIT']=found_mag_ratio
- return combined_stats,unapproved_list
-
-def get_most_mag_efficient_projects(combinedstats: dict, ignored_projects: List[str], percentdiff: int = 10,quiet:bool=False) -> List[
- str]:
+ if "AVGCREDITPERHOUR" in project_stats["COMPILED_STATS"]:
+ avg_credit_per_hour = project_stats["COMPILED_STATS"]["AVGCREDITPERHOUR"]
+ project_stats["COMPILED_STATS"]["AVGMAGPERHOUR"] = (
+ avg_credit_per_hour * found_mag_ratio
+ )
+ project_stats["COMPILED_STATS"]["MAGPERCREDIT"] = found_mag_ratio
+ return combined_stats, unapproved_list
+
+
+def get_most_mag_efficient_projects(
+ combinedstats: dict,
+ ignored_projects: List[str],
+ percentdiff: int = 10,
+ quiet: bool = False,
+) -> List[str]:
"""
Given combinedstats, return most mag efficient project(s). This is the #1 most efficient project and any other projects which are within percentdiff of that number.
:param combinedstats: combinedstats dict
@@ -916,77 +1218,108 @@ def is_eligible(project_url: str, project_stats: dict):
# Ignore projects and projects w less than 10 completed tasks are ineligible
if project_url in ignored_projects:
return False
- if int(project_stats['COMPILED_STATS']['TOTALTASKS']) >= 10:
+ if int(project_stats["COMPILED_STATS"]["TOTALTASKS"]) >= 10:
return True
return False
return_list = []
- highest_project=None
+ highest_project = None
try:
- highest_project = next(iter(combinedstats)) # first project is the "highest project" until we test others against it
+ highest_project = next(
+ iter(combinedstats)
+ ) # first project is the "highest project" until we test others against it
except Exception as e:
if not quiet:
- print('Searching for most mag efficient projects.. No projects found? Assuming this is a brand new BOINC install'+str(e))
- log.error('Searching for most mag efficient projects.. No projects found? Assuming this is a brand new BOINC install'+str(e))
+ print(
+ "Searching for most mag efficient projects.. No projects found? Assuming this is a brand new BOINC install"
+ + str(e)
+ )
+ log.error(
+ "Searching for most mag efficient projects.. No projects found? Assuming this is a brand new BOINC install"
+ + str(e)
+ )
return []
# find the highest project
for project_url, project_stats in combinedstats.items():
- current_mag_per_hour=project_stats['COMPILED_STATS']['AVGMAGPERHOUR']
- highest_mag_per_hour=combinedstats[highest_project]['COMPILED_STATS']['AVGMAGPERHOUR']
- if current_mag_per_hour > highest_mag_per_hour and is_eligible(project_url, project_stats):
+ current_mag_per_hour = project_stats["COMPILED_STATS"]["AVGMAGPERHOUR"]
+ highest_mag_per_hour = combinedstats[highest_project]["COMPILED_STATS"][
+ "AVGMAGPERHOUR"
+ ]
+ if current_mag_per_hour > highest_mag_per_hour and is_eligible(
+ project_url, project_stats
+ ):
highest_project = project_url
- if combinedstats[highest_project]['COMPILED_STATS']['TOTALTASKS']>=10:
+ if combinedstats[highest_project]["COMPILED_STATS"]["TOTALTASKS"] >= 10:
if not quiet:
- print('\n\nHighest mag/hr project --with at least 10 completed WUs-- is {} w/ {}/hr of credit.'.format(highest_project.lower(),
- combinedstats[highest_project]['COMPILED_STATS'][
- 'AVGMAGPERHOUR']))
- log.info('\n\nHighest mag/hr project //with at least 10 completed WUs// is {} w/ {}/hr of credit.'.format(
- highest_project.lower(),
- combinedstats[highest_project]['COMPILED_STATS'][
- 'AVGMAGPERHOUR']))
+ print(
+ "\n\nHighest mag/hr project --with at least 10 completed WUs-- is {} w/ {}/hr of credit.".format(
+ highest_project.lower(),
+ combinedstats[highest_project]["COMPILED_STATS"]["AVGMAGPERHOUR"],
+ )
+ )
+ log.info(
+ "\n\nHighest mag/hr project //with at least 10 completed WUs// is {} w/ {}/hr of credit.".format(
+ highest_project.lower(),
+ combinedstats[highest_project]["COMPILED_STATS"]["AVGMAGPERHOUR"],
+ )
+ )
return_list.append(highest_project)
# then compare other projects to it to see if any are within 10% of it
- highest_avg_mag = combinedstats[highest_project]['COMPILED_STATS']['AVGMAGPERHOUR']
- minimum_for_inclusion=highest_avg_mag - (highest_avg_mag * .10)
+ highest_avg_mag = combinedstats[highest_project]["COMPILED_STATS"]["AVGMAGPERHOUR"]
+ minimum_for_inclusion = highest_avg_mag - (highest_avg_mag * 0.10)
for project_url, project_stats in combinedstats.items():
- current_avg_mag=project_stats['COMPILED_STATS']['AVGMAGPERHOUR']
+ current_avg_mag = project_stats["COMPILED_STATS"]["AVGMAGPERHOUR"]
if project_url == highest_project:
continue
- if minimum_for_inclusion <= current_avg_mag and is_eligible(project_url, project_stats) and current_avg_mag!=0:
+ if (
+ minimum_for_inclusion <= current_avg_mag
+ and is_eligible(project_url, project_stats)
+ and current_avg_mag != 0
+ ):
if not quiet:
- print('Also including this project because it\'s within 10% variance of highest mag/hr project: {}, mag/hr {}'.format(project_url.lower(), current_avg_mag))
+ print(
+ "Also including this project because it's within 10% variance of highest mag/hr project: {}, mag/hr {}".format(
+ project_url.lower(), current_avg_mag
+ )
+ )
log.info(
- 'Also including this project because it\'s within 10% variance of highest mag/hr project: {}, mag/hr {}'.format(
- project_url.lower(), current_avg_mag))
+ "Also including this project because it's within 10% variance of highest mag/hr project: {}, mag/hr {}".format(
+ project_url.lower(), current_avg_mag
+ )
+ )
return_list.append(project_url)
- #If there is no highest project, return empty list
- if len(return_list)==1:
- if combinedstats[highest_project]['COMPILED_STATS']['TOTALTASKS']<10:
+ # If there is no highest project, return empty list
+ if len(return_list) == 1:
+ if combinedstats[highest_project]["COMPILED_STATS"]["TOTALTASKS"] < 10:
return_list.clear()
return return_list
-def sidestake_check(check_sidestake_results:bool,check_type:str,address:str)->None:
- if check_type=='FOUNDATION':
- message1='It appears that you have not enabled sidestaking to the Gridcoin foundation in your wallet. We believe it is only fair that people benefiting from the Gridcoin network contribute back to it\nSidestaking enables you to contribute a small % of your staking profits (you can choose the %)\nWould you like to enable sidestaking?. \nPlease answer "Y" or "N" (without quotes)'
- message2='What percent would you like to donate to the Gridcoin foundation? Donations go towards software development, promotion, and growth of the coin. Enter a number like 5 for 5%. Please enter whole numbers only'
- elif check_type=='DEVELOPER':
- message1='Are you interested in sidestaking to the developers of this tool? This is optional. We ask you to consider what gain in efficiency this tool can bring you and to donate a small portion of that gain (you can choose the %).\nPlease. I am trying to buy a pony.\nSetting a sidestake amount also skips the "crunching for dev" portion of this tool which will save you some disk space and CPU time. Please answer "Y" or "N" (without quotes)'
- message2='What percent would you like to donate to the developers of this tool? Enter a number like 5 for 5%. Please enter whole numbers only'
+
+
+def sidestake_check(
+ check_sidestake_results: bool, check_type: str, address: str
+) -> None:
+ if check_type == "FOUNDATION":
+ message1 = 'It appears that you have not enabled sidestaking to the Gridcoin foundation in your wallet. We believe it is only fair that people benefiting from the Gridcoin network contribute back to it\nSidestaking enables you to contribute a small % of your staking profits (you can choose the %)\nWould you like to enable sidestaking?. \nPlease answer "Y" or "N" (without quotes)'
+ message2 = "What percent would you like to donate to the Gridcoin foundation? Donations go towards software development, promotion, and growth of the coin. Enter a number like 5 for 5%. Please enter whole numbers only"
+ elif check_type == "DEVELOPER":
+ message1 = 'Are you interested in sidestaking to the developers of this tool? This is optional. We ask you to consider what gain in efficiency this tool can bring you and to donate a small portion of that gain (you can choose the %).\nPlease. I am trying to buy a pony.\nSetting a sidestake amount also skips the "crunching for dev" portion of this tool which will save you some disk space and CPU time. Please answer "Y" or "N" (without quotes)'
+ message2 = "What percent would you like to donate to the developers of this tool? Enter a number like 5 for 5%. Please enter whole numbers only"
else:
- message1=''
- message2=''
+ message1 = ""
+ message2 = ""
if not check_sidestake_results:
answer = input(message1)
- while answer not in ['Y', 'N']:
- print('Error: Y or N not entered. Try again please :)')
+ while answer not in ["Y", "N"]:
+ print("Error: Y or N not entered. Try again please :)")
answer = input("")
- if answer == 'N':
- if check_type=='FOUNDATION':
- print('Ok no problem, it is your choice after all!')
- if check_type=='DEVELOPER':
- print('Ok no problem, it is your choice after all!')
+ if answer == "N":
+ if check_type == "FOUNDATION":
+ print("Ok no problem, it is your choice after all!")
+ if check_type == "DEVELOPER":
+ print("Ok no problem, it is your choice after all!")
return
print(message2)
answer = input("")
@@ -995,14 +1328,21 @@ def sidestake_check(check_sidestake_results:bool,check_type:str,address:str)->No
try:
converted_value = int(answer)
except Exception as e:
- print("Hmm... that didn't seem to work, let's try again. Please enter a whole number")
+ print(
+ "Hmm... that didn't seem to work, let's try again. Please enter a whole number"
+ )
answer = input("")
- with open(os.path.join(gridcoin_data_dir, 'gridcoinresearch.conf'), "a") as myfile:
- if 'enablesidestaking=1' not in str(myfile):
+ with open(
+ os.path.join(gridcoin_data_dir, "gridcoinresearch.conf"), "a"
+ ) as myfile:
+ if "enablesidestaking=1" not in str(myfile):
myfile.write("enablesidestaking=1\n")
- myfile.write('sidestake='+address+',' + str(converted_value) + '\n')
-def get_project_mag_ratios(grc_client: GridcoinClientConnection, lookback_period: int = 30) -> Dict[
- str, float]:
+ myfile.write("sidestake=" + address + "," + str(converted_value) + "\n")
+
+
+def get_project_mag_ratios(
+ grc_client: GridcoinClientConnection, lookback_period: int = 30
+) -> Dict[str, float]:
"""
:param grc_client:
:param lookback_period: number of superblocks to look back to determine average
@@ -1011,205 +1351,284 @@ def get_project_mag_ratios(grc_client: GridcoinClientConnection, lookback_period
projects = {}
return_dict = {}
mag_per_project = 0
- command_result= grc_client.run_command('superblocks', [30, True])
+ command_result = grc_client.run_command("superblocks", [30, True])
for i in range(0, lookback_period):
- superblock=command_result['result'][i]
+ superblock = command_result["result"][i]
if i == 0:
- total_magnitude = superblock['total_magnitude']
- total_projects = superblock['total_projects']
+ total_magnitude = superblock["total_magnitude"]
+ total_projects = superblock["total_projects"]
mag_per_project = total_magnitude / total_projects
- for project_name, project_stats in superblock['Contract Contents']['projects'].items():
+ for project_name, project_stats in superblock["Contract Contents"][
+ "projects"
+ ].items():
if project_name not in projects:
- if i==0:
+ if i == 0:
projects[project_name] = []
else:
- continue # skip projects which are on greylist
- projects[project_name].append(project_stats['rac'])
+ continue # skip projects which are on greylist
+ projects[project_name].append(project_stats["rac"])
for project_name, project_racs in projects.items():
average_rac = sum(project_racs) / len(project_racs)
project_url = grc_client.project_name_to_url(project_name)
return_dict[project_url] = mag_per_project / average_rac
return return_dict
-def project_url_to_name(url:str,project_names:dict=None):
+
+
+def project_url_to_name(url: str, project_names: dict = None):
if not project_names:
- project_names=BOINC_PROJECT_NAMES
- search=url.lower().replace('https://','').replace('http://','').replace('www.','')
- found=search
- for project_url,name in project_names.items():
+ project_names = BOINC_PROJECT_NAMES
+ search = (
+ url.lower().replace("https://", "").replace("http://", "").replace("www.", "")
+ )
+ found = search
+ for project_url, name in project_names.items():
if search in project_url.lower():
- found=name.lower().replace('@home','').replace('athome','')
+ found = name.lower().replace("@home", "").replace("athome", "")
return found
-def print_table(table_dict:Dict[str,Dict[str,str]], sortby:str='GRC/HR', sleep_reason:str=DATABASE['TABLE_SLEEP_REASON'], status:str=DATABASE['TABLE_STATUS'],dev_status:bool=False):
- def left_align(yourstring:str,total_len:int,min_pad:int=0)->str:
+
+def print_table(
+ table_dict: Dict[str, Dict[str, str]],
+ sortby: str = "GRC/HR",
+ sleep_reason: str = DATABASE["TABLE_SLEEP_REASON"],
+ status: str = DATABASE["TABLE_STATUS"],
+ dev_status: bool = False,
+):
+ def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
"""
Return left-aligned string with a total len of X and min_padding (extra space on right side) of min_pad, cutting off string if needed
If min_pad==1, it looks like this ' yourstring '
"""
- if len(yourstring)>=total_len-min_pad:
- yourstring=yourstring[0:total_len-(min_pad)]
- space_left=total_len-(len(yourstring)+min_pad)
- right_pad = ' ' * space_left
- return yourstring+right_pad
- def center_align(yourstring:str,total_len:int,min_pad:int=0)->str:
+ if len(yourstring) >= total_len - min_pad:
+ yourstring = yourstring[0 : total_len - (min_pad)]
+ space_left = total_len - (len(yourstring) + min_pad)
+ right_pad = " " * space_left
+ return yourstring + right_pad
+
+ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
"""
Return center-aligned string with a total len of X and min_padding (extra space on right & left side) of min_pad, cutting off string if needed
If min_pad==1, it looks like this ' yourstring '
"""
- if len(yourstring)>=total_len-min_pad:
- yourstring=yourstring[0:total_len-(min_pad)]
- space_left=total_len-(len(yourstring)+min_pad)
- left_pad=' '*floor(space_left/2)
- right_pad = ' ' * ceil(space_left/2)
- return left_pad+yourstring+right_pad
-
-
- if len(table_dict)==0:
+ if len(yourstring) >= total_len - min_pad:
+ yourstring = yourstring[0 : total_len - (min_pad)]
+ space_left = total_len - (len(yourstring) + min_pad)
+ left_pad = " " * floor(space_left / 2)
+ right_pad = " " * ceil(space_left / 2)
+ return left_pad + yourstring + right_pad
+
+ if len(table_dict) == 0:
return
- headings=[]
- heading_length:Dict[str,int]={} # length of each heading column
- values={}
- working_dict=copy.deepcopy(table_dict)
+ headings = []
+ heading_length: Dict[str, int] = {} # length of each heading column
+ values = {}
+ working_dict = copy.deepcopy(table_dict)
# convert urls to nice names, add USD/GRC/hr
for url in list(working_dict.keys()):
- name=project_url_to_name(url,ALL_BOINC_PROJECTS)
+ name = project_url_to_name(url, ALL_BOINC_PROJECTS)
if not name:
- name=url
- stats=table_dict[url]
- working_dict[name]=stats
- if name!=url:
+ name = url
+ stats = table_dict[url]
+ working_dict[name] = stats
+ if name != url:
del working_dict[url]
# add usd/grc/hr to each project
- if working_dict[name].get('MAG/HR'):
- grc_per_hour=float(working_dict[name].get('MAG/HR',0))/4
- grc_per_day = (float(working_dict[name].get('MAG/HR', 0)) / 4)*24
- working_dict[name]['GRC/HR']=str('{:.3f}').format(grc_per_hour)
- working_dict[name]['GRC/DAY'] = str('{:.3f}').format(grc_per_day)
- if float(working_dict[name].get('MAG/HR'))!=0:
- revenue_per_hour = (float(working_dict[name].get('MAG/HR')) / 4) * DATABASE.get('GRCPRICE',0)
+ if working_dict[name].get("MAG/HR"):
+ grc_per_hour = float(working_dict[name].get("MAG/HR", 0)) / 4
+ grc_per_day = (float(working_dict[name].get("MAG/HR", 0)) / 4) * 24
+ working_dict[name]["GRC/HR"] = str("{:.3f}").format(grc_per_hour)
+ working_dict[name]["GRC/DAY"] = str("{:.3f}").format(grc_per_day)
+ if float(working_dict[name].get("MAG/HR")) != 0:
+ revenue_per_hour = (
+ float(working_dict[name].get("MAG/HR")) / 4
+ ) * DATABASE.get("GRCPRICE", 0)
exchange_expenses = revenue_per_hour * exchange_fee
expenses_per_hour = exchange_expenses + HOST_COST_PER_HOUR
profit = revenue_per_hour - expenses_per_hour
- working_dict[name]['USD/HR R/P']='{:.4f}/{:.4f}'.format(revenue_per_hour,profit)
+ working_dict[name]["USD/HR R/P"] = "{:.4f}/{:.4f}".format(
+ revenue_per_hour, profit
+ )
else:
- working_dict[name]['USD/HR R/P'] = '0'
- del working_dict[name]['MAG/HR']
+ working_dict[name]["USD/HR R/P"] = "0"
+ del working_dict[name]["MAG/HR"]
# figure out table headings
- for url,stats in working_dict.items():
- for key,value in stats.items():
+ for url, stats in working_dict.items():
+ for key, value in stats.items():
if key not in headings:
headings.append(key)
if key not in heading_length:
- heading_length[key]=len(key)+2
- heading_length[key]=len(key)+2
+ heading_length[key] = len(key) + 2
+ heading_length[key] = len(key) + 2
if key not in values:
- values[key]=[]
+ values[key] = []
if value not in values[key]:
values[key].append(value)
- longest_url= len(max(working_dict.keys(), key=len))
- table_width=longest_url+len(str(values.keys()))
+ longest_url = len(max(working_dict.keys(), key=len))
+ table_width = longest_url + len(str(values.keys()))
# print header
## print first line
- print('*'*table_width)
- print('*' + center_align('FINDTHEMAG V2.0',table_width-2)+ '*')
- print('*' * table_width)
+ print("*" * table_width)
+ print("*" + center_align("FINDTHEMAG V2.0", table_width - 2) + "*")
+ print("*" * table_width)
## print rest of header
- padding_str = ' ' * (longest_url+1)
- print('*'+padding_str,end='|')
+ padding_str = " " * (longest_url + 1)
+ print("*" + padding_str, end="|")
for heading in headings:
- print(center_align(heading,heading_length[heading])+'|',end="")
- print('')
+ print(center_align(heading, heading_length[heading]) + "|", end="")
+ print("")
# print contents
- sortedprojects= sorted(working_dict.keys(),key=lambda a: float(working_dict[a].get(sortby,0)),reverse=True)
+ sortedprojects = sorted(
+ working_dict.keys(),
+ key=lambda a: float(working_dict[a].get(sortby, 0)),
+ reverse=True,
+ )
for url in sortedprojects:
- stats=working_dict[url]
- url_padding=longest_url-len(url)
- url_padding_str=' '*url_padding
- print('* '+url.lower()+url_padding_str,end='|')
+ stats = working_dict[url]
+ url_padding = longest_url - len(url)
+ url_padding_str = " " * url_padding
+ print("* " + url.lower() + url_padding_str, end="|")
for heading in headings:
- value=stats.get(heading,'')
+ value = stats.get(heading, "")
print(left_align(value, heading_length[heading]), end="|")
- print('')
+ print("")
# print bottom bar
- print('*' * table_width)
+ print("*" * table_width)
if not sleep_reason:
- sleep_reason='NONE'
- elif sleep_reason=='':
- sleep_reason='NONE'
- bottom_bar_1='*'+left_align('Sleep reason: {}'.format(sleep_reason),total_len=60,min_pad=1)+'*'
- bottom_bar_2=left_align('Info: {}'.format(status),total_len=60,min_pad=1)
- bottom_bar_3=left_align('GRC Price: {:.4f}'.format(DATABASE.get('GRCPRICE',0.00000)),total_len=17,min_pad=1)+'*'
- print(bottom_bar_1+bottom_bar_2+bottom_bar_3)
+ sleep_reason = "NONE"
+ elif sleep_reason == "":
+ sleep_reason = "NONE"
+ bottom_bar_1 = (
+ "*"
+ + left_align("Sleep reason: {}".format(sleep_reason), total_len=60, min_pad=1)
+ + "*"
+ )
+ bottom_bar_2 = left_align("Info: {}".format(status), total_len=60, min_pad=1)
+ bottom_bar_3 = (
+ left_align(
+ "GRC Price: {:.4f}".format(DATABASE.get("GRCPRICE", 0.00000)),
+ total_len=17,
+ min_pad=1,
+ )
+ + "*"
+ )
+ print(bottom_bar_1 + bottom_bar_2 + bottom_bar_3)
if dev_status or DEV_LOOP_RUNNING:
- print('Crunching for developer, main BOINC is paused. You can monitor by connecting BOINC manager to 127.0.0.1:31418 pwd: {}'.format(DEV_BOINC_PASSWORD))
+ print(
+ "Crunching for developer, main BOINC is paused. You can monitor by connecting BOINC manager to 127.0.0.1:31418 pwd: {}".format(
+ DEV_BOINC_PASSWORD
+ )
+ )
# print improved stats
- addl=''
- curr_avg_mag=get_avg_mag_hr(combined_stats)
- if curr_avg_mag>DATABASE['STARTMAGHR'] and DATABASE['STARTMAGHR']>0:
- increase=(curr_avg_mag-DATABASE['STARTMAGHR'])/DATABASE['STARTMAGHR']
- addl=" That's an increase of {:.2f}%!".format(increase)
- print('When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}'.format(
- DATABASE['STARTMAGHR'], get_avg_mag_hr(combined_stats))+addl)
- print('Hours crunched for you vs dev: {:.1f}|{:.1f} '.format(DATABASE['FTMTOTAL']/60,DATABASE['DEVTIMETOTAL']/60))
+ addl = ""
+ curr_avg_mag = get_avg_mag_hr(combined_stats)
+ if curr_avg_mag > DATABASE["STARTMAGHR"] and DATABASE["STARTMAGHR"] > 0:
+ increase = (curr_avg_mag - DATABASE["STARTMAGHR"]) / DATABASE["STARTMAGHR"]
+ addl = " That's an increase of {:.2f}%!".format(increase)
+ print(
+ "When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}".format(
+ DATABASE["STARTMAGHR"], get_avg_mag_hr(combined_stats)
+ )
+ + addl
+ )
+ print(
+ "Hours crunched for you vs dev: {:.1f}|{:.1f} ".format(
+ DATABASE["FTMTOTAL"] / 60, DATABASE["DEVTIMETOTAL"] / 60
+ )
+ )
# print final line
if not check_sidestake_results:
- print('Consider donating to this app\'s development directly or via sidestake: RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K. Sidestaking means you can skip crunching for dev')
- print('Use Ctrl+C to exit FTM and return BOINC to previous config')
- print('*' * table_width)
-def in_list(str,list)->bool:
- search_str=str.upper()
- search_str=search_str.replace('HTTPS://','')
- search_str = search_str.replace('HTTP://', '')
- search_str = search_str.replace('WWW.', '')
- search_str = search_str.replace('WORLDCOMMUNITYGRID.ORG/BOINC/', 'WORLDCOMMUNITYGRID.ORG') # fix for WCG
+ print(
+ "Consider donating to this app's development directly or via sidestake: RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K. Sidestaking means you can skip crunching for dev"
+ )
+ print("Use Ctrl+C to exit FTM and return BOINC to previous config")
+ print("*" * table_width)
+
+
+def in_list(str, list) -> bool:
+ search_str = str.upper()
+ search_str = search_str.replace("HTTPS://", "")
+ search_str = search_str.replace("HTTP://", "")
+ search_str = search_str.replace("WWW.", "")
+ search_str = search_str.replace(
+ "WORLDCOMMUNITYGRID.ORG/BOINC/", "WORLDCOMMUNITYGRID.ORG"
+ ) # fix for WCG
for item in list:
- if search_str==item.upper() or search_str in item.upper():
+ if search_str == item.upper() or search_str in item.upper():
return True
return False
-def generate_stats(APPROVED_PROJECT_URLS:List[str],preferred_projects:Dict[str,float]=preferred_projects,ignored_projects:List[str]=ignored_projects,quiet:bool=False,ignore_unattached:bool=False,attached_list:List[str]=None,mag_ratios=Dict[str,float]):
+
+
+def generate_stats(
+ APPROVED_PROJECT_URLS: List[str],
+ preferred_projects: Dict[str, float] = preferred_projects,
+ ignored_projects: List[str] = ignored_projects,
+ quiet: bool = False,
+ ignore_unattached: bool = False,
+ attached_list: List[str] = None,
+ mag_ratios=Dict[str, float],
+):
if not attached_list:
- attached_list=[]
- weak_stats=[]
+ attached_list = []
+ weak_stats = []
if not quiet:
- print('Gathering project stats...')
- log.info('Gathering project stats..')
+ print("Gathering project stats...")
+ log.info("Gathering project stats..")
combined_stats = config_files_to_stats(boinc_data_dir)
if not quiet:
- print_and_log('Calculating project weights...','INFO')
- print('Curing some cancer along the way...')
+ print_and_log("Calculating project weights...", "INFO")
+ print("Curing some cancer along the way...")
# Calculate project weights w/ credit/hr
final_project_weights = {}
dev_project_weights = {}
# Uppercase preferred_projects list
for url in list(preferred_projects.keys()):
- weight=preferred_projects[url]
+ weight = preferred_projects[url]
del preferred_projects[url]
preferred_projects[url.upper()] = weight
- ignored_projects = [x.upper() for x in ignored_projects] # uppercase ignored project url list
+ ignored_projects = [
+ x.upper() for x in ignored_projects
+ ] # uppercase ignored project url list
# ignore unattached projects if requested
if ignore_unattached:
for project in APPROVED_PROJECT_URLS:
- if not in_list(project,attached_list):
+ if not in_list(project, attached_list):
ignored_projects.append(project.upper())
- log.warning('Ignoring whitelisted project {} bc not attached'.format(project))
- combined_stats,unapproved_projects = add_mag_to_combined_stats(combined_stats, mag_ratios, APPROVED_PROJECT_URLS,list(preferred_projects.keys()))
- if len(unapproved_projects)>0:
- print('Warning: Projects below were found in your BOINC config but are not on the gridcoin approval list or your preferred projects list. If you want them to be given weight, be sure to add them to your preferred projects')
+ log.warning(
+ "Ignoring whitelisted project {} bc not attached".format(project)
+ )
+ combined_stats, unapproved_projects = add_mag_to_combined_stats(
+ combined_stats,
+ mag_ratios,
+ APPROVED_PROJECT_URLS,
+ list(preferred_projects.keys()),
+ )
+ if len(unapproved_projects) > 0:
+ print(
+ "Warning: Projects below were found in your BOINC config but are not on the gridcoin approval list or your preferred projects list. If you want them to be given weight, be sure to add them to your preferred projects"
+ )
log.warning(
- 'Warning: Projects below were found in your BOINC config but are not on the gridcoin approval list or your preferred projects list. If you want them to be given weight, be sure to add them to your preferred projects' + str(unapproved_projects))
+ "Warning: Projects below were found in your BOINC config but are not on the gridcoin approval list or your preferred projects list. If you want them to be given weight, be sure to add them to your preferred projects"
+ + str(unapproved_projects)
+ )
pprint.pprint(unapproved_projects)
- most_efficient_projects = get_most_mag_efficient_projects(combined_stats, ignored_projects,quiet=quiet)
- if len(most_efficient_projects)==0:
+ most_efficient_projects = get_most_mag_efficient_projects(
+ combined_stats, ignored_projects, quiet=quiet
+ )
+ if len(most_efficient_projects) == 0:
print(
- 'No projects have enough completed tasks to determine which is the most efficient. Assigning all projects 1')
+ "No projects have enough completed tasks to determine which is the most efficient. Assigning all projects 1"
+ )
log.warning(
- 'No projects have enough completed tasks to determine which is the most efficient. Assigning all projects 1')
- total_preferred_weight=1000-(len(APPROVED_PROJECT_URLS))+len(preferred_projects)
+ "No projects have enough completed tasks to determine which is the most efficient. Assigning all projects 1"
+ )
+ total_preferred_weight = (
+ 1000 - (len(APPROVED_PROJECT_URLS)) + len(preferred_projects)
+ )
total_mining_weight = 0
else:
total_preferred_weight = (preferred_projects_percent / 100) * 1000
@@ -1217,84 +1636,135 @@ def generate_stats(APPROVED_PROJECT_URLS:List[str],preferred_projects:Dict[str,f
total_mining_weight_remaining = total_mining_weight
# assign weight of 1 to all projects which didn't make the cut
for project_url in APPROVED_PROJECT_URLS:
- preferred_extract=get_project_from_dict(project_url,preferred_projects,'IGNOREME')
+ preferred_extract = get_project_from_dict(
+ project_url, preferred_projects, "IGNOREME"
+ )
if preferred_extract:
continue # exclude preferred projects
if project_url in ignored_projects:
final_project_weights[project_url] = 0
dev_project_weights[project_url] = 0
continue
- combined_stats_extract=get_project_from_dict(project_url,combined_stats,'searching combined_stats in generate_stats')
+ combined_stats_extract = get_project_from_dict(
+ project_url, combined_stats, "searching combined_stats in generate_stats"
+ )
if not combined_stats_extract:
- log.debug('Warning: project has no stats, setting project weight to one: ' + project_url.lower())
+ log.debug(
+ "Warning: project has no stats, setting project weight to one: "
+ + project_url.lower()
+ )
final_project_weights[project_url] = 1
total_mining_weight_remaining -= 1
dev_project_weights[project_url] = 0
weak_stats.append(project_url.lower())
continue
- total_tasks = int(combined_stats_extract['COMPILED_STATS']['TOTALTASKS'])
+ total_tasks = int(combined_stats_extract["COMPILED_STATS"]["TOTALTASKS"])
if total_tasks < 10:
- log.debug('Warning: project does not have enough tasks to compute accurate average, setting project weight to one: ' + project_url.lower())
+ log.debug(
+ "Warning: project does not have enough tasks to compute accurate average, setting project weight to one: "
+ + project_url.lower()
+ )
weak_stats.append(project_url.lower())
if project_url not in most_efficient_projects or total_tasks < 10:
final_project_weights[project_url] = 1
total_mining_weight_remaining -= 1
- if len(most_efficient_projects)>0:
+ if len(most_efficient_projects) > 0:
if quiet:
- log.debug('The following projects do not have enough stats to be calculated accurately, assigning them a weight of one: '+str(weak_stats))
+ log.debug(
+ "The following projects do not have enough stats to be calculated accurately, assigning them a weight of one: "
+ + str(weak_stats)
+ )
else:
- print_and_log('The following projects do not have enough stats to be calculated accurately, assigning them a weight of one: ','INFO')
+ print_and_log(
+ "The following projects do not have enough stats to be calculated accurately, assigning them a weight of one: ",
+ "INFO",
+ )
pprint.pprint(weak_stats)
# Figure out weight to assign to most efficient projects, assign it
- if len(most_efficient_projects)==0:
- per_efficient_project=0
+ if len(most_efficient_projects) == 0:
+ per_efficient_project = 0
per_efficient_project_dev = 0
else:
- per_efficient_project = total_mining_weight_remaining / len(most_efficient_projects)
- per_efficient_project_dev = 1000/len(most_efficient_projects)
- if total_mining_weight_remaining>0:
+ per_efficient_project = total_mining_weight_remaining / len(
+ most_efficient_projects
+ )
+ per_efficient_project_dev = 1000 / len(most_efficient_projects)
+ if total_mining_weight_remaining > 0:
if not quiet:
- print('Assigning ' + str(total_mining_weight_remaining) + ' weight to ' + str(
- len(most_efficient_projects)) + ' mining projects which means ' + str(per_efficient_project) + ' per project ')
- log.info('Assigning ' + str(total_mining_weight_remaining) + ' weight to ' + str(
- len(most_efficient_projects)) + ' mining projects which means ' + str(per_efficient_project) + ' per project ')
+ print(
+ "Assigning "
+ + str(total_mining_weight_remaining)
+ + " weight to "
+ + str(len(most_efficient_projects))
+ + " mining projects which means "
+ + str(per_efficient_project)
+ + " per project "
+ )
+ log.info(
+ "Assigning "
+ + str(total_mining_weight_remaining)
+ + " weight to "
+ + str(len(most_efficient_projects))
+ + " mining projects which means "
+ + str(per_efficient_project)
+ + " per project "
+ )
for project_url in most_efficient_projects:
if project_url not in final_project_weights:
- final_project_weights[project_url]=0
- dev_project_weights[project_url]=0
+ final_project_weights[project_url] = 0
+ dev_project_weights[project_url] = 0
final_project_weights[project_url] += per_efficient_project
- dev_project_weights[project_url]= per_efficient_project_dev
+ dev_project_weights[project_url] = per_efficient_project_dev
# Assign weight to preferred projects
- for project_url,weight in preferred_projects.items():
- final_project_weights_extract=get_project_from_dict(project_url,final_project_weights, 'IGNOREME')
- preferred_project_weights_extract=get_project_from_dict(project_url, preferred_projects,'searching preferred_projects in generate_stats')
+ for project_url, weight in preferred_projects.items():
+ final_project_weights_extract = get_project_from_dict(
+ project_url, final_project_weights, "IGNOREME"
+ )
+ preferred_project_weights_extract = get_project_from_dict(
+ project_url,
+ preferred_projects,
+ "searching preferred_projects in generate_stats",
+ )
if not final_project_weights_extract:
- final_project_weights[project_url]=0
- intended_weight=(preferred_project_weights_extract / 100) * total_preferred_weight
+ final_project_weights[project_url] = 0
+ intended_weight = (
+ preferred_project_weights_extract / 100
+ ) * total_preferred_weight
final_project_weights[project_url] += intended_weight
- return combined_stats,final_project_weights,total_preferred_weight,total_mining_weight,dev_project_weights
-async def kill_all_unstarted_tasks(rpc_client: libs.pyboinc.rpc_client,task_list:list):
+ return (
+ combined_stats,
+ final_project_weights,
+ total_preferred_weight,
+ total_mining_weight,
+ dev_project_weights,
+ )
+
+
+async def kill_all_unstarted_tasks(
+ rpc_client: libs.pyboinc.rpc_client, task_list: list
+):
project_status_reply = await rpc_client.get_project_status()
- found_projects = [] # DEBUG ADDED TYPE THIS CORRECTLY
+ found_projects = [] # DEBUG ADDED TYPE THIS CORRECTLY
for task in task_list:
- #elapsed_time=task['active_task']['current_cpu_time'].seconds
- name=task['name']
- wu_name=task['wu_name']
- project_url=task['project_url'].master_url
- if 'active_task' not in task:
- print('Cancelling unstarted task {}'.format(task))
- log.info('Cancelling unstarted task {}'.format(task))
- req = ET.Element('abort_result')
- a = ET.SubElement(req, 'project_url')
+ # elapsed_time=task['active_task']['current_cpu_time'].seconds
+ name = task["name"]
+ wu_name = task["wu_name"]
+ project_url = task["project_url"].master_url
+ if "active_task" not in task:
+ print("Cancelling unstarted task {}".format(task))
+ log.info("Cancelling unstarted task {}".format(task))
+ req = ET.Element("abort_result")
+ a = ET.SubElement(req, "project_url")
a.text = project_url
- b = ET.SubElement(req, 'name')
+ b = ET.SubElement(req, "name")
b.text = name
response = await rpc_client._request(req)
parsed = parse_generic(response) # returns True if successful
- a="21"
+ a = "21"
else:
- #print('Keeping task {}'.format(task))
- log.debug('Keeping task {}'.format(task))
+ # print('Keeping task {}'.format(task))
+ log.debug("Keeping task {}".format(task))
+
async def nnt_all_projects(rpc_client: libs.pyboinc.rpc_client):
project_status_reply = await rpc_client.get_project_status()
@@ -1302,357 +1772,493 @@ async def nnt_all_projects(rpc_client: libs.pyboinc.rpc_client):
for project in project_status_reply:
found_projects.append(project.master_url)
for project in found_projects:
- req = ET.Element('project_nomorework')
- a = ET.SubElement(req, 'project_url')
+ req = ET.Element("project_nomorework")
+ a = ET.SubElement(req, "project_url")
a.text = project
response = await rpc_client._request(req)
- parsed = parse_generic(response) # returns True if successful
-async def check_log_entries(rpc_client: libs.pyboinc.rpc_client,project_name:str)->bool:
+ parsed = parse_generic(response) # returns True if successful
+
+
+async def check_log_entries(
+ rpc_client: libs.pyboinc.rpc_client, project_name: str
+) -> bool:
"""
Return True if project cache full, False otherwise.
project_name: name of project as it will appear in BOINC logs, NOT URL
"""
+
def ignore_message(message):
- ignore_phrases=[
- 'work fetch resumed by user',
- 'update requested by user',
- 'sending scheduler request',
- 'scheduler request completed',
- 'project requested delay',
- 'work fetch suspended by user',
- 'Started download of',
- 'Finished download of',
- 'Starting task',
- 'Requesting new tasks'
- 'last request too recent'
- 'master file download succeeded',
- 'No tasks sent',
- 'Requesting new tasks for',
- 'no tasks are available for',
- 'computation for task',
- 'started upload of',
- 'finished upload of',
- 'This computer has reached a limit on tasks in progress',
- 'Upgrade to the latest driver to process tasks using your computer\'s GPU',
- 'project has no tasks available'
+ ignore_phrases = [
+ "work fetch resumed by user",
+ "update requested by user",
+ "sending scheduler request",
+ "scheduler request completed",
+ "project requested delay",
+ "work fetch suspended by user",
+ "Started download of",
+ "Finished download of",
+ "Starting task",
+ "Requesting new tasks"
+ "last request too recent"
+ "master file download succeeded",
+ "No tasks sent",
+ "Requesting new tasks for",
+ "no tasks are available for",
+ "computation for task",
+ "started upload of",
+ "finished upload of",
+ "This computer has reached a limit on tasks in progress",
+ "Upgrade to the latest driver to process tasks using your computer's GPU",
+ "project has no tasks available",
]
- uppered_message=str(message).upper()
+ uppered_message = str(message).upper()
for phrase in ignore_phrases:
if phrase.upper() in uppered_message:
return True
- if 'UP TO' in uppered_message and 'NEEDS' in uppered_message and 'IS AVAILABLE FOR USE' in uppered_message and 'BUT ONLY' in uppered_message:
+ if (
+ "UP TO" in uppered_message
+ and "NEEDS" in uppered_message
+ and "IS AVAILABLE FOR USE" in uppered_message
+ and "BUT ONLY" in uppered_message
+ ):
return True
- if 'REPORTING' and 'COMPLETED TASKS' in uppered_message:
+ if "REPORTING" and "COMPLETED TASKS" in uppered_message:
return True
return False
- def cache_full(project_name:str,messages)->bool:
+
+ def cache_full(project_name: str, messages) -> bool:
"""
Returns TRUE if CPU /AND/ GPU cache full, False is either is un-full.
Systems w/o GPU will be assumed to have a "full cache" for GPU
"""
- cpu_full=False
- gpu_full=False
+ cpu_full = False
+ gpu_full = False
for message in messages:
if project_name.upper() not in str(message).upper():
continue
- difference = datetime.datetime.now() - message['time']
- if difference.seconds>60*5: # if message is > 5 min old, skip
+ difference = datetime.datetime.now() - message["time"]
+ if difference.seconds > 60 * 5: # if message is > 5 min old, skip
continue
- if project_name.upper()==message['project'].upper():
- if "Not requesting tasks: don't need".upper() in message['body'].upper():
- if 'GPU' not in message['body'].upper():
- gpu_full=True # if no GPU, GPU cache is always full
- if "CPU: job cache full".upper() in message['body'].upper() or "Not requesting tasks: don't need (job cache full)".upper() in message['body'].upper():
- cpu_full=True
- #print('CPU cache appears full {}'.format(message['body']))
- log.debug('CPU cache appears full {}'.format(message['body']))
+ if project_name.upper() == message["project"].upper():
+ if (
+ "Not requesting tasks: don't need".upper()
+ in message["body"].upper()
+ ):
+ if "GPU" not in message["body"].upper():
+ gpu_full = True # if no GPU, GPU cache is always full
+ if (
+ "CPU: job cache full".upper() in message["body"].upper()
+ or "Not requesting tasks: don't need (job cache full)".upper()
+ in message["body"].upper()
+ ):
+ cpu_full = True
+ # print('CPU cache appears full {}'.format(message['body']))
+ log.debug("CPU cache appears full {}".format(message["body"]))
else:
- if "Not requesting tasks: don't need ()".upper() in message['body'].upper():
+ if (
+ "Not requesting tasks: don't need ()".upper()
+ in message["body"].upper()
+ ):
pass
else:
- #print('CPU cache appears not full {}'.format(message['body']))
- log.debug('CPU cache appears not full {}'.format(message['body']))
- if "GPU: job cache full".upper() in message['body'].upper():
- gpu_full=True
- #print('GPU cache appears full {}'.format(message['body']))
- log.debug('GPU cache appears full {}'.format(message['body']))
- elif 'GPUs not usable'.upper() in message['body'].upper():
+ # print('CPU cache appears not full {}'.format(message['body']))
+ log.debug(
+ "CPU cache appears not full {}".format(message["body"])
+ )
+ if "GPU: job cache full".upper() in message["body"].upper():
gpu_full = True
- log.debug('GPU cache appears full {}'.format(message['body']))
+ # print('GPU cache appears full {}'.format(message['body']))
+ log.debug("GPU cache appears full {}".format(message["body"]))
+ elif "GPUs not usable".upper() in message["body"].upper():
+ gpu_full = True
+ log.debug("GPU cache appears full {}".format(message["body"]))
else:
- if "Not requesting tasks: don't need ()".upper() in message['body'].upper():
+ if (
+ "Not requesting tasks: don't need ()".upper()
+ in message["body"].upper()
+ ):
pass
else:
- if not gpu_full: # if GPU is not mentioned in log, this would always happen so using this to stop erroneous messages
- #print('GPU cache appears not full {}'.format(message['body']))
- log.debug('GPU cache appears not full {}'.format(message['body']))
+ if (
+ not gpu_full
+ ): # if GPU is not mentioned in log, this would always happen so using this to stop erroneous messages
+ # print('GPU cache appears not full {}'.format(message['body']))
+ log.debug(
+ "GPU cache appears not full {}".format(
+ message["body"]
+ )
+ )
continue
elif ignore_message(message):
pass
else:
- log.warning('Found unknown message1: {}'.format(message['body']))
+ log.warning("Found unknown message1: {}".format(message["body"]))
if cpu_full and gpu_full:
return True
return False
# Get message count
- req = ET.Element('get_message_count')
+ req = ET.Element("get_message_count")
msg_count_response = await rpc_client._request(req)
message_count = int(parse_generic(msg_count_response))
- req = ET.Element('get_messages')
- a = ET.SubElement(req, 'seqno')
- a.text = str(message_count-50) # get ten most recent messages
+ req = ET.Element("get_messages")
+ a = ET.SubElement(req, "seqno")
+ a.text = str(message_count - 50) # get ten most recent messages
messages_response = await rpc_client._request(req)
messages = parse_generic(messages_response) # returns True if successful
- if cache_full(project_name,messages):
+ if cache_full(project_name, messages):
return True
return False
-async def check_log_entries_for_backoff(rpc_client: libs.pyboinc.rpc_client,project_name:str)->bool:
+
+
+async def check_log_entries_for_backoff(
+ rpc_client: libs.pyboinc.rpc_client, project_name: str
+) -> bool:
"""
Return True if project should be backed off, False otherwise
project_name: name of project as it will appear in BOINC logs, NOT URL
"""
- def ignore_message(message,ignore_phrases:List[str]):
- lowered=str(message['body']).lower()
- uppered=str(message['body']).upper()
+
+ def ignore_message(message, ignore_phrases: List[str]):
+ lowered = str(message["body"]).lower()
+ uppered = str(message["body"]).upper()
for phrase in ignore_phrases:
if phrase.upper() in uppered:
return True
- if 'got' and 'new tasks' in lowered:
+ if "got" and "new tasks" in lowered:
return True
- if 'reporting' and 'completed tasks' in lowered:
+ if "reporting" and "completed tasks" in lowered:
return True
- if 'computation for task' and 'finished' in lowered:
+ if "computation for task" and "finished" in lowered:
return True
return False
- def project_backoff(project_name:str,messages)->bool:
+
+ def project_backoff(project_name: str, messages) -> bool:
"""
Returns TRUE if project should be backed off. False otherwise or if unable to determine
"""
- #Phrases which indicate project SHOULD be backed off
+ # Phrases which indicate project SHOULD be backed off
# removed 'project requested delay' from positive phrases because projects always provide this, even if work was provided!
- positive_phrases=['project has no tasks available','scheduler request failed','no tasks sent','last request too recent','An NVIDIA GPU is required to run tasks for this project']
+ positive_phrases = [
+ "project has no tasks available",
+ "scheduler request failed",
+ "no tasks sent",
+ "last request too recent",
+ "An NVIDIA GPU is required to run tasks for this project",
+ ]
# Phrases which indicate project SHOULD NOT be backed off
- negative_phrases=["Not requesting tasks: don't need",'started download','Finished download of']
+ negative_phrases = [
+ "Not requesting tasks: don't need",
+ "started download",
+ "Finished download of",
+ ]
# Phrases which indicate we can skip this log entry
ignore_phrases = [
- 'work fetch resumed by user',
- 'update requested by user',
- 'work fetch suspended by user',
- 'Starting task',
- 'Requesting new tasks',
- 'sending scheduler request',
- 'scheduler request completed',
- 'started upload',
- 'finished upload',
- 'master file download succeeded',
- 'fetching scheduler list',
- 'Upgrade to the latest driver to process tasks using your computer\'s GPU',
- 'not started and deadline has passed',
- 'Project requested delay of'
+ "work fetch resumed by user",
+ "update requested by user",
+ "work fetch suspended by user",
+ "Starting task",
+ "Requesting new tasks",
+ "sending scheduler request",
+ "scheduler request completed",
+ "started upload",
+ "finished upload",
+ "master file download succeeded",
+ "fetching scheduler list",
+ "Upgrade to the latest driver to process tasks using your computer's GPU",
+ "not started and deadline has passed",
+ "Project requested delay of",
]
for message in messages:
- uppered_body=message['body'].upper()
+ uppered_body = message["body"].upper()
if project_name.upper() not in str(message).upper():
continue
- difference = datetime.datetime.now() - message['time']
- if difference.seconds>60*5: # if message is > 5 min old, skip
+ difference = datetime.datetime.now() - message["time"]
+ if difference.seconds > 60 * 5: # if message is > 5 min old, skip
continue
- if ignore_message(message,ignore_phrases):
+ if ignore_message(message, ignore_phrases):
continue
for phrase in positive_phrases:
if phrase.upper() in uppered_body:
- log.debug('Backing off {} bc {} in logs'.format(project_name, phrase))
+ log.debug(
+ "Backing off {} bc {} in logs".format(project_name, phrase)
+ )
return True
for phrase in negative_phrases:
if phrase.upper() in uppered_body:
return False
- if 'NEEDS' in uppered_body and 'BUT ONLY' in uppered_body and 'IS AVAILABLE FOR USE' in uppered_body:
- log.debug('Backing off {} bc NEEDS BUT ONLY AVAILABLE FOR USE in logs'.format(project_name), 'DEBUG')
+ if (
+ "NEEDS" in uppered_body
+ and "BUT ONLY" in uppered_body
+ and "IS AVAILABLE FOR USE" in uppered_body
+ ):
+ log.debug(
+ "Backing off {} bc NEEDS BUT ONLY AVAILABLE FOR USE in logs".format(
+ project_name
+ ),
+ "DEBUG",
+ )
return True
- log.warning('Found unknown messagex: {}'.format(message['body']))
- log.warning('Unable to determine if project {} should be backed off, assuming no'.format(project_name))
+ log.warning("Found unknown messagex: {}".format(message["body"]))
+ log.warning(
+ "Unable to determine if project {} should be backed off, assuming no".format(
+ project_name
+ )
+ )
return False
# Get message count
- req = ET.Element('get_message_count')
+ req = ET.Element("get_message_count")
msg_count_response = await rpc_client._request(req)
message_count = int(parse_generic(msg_count_response))
- req = ET.Element('get_messages')
- a = ET.SubElement(req, 'seqno')
- a.text = str(message_count-50) # get ten most recent messages
+ req = ET.Element("get_messages")
+ a = ET.SubElement(req, "seqno")
+ a.text = str(message_count - 50) # get ten most recent messages
messages_response = await rpc_client._request(req)
messages = parse_generic(messages_response) # returns True if successful
- if project_name.upper()=='GPUGRID.NET':
- project_name='GPUGRID' # fix for log entries which show up under different name
- return project_backoff(project_name,messages)
-async def get_all_projects(rpc_client: libs.pyboinc.rpc_client)->Dict[str, str]:
+ if project_name.upper() == "GPUGRID.NET":
+ project_name = (
+ "GPUGRID" # fix for log entries which show up under different name
+ )
+ return project_backoff(project_name, messages)
+
+
+async def get_all_projects(rpc_client: libs.pyboinc.rpc_client) -> Dict[str, str]:
"""
Get ALL projects the BOINC client knows about, even if unattached
"""
- req = ET.Element('get_all_projects_list')
+ req = ET.Element("get_all_projects_list")
messages_response = await rpc_client._request(req)
- project_status_reply = parse_generic(messages_response) # returns True if successful
+ project_status_reply = parse_generic(
+ messages_response
+ ) # returns True if successful
found_projects = []
- project_names={}
+ project_names = {}
for project in project_status_reply:
- project_names[project['url']]=project['name']
- project_names['https://gene.disi.unitn.it/test/']='TN-Grid' # added bc BOINC client does not list this project for some reason
+ project_names[project["url"]] = project["name"]
+ project_names[
+ "https://gene.disi.unitn.it/test/"
+ ] = "TN-Grid" # added bc BOINC client does not list this project for some reason
return project_names
-async def get_attached_projects(rpc_client: libs.pyboinc.rpc_client)->Tuple[List[str], Dict[str, str]]:
+
+
+async def get_attached_projects(
+ rpc_client: libs.pyboinc.rpc_client,
+) -> Tuple[List[str], Dict[str, str]]:
project_status_reply = await rpc_client.get_project_status()
found_projects = []
- project_names={}
+ project_names = {}
for project in project_status_reply:
found_projects.append(project.master_url)
- if isinstance(project.project_name,bool): # this happens if project is "attached" but unable to communicate w project due to it being down or some other issue
+ if isinstance(
+ project.project_name, bool
+ ): # this happens if project is "attached" but unable to communicate w project due to it being down or some other issue
project_names[project.master_url] = project.master_url
else:
- project_names[project.master_url]=project.project_name
- return found_projects,project_names
-async def verify_boinc_connection(rpc_client:libs.pyboinc.rpc_client)->bool:
+ project_names[project.master_url] = project.project_name
+ return found_projects, project_names
+
+
+async def verify_boinc_connection(rpc_client: libs.pyboinc.rpc_client) -> bool:
"""
Checks if a BOINC client can be connected to and authorized.
Returns True if it can, False if it can't.
"""
authorize_response = await rpc_client.authorize()
- req = ET.Element('get_global_prefs_working')
+ req = ET.Element("get_global_prefs_working")
response = await rpc_client._request(req)
- if 'unauthorized' in str(response):
+ if "unauthorized" in str(response):
return False
return True
-async def prefs_check(rpc_client: libs.pyboinc.rpc_client)->dict:
+
+
+async def prefs_check(rpc_client: libs.pyboinc.rpc_client) -> dict:
# authorize BOINC client
authorize_response = await rpc_client.authorize()
# get prefs
- req = ET.Element('get_global_prefs_working')
+ req = ET.Element("get_global_prefs_working")
response = await rpc_client._request(req)
parsed = parse_generic(response) # returns True if successful
# get actual disk usage
- req = ET.Element('get_disk_usage')
+ req = ET.Element("get_disk_usage")
response = await rpc_client._request(req)
usage = parse_generic(response) # returns True if successful
- max_gb=int(float(parsed.get('disk_max_used_gb',0)))
- used_max_gb=int(int(usage['d_allowed'])/1024/1024/1024)
- if (max_gb<10 and max_gb!=0) or used_max_gb<9.5:
- print("BOINC is configured to use less than 10GB, this tool will not run with <10GB allocated in order to prevent requesting base project files from projects too often.")
- log.error("BOINC is configured to use less than 10GB, this tool will not run with <10GB allocated in order to prevent requesting base project files from projects too often.")
- print('If you have configured BOINC to be able to use >=10GB and still get this message, it is because you are low on disk space and BOINC is responding to settings such as "don\'t use greater than X% of space" or "leave x% free"')
+ max_gb = int(float(parsed.get("disk_max_used_gb", 0)))
+ used_max_gb = int(int(usage["d_allowed"]) / 1024 / 1024 / 1024)
+ if (max_gb < 10 and max_gb != 0) or used_max_gb < 9.5:
+ print(
+ "BOINC is configured to use less than 10GB, this tool will not run with <10GB allocated in order to prevent requesting base project files from projects too often."
+ )
log.error(
- 'If you have configured BOINC to be able to use >=10GB and still get this message, it is because you are low on disk space and BOINC is responding to settings such as "don\'t use greater than X% of space" or "leave x% free"')
- print('Press enter to quit')
+ "BOINC is configured to use less than 10GB, this tool will not run with <10GB allocated in order to prevent requesting base project files from projects too often."
+ )
+ print(
+ 'If you have configured BOINC to be able to use >=10GB and still get this message, it is because you are low on disk space and BOINC is responding to settings such as "don\'t use greater than X% of space" or "leave x% free"'
+ )
+ log.error(
+ 'If you have configured BOINC to be able to use >=10GB and still get this message, it is because you are low on disk space and BOINC is responding to settings such as "don\'t use greater than X% of space" or "leave x% free"'
+ )
+ print("Press enter to quit")
input()
quit()
- net_start_hour=int(float(parsed['net_start_hour']))+int(float(parsed['net_end_hour']))
- if net_start_hour!=0:
+ net_start_hour = int(float(parsed["net_start_hour"])) + int(
+ float(parsed["net_end_hour"])
+ )
+ if net_start_hour != 0:
print(
- 'You have BOINC configured to only access the network at certain times, this tool requires constant '
- 'internet availability.')
+ "You have BOINC configured to only access the network at certain times, this tool requires constant "
+ "internet availability."
+ )
log.error(
- 'You have BOINC configured to only access the network at certain times, this tool requires constant '
- 'internet availability.')
- print('Press enter to quit')
+ "You have BOINC configured to only access the network at certain times, this tool requires constant "
+ "internet availability."
+ )
+ print("Press enter to quit")
input()
return parsed
-def get_highest_priority_project(combined_stats:dict,project_weights:Dict[str,int],min_recheck_time=min_recheck_time,attached_projects:List[str]=None,quiet:bool=False)->Tuple[List[str],Dict[str,float]]:
+
+
+def get_highest_priority_project(
+ combined_stats: dict,
+ project_weights: Dict[str, int],
+ min_recheck_time=min_recheck_time,
+ attached_projects: List[str] = None,
+ quiet: bool = False,
+) -> Tuple[List[str], Dict[str, float]]:
"""
Given STATS, return list of projects sorted by priority. Note that "benchmark" projects are compared to TOTAL time
while others are compared to windowed time specific by user
"""
if not attached_projects:
- attached_projects=[]
- priority_dict={}
+ attached_projects = []
+ priority_dict = {}
# calculate total time from stats
- total_xday_time=0
- total_time=0
+ total_xday_time = 0
+ total_time = 0
for found_key, projectstats in combined_stats.items():
- total_xday_time+=projectstats['COMPILED_STATS']['XDAYWALLTIME']
- total_time += projectstats['COMPILED_STATS']['TOTALWALLTIME']
- #print('Calculating project weights: total time is {}'.format(total_xday_time))
- log.debug('Calculating project weights: total time is {}'.format(total_xday_time))
- for project,weight in project_weights.items():
- if not in_list(project,attached_projects):
- log.debug('skipping project bc not attached {}'.format(project))
+ total_xday_time += projectstats["COMPILED_STATS"]["XDAYWALLTIME"]
+ total_time += projectstats["COMPILED_STATS"]["TOTALWALLTIME"]
+ # print('Calculating project weights: total time is {}'.format(total_xday_time))
+ log.debug("Calculating project weights: total time is {}".format(total_xday_time))
+ for project, weight in project_weights.items():
+ if not in_list(project, attached_projects):
+ log.debug("skipping project bc not attached {}".format(project))
continue
- combined_stats_extract=get_project_from_dict(project, combined_stats,'searching combined_stats in get_highest_priority_project')
+ combined_stats_extract = get_project_from_dict(
+ project,
+ combined_stats,
+ "searching combined_stats in get_highest_priority_project",
+ )
if not combined_stats_extract:
if not quiet:
- print('Warning: {} not found in stats, assuming not attached. You can safely ignore this warning w/ a new BOINC install which has not received credit on this project yet '.format(project))
- log.warning('Warning: {} not found in stats, assuming not attached You can safely ignore this warning w/ a new BOINC install which has not received credit on this project yet '.format(project))
- existing_time=0
+ print(
+ "Warning: {} not found in stats, assuming not attached. You can safely ignore this warning w/ a new BOINC install which has not received credit on this project yet ".format(
+ project
+ )
+ )
+ log.warning(
+ "Warning: {} not found in stats, assuming not attached You can safely ignore this warning w/ a new BOINC install which has not received credit on this project yet ".format(
+ project
+ )
+ )
+ existing_time = 0
else:
- if weight==1: # benchmarking projects should be over ALL time not just recent time
- existing_time = combined_stats_extract['COMPILED_STATS']['TOTALWALLTIME']
+ if (
+ weight == 1
+ ): # benchmarking projects should be over ALL time not just recent time
+ existing_time = combined_stats_extract["COMPILED_STATS"][
+ "TOTALWALLTIME"
+ ]
else:
- existing_time=combined_stats_extract['COMPILED_STATS']['XDAYWALLTIME']
- if weight==1:
+ existing_time = combined_stats_extract["COMPILED_STATS"]["XDAYWALLTIME"]
+ if weight == 1:
target_time = existing_time - (total_time * (weight / 1000))
else:
- target_time=existing_time-(total_xday_time*(weight/1000))
- priority_dict[project]=round(target_time/60/60,2)
+ target_time = existing_time - (total_xday_time * (weight / 1000))
+ priority_dict[project] = round(target_time / 60 / 60, 2)
log.debug(
- 'Project is {} weight is {} existing time is {} so time delta is {}(s) or {}(h)'.format(project,
- weight,
- existing_time,
- target_time,
- round(
- target_time / 60 / 60,
- 4)))
- if len(priority_dict)>0:
- return sorted(priority_dict,key=priority_dict.get),priority_dict
+ "Project is {} weight is {} existing time is {} so time delta is {}(s) or {}(h)".format(
+ project,
+ weight,
+ existing_time,
+ target_time,
+ round(target_time / 60 / 60, 4),
+ )
+ )
+ if len(priority_dict) > 0:
+ return sorted(priority_dict, key=priority_dict.get), priority_dict
else:
- print('Error: Unable to find a highest priority project, ? Sleeping for 10 min')
+ print("Error: Unable to find a highest priority project, ? Sleeping for 10 min")
log.error(
- 'Unable to find a highest priority project, maybe all have been checked recently? Sleeping for 10 min')
- return [],{}
-def get_project_mag_ratios_from_url(lookback_period: int = 30,project_resolver_dict:Dict[str,str]=None) -> Union[Dict[str, float],None]:
+ "Unable to find a highest priority project, maybe all have been checked recently? Sleeping for 10 min"
+ )
+ return [], {}
+
+
+def get_project_mag_ratios_from_url(
+ lookback_period: int = 30, project_resolver_dict: Dict[str, str] = None
+) -> Union[Dict[str, float], None]:
"""
:param lookback_period: number of superblocks to look back to determine average
:return: Dictionary w/ key as project URL and value as project mag ratio (mag per unit of RAC)
"""
- def project_name_to_url(searchname:str,project_resolver_dict:Dict[str,str])->Union[str,None]:
+
+ def project_name_to_url(
+ searchname: str, project_resolver_dict: Dict[str, str]
+ ) -> Union[str, None]:
all_projects = project_resolver_dict
for found_project_name, project_url in project_resolver_dict.items():
- if found_project_name.upper()==searchname.upper():
+ if found_project_name.upper() == searchname.upper():
return project_url.upper()
return None
+
import requests as req
import json
+
projects = {}
return_dict = {}
mag_per_project = 0
- url='https://www.gridcoinstats.eu/API/simpleQuery.php?q=superblocks'
+ url = "https://www.gridcoinstats.eu/API/simpleQuery.php?q=superblocks"
try:
resp = req.get(url)
except Exception as e:
- print('Error retrieving project mag ratios from gridcoinstats.eu')
+ print("Error retrieving project mag ratios from gridcoinstats.eu")
return None
- loaded_json=json.loads(resp.text)
+ loaded_json = json.loads(resp.text)
for i in range(0, lookback_period):
superblock = loaded_json[i]
if i == 0:
- total_magnitude = superblock['total_magnitude']
- total_projects = superblock['total_projects']
+ total_magnitude = superblock["total_magnitude"]
+ total_projects = superblock["total_projects"]
mag_per_project = total_magnitude / total_projects
- for project_name, project_stats in superblock['Contract Contents']['projects'].items():
+ for project_name, project_stats in superblock["Contract Contents"][
+ "projects"
+ ].items():
if project_name not in projects:
if i == 0:
projects[project_name] = []
else:
continue # skip projects which are on greylist
- projects[project_name].append(project_stats['rac'])
+ projects[project_name].append(project_stats["rac"])
for project_name, project_racs in projects.items():
average_rac = sum(project_racs) / len(project_racs)
- project_url = project_name_to_url(project_name,project_resolver_dict)
+ project_url = project_name_to_url(project_name, project_resolver_dict)
return_dict[project_url] = mag_per_project / average_rac
return return_dict
-def url_to_just_domain_and_path(url:str)->str:
- cleaned_url = url.upper().replace('HTTPS://', '').replace('HTTP://', '').replace('WWW.', '')
- if cleaned_url.endswith('/'):
- cleaned_project_url=cleaned_url[:-1]
+
+def url_to_just_domain_and_path(url: str) -> str:
+ cleaned_url = (
+ url.upper().replace("HTTPS://", "").replace("HTTP://", "").replace("WWW.", "")
+ )
+ if cleaned_url.endswith("/"):
+ cleaned_project_url = cleaned_url[:-1]
return cleaned_url
-def get_project_from_dict(project_url:str, combined_stats:dict,debug_notes:str='')->Union[dict,int,None]:
+
+
+def get_project_from_dict(
+ project_url: str, combined_stats: dict, debug_notes: str = ""
+) -> Union[dict, int, None]:
"""
project_url: A project URL to search for
combined_stats: Dict to search through w/ project urls as keys. Can be ANY dict
@@ -1660,121 +2266,196 @@ def get_project_from_dict(project_url:str, combined_stats:dict,debug_notes:str='
Given a dict in format {projectname1:someinformation,projectname2:someinformation}, canonicalize project name by removing http/s/www
and trailing slashes, ignoring capitalization, and returning someinformation, otherwise return none
"""
- cleaned_project_url=url_to_just_domain_and_path(project_url)
- if cleaned_project_url.endswith('/'):
- cleaned_project_url=cleaned_project_url[:-1]
+ cleaned_project_url = url_to_just_domain_and_path(project_url)
+ if cleaned_project_url.endswith("/"):
+ cleaned_project_url = cleaned_project_url[:-1]
for found_project in combined_stats:
- if cleaned_project_url==found_project.upper() or cleaned_project_url in found_project.upper():
+ if (
+ cleaned_project_url == found_project.upper()
+ or cleaned_project_url in found_project.upper()
+ ):
return combined_stats[found_project]
return None
-def profitability_check(grc_price:float,exchange_fee:float,host_power_usage:float,grc_sell_price:Union[None,float],local_kwh:float,project:str,min_profit_per_hour:float,combined_stats:dict)->bool:
+
+def profitability_check(
+ grc_price: float,
+ exchange_fee: float,
+ host_power_usage: float,
+ grc_sell_price: Union[None, float],
+ local_kwh: float,
+ project: str,
+ min_profit_per_hour: float,
+ combined_stats: dict,
+) -> bool:
"""
Returns True if crunching is profitable right now. False otherwise.
"""
if not grc_sell_price:
- grc_sell_price=0.00
- combined_stats_extract=get_project_from_dict(project, combined_stats,'searching combined_stats in profitability_check')
+ grc_sell_price = 0.00
+ combined_stats_extract = get_project_from_dict(
+ project, combined_stats, "searching combined_stats in profitability_check"
+ )
if not combined_stats_extract:
- log.error('Error: Unable to calculate profitability for project {} bc we have no stats for it'.format(project))
+ log.error(
+ "Error: Unable to calculate profitability for project {} bc we have no stats for it".format(
+ project
+ )
+ )
return False
- revenue_per_hour = combined_stats_extract['COMPILED_STATS']['AVGMAGPERHOUR']/4 * max(grc_price,grc_sell_price)
- exchange_expenses = revenue_per_hour*exchange_fee
+ revenue_per_hour = (
+ combined_stats_extract["COMPILED_STATS"]["AVGMAGPERHOUR"]
+ / 4
+ * max(grc_price, grc_sell_price)
+ )
+ exchange_expenses = revenue_per_hour * exchange_fee
expenses_per_hour = exchange_expenses + HOST_COST_PER_HOUR
profit = revenue_per_hour - expenses_per_hour
- if profit>min_profit_per_hour:
- log.debug('Determined project {} is profitable. Rev is {} expenses is {} profit is {}'.format(project,revenue_per_hour,expenses_per_hour,profit))
+ if profit > min_profit_per_hour:
+ log.debug(
+ "Determined project {} is profitable. Rev is {} expenses is {} profit is {}".format(
+ project, revenue_per_hour, expenses_per_hour, profit
+ )
+ )
return True
- log.debug('Determined project {} is NOT profitable. Rev is {} expenses is {} profit is {}'.format(project, revenue_per_hour,
- expenses_per_hour,
- profit))
+ log.debug(
+ "Determined project {} is NOT profitable. Rev is {} expenses is {} profit is {}".format(
+ project, revenue_per_hour, expenses_per_hour, profit
+ )
+ )
return False
-def in_preferred_projects(projecturl:str,preferred_projects:Dict[str,float]):
- cleaned_search_url=projecturl.upper().replace('HTTP://','').replace('HTTPS://','')
+
+
+def in_preferred_projects(projecturl: str, preferred_projects: Dict[str, float]):
+ cleaned_search_url = (
+ projecturl.upper().replace("HTTP://", "").replace("HTTPS://", "")
+ )
for found_project in preferred_projects:
- cleaned_found=found_project.upper().replace('HTTP://','').replace('HTTPS://','')
- if cleaned_search_url in cleaned_found or cleaned_search_url==cleaned_found:
+ cleaned_found = (
+ found_project.upper().replace("HTTP://", "").replace("HTTPS://", "")
+ )
+ if cleaned_search_url in cleaned_found or cleaned_search_url == cleaned_found:
return True
return False
-def benchmark_check(project_url:str,combined_stats:dict,benchmarking_minimum_wus:float,benchmarking_minimum_time:float,benchmarking_delay_in_days:float,skip_benchmarking:bool)->bool:
+
+
+def benchmark_check(
+ project_url: str,
+ combined_stats: dict,
+ benchmarking_minimum_wus: float,
+ benchmarking_minimum_time: float,
+ benchmarking_delay_in_days: float,
+ skip_benchmarking: bool,
+) -> bool:
"""
Returns True if we should force crunch this project for benchmarking reasons. False otherwise
"""
- def date_to_date(date:str)->datetime.datetime:
+
+ def date_to_date(date: str) -> datetime.datetime:
"""
Convert date from str to datetime
"""
- split=date.split('-')
- return datetime.datetime(int(split[2]),int(split[0]),int(split[1]))
+ split = date.split("-")
+ return datetime.datetime(int(split[2]), int(split[0]), int(split[1]))
+
if skip_benchmarking:
return False
- combined_stats_extract=get_project_from_dict(project_url,combined_stats,'searching combined_stats in benchmark_check')
+ combined_stats_extract = get_project_from_dict(
+ project_url, combined_stats, "searching combined_stats in benchmark_check"
+ )
if not combined_stats_extract:
- log.error('Unable to find project in benchmark_check'.format(project_url))
+ log.error("Unable to find project in benchmark_check".format(project_url))
return True
- if combined_stats_extract.get('COMPILED_STATS',{}).get('TOTALWALLTIME',0)latest_date:
- latest_date=datetimed
- delta=datetime.datetime.now() - latest_date
+ latest_date = datetime.datetime(1993, 1, 1)
+ for date in combined_stats_extract["WU_HISTORY"]:
+ datetimed = date_to_date(date)
+ if datetimed > latest_date:
+ latest_date = datetimed
+ delta = datetime.datetime.now() - latest_date
if abs(delta.days) > benchmarking_delay_in_days:
- log.debug('Forcing WU fetch on {} due to benchmarking_delay_in_days'.format(project_url))
+ log.debug(
+ "Forcing WU fetch on {} due to benchmarking_delay_in_days".format(
+ project_url
+ )
+ )
return True
return False
-def save_stats(database:dict):
- with open('stats.json', 'w') as fp:
+
+
+def save_stats(database: dict):
+ with open("stats.json", "w") as fp:
json.dump(database, fp, default=json_default)
-def custom_sleep(sleep_time:float,boinc_rpc_client,dev_loop:bool=False):
+
+
+def custom_sleep(sleep_time: float, boinc_rpc_client, dev_loop: bool = False):
"""
A function to sleep and update the DEVTIMECOUNTER
sleep_time: duration in minutes to sleep
dev_loop: True if we are in dev loop
"""
- log.debug('Sleeping for {}...'.format(sleep_time))
- elapsed=0
+ log.debug("Sleeping for {}...".format(sleep_time))
+ elapsed = 0
while elapsed < sleep_time:
sleep(60)
if loop.run_until_complete(is_boinc_crunching(boinc_rpc_client)):
if dev_loop:
- DATABASE['DEVTIMECOUNTER'] -= 1
- DATABASE['DEVTIMETOTAL'] +=1
+ DATABASE["DEVTIMECOUNTER"] -= 1
+ DATABASE["DEVTIMETOTAL"] += 1
else:
- DATABASE['DEVTIMECOUNTER'] += max(dev_fee,.01)
- DATABASE['FTMTOTAL'] += 1
+ DATABASE["DEVTIMECOUNTER"] += max(dev_fee, 0.01)
+ DATABASE["FTMTOTAL"] += 1
# save database every ten minutes or at end of routine
- if str(elapsed).endswith('0') or elapsed + 1 >= sleep_time:
+ if str(elapsed).endswith("0") or elapsed + 1 >= sleep_time:
save_stats(DATABASE)
- elapsed+=1
+ elapsed += 1
+
def json_default(obj):
"""
For serializing datetimes to json
"""
if isinstance(obj, datetime.datetime):
- return { '_isoformat': obj.isoformat() }
- raise TypeError('...')
+ return {"_isoformat": obj.isoformat()}
+ raise TypeError("...")
+
-def get_avg_mag_hr(combined_stats:dict)->float:
+def get_avg_mag_hr(combined_stats: dict) -> float:
"""
Get average mag/hr over all projects to date
"""
found_mag = []
found_time = []
for project_url, stats in combined_stats.items():
- total_hours = stats['COMPILED_STATS']['TOTALWALLTIME']
- total_mag = stats['COMPILED_STATS']['TOTALWALLTIME'] * stats['COMPILED_STATS']['AVGMAGPERHOUR']
+ total_hours = stats["COMPILED_STATS"]["TOTALWALLTIME"]
+ total_mag = (
+ stats["COMPILED_STATS"]["TOTALWALLTIME"]
+ * stats["COMPILED_STATS"]["AVGMAGPERHOUR"]
+ )
found_mag.append(total_mag)
found_time.append(total_hours)
- found_sum=sum(found_time)
- found_mag=sum(found_mag)
- if found_sum==0 or found_mag==0:
+ found_sum = sum(found_time)
+ found_mag = sum(found_mag)
+ if found_sum == 0 or found_mag == 0:
return 0
average = found_mag / found_sum
return average
@@ -1784,102 +2465,136 @@ def object_hook(obj):
"""
For de-serializing datetimes from json
"""
- _isoformat = obj.get('_isoformat')
+ _isoformat = obj.get("_isoformat")
if _isoformat is not None:
return datetime.datetime.fromisoformat(_isoformat)
return obj
-def setup_dev_boinc()->str:
+
+
+def setup_dev_boinc() -> str:
"""
Do initial setup of and start dev boinc client. Returns RPC password or 'ERROR' if unable to start BOINC
"""
# check if dev BOINC directory exists
## create if it doesn't
# start BOINC
- dev_path = os.path.abspath('DEVACCOUNT')
- boinc_executable = '/usr/bin/boinc'
- if 'WINDOWS' in found_platform.upper():
- boinc_executable='C:\\Program Files\\BOINC\\boinc.exe'
- elif 'DARWIN' in found_platform.upper():
- boinc_executable='/Applications/BOINCManager.app/Contents/resources/boinc'
- if not os.path.exists('DEVACCOUNT'):
+ dev_path = os.path.abspath("DEVACCOUNT")
+ boinc_executable = "/usr/bin/boinc"
+ if "WINDOWS" in found_platform.upper():
+ boinc_executable = "C:\\Program Files\\BOINC\\boinc.exe"
+ elif "DARWIN" in found_platform.upper():
+ boinc_executable = "/Applications/BOINCManager.app/Contents/resources/boinc"
+ if not os.path.exists("DEVACCOUNT"):
os.mkdir(dev_path)
# update settings to match user settings from main BOINC install
- global_settings_path=os.path.join(boinc_data_dir,'global_prefs.xml')
- override_path=os.path.join(boinc_data_dir,'global_prefs_override.xml')
- override_dest_path=os.path.join(os.path.join(os.getcwd(),'DEVACCOUNT'),'global_prefs_override.xml')
- shutil.copy(global_settings_path,'DEVACCOUNT')
+ global_settings_path = os.path.join(boinc_data_dir, "global_prefs.xml")
+ override_path = os.path.join(boinc_data_dir, "global_prefs_override.xml")
+ override_dest_path = os.path.join(
+ os.path.join(os.getcwd(), "DEVACCOUNT"), "global_prefs_override.xml"
+ )
+ shutil.copy(global_settings_path, "DEVACCOUNT")
if os.path.exists(override_path):
- shutil.copy(override_path, 'DEVACCOUNT')
+ shutil.copy(override_path, "DEVACCOUNT")
# Read in the file
- with open(override_dest_path, 'r') as file:
+ with open(override_dest_path, "r") as file:
filedata = file.read()
# Replace the target string
- if '' in filedata:
- filedata=re.sub('[^<]*','5.000000',filedata)
+ if "" in filedata:
+ filedata = re.sub(
+ "[^<]*",
+ "5.000000",
+ filedata,
+ )
else:
- filedata=filedata.replace('','5.000000')
+ filedata = filedata.replace(
+ "",
+ "5.000000",
+ )
# Write the file out again
- with open(override_dest_path, 'w') as file:
+ with open(override_dest_path, "w") as file:
file.write(filedata)
else:
text_file = open(override_dest_path, "w")
- n = text_file.write('5.000000')
+ n = text_file.write(
+ "5.000000"
+ )
text_file.close()
- boinc_arguments=[boinc_executable,'--allow_multiple_clients','--dir',dev_path,'--gui_rpc_port',str(DEV_RPC_PORT)]
+ boinc_arguments = [
+ boinc_executable,
+ "--allow_multiple_clients",
+ "--dir",
+ dev_path,
+ "--gui_rpc_port",
+ str(DEV_RPC_PORT),
+ ]
try:
- boinc_result=subprocess.Popen(boinc_arguments,stderr=subprocess.DEVNULL,stdout=subprocess.DEVNULL)
+ boinc_result = subprocess.Popen(
+ boinc_arguments, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL
+ )
except Exception as e:
- print('Error launching client for dev crunching {}'.format(e))
- log.error('Error launching client for dev crunching {}'.format(e))
- return 'ERROR'
+ print("Error launching client for dev crunching {}".format(e))
+ log.error("Error launching client for dev crunching {}".format(e))
+ return "ERROR"
sleep(6)
- auth_location = os.path.join(dev_path, 'gui_rpc_auth.cfg')
+ auth_location = os.path.join(dev_path, "gui_rpc_auth.cfg")
try:
if os.path.exists(auth_location):
- with open(auth_location, 'r') as file:
+ with open(auth_location, "r") as file:
data = file.read().rstrip()
- if data != '':
+ if data != "":
boinc_password = data
else:
- boinc_password=''
+ boinc_password = ""
except Exception as e:
# This error can generally be disregarded on Linux/OSX
- if 'WINDOWS' in found_platform.upper():
- print('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
- log.error('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
+ if "WINDOWS" in found_platform.upper():
+ print("Error reading boinc RPC file at {}: {}".format(auth_location, e))
+ log.error("Error reading boinc RPC file at {}: {}".format(auth_location, e))
else:
- log.debug('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
+ log.debug("Error reading boinc RPC file at {}: {}".format(auth_location, e))
return boinc_password
-def project_to_dev_project(url:str,dev_projects:Dict[str,str])->str:
+
+
+def project_to_dev_project(url: str, dev_projects: Dict[str, str]) -> str:
"""
Convert a URL to a URL which can be found in DEV_PROJECT_DICT
"""
for project in dev_projects:
- if url.upper().replace('HTTPS://','').replace('HTTP://','')==project.upper().replace('HTTPS://','').replace('HTTP://',''):
+ if url.upper().replace("HTTPS://", "").replace(
+ "HTTP://", ""
+ ) == project.upper().replace("HTTPS://", "").replace("HTTP://", ""):
return project
return url
-def project_in_list_check(url:str,project_list:List[str]):
+
+
+def project_in_list_check(url: str, project_list: List[str]):
"""
Case-insensitive way to check if URL is in list of URLs
"""
- cleaned=url.upper().replace('HTTPS://','').replace('HTTP://','')
+ cleaned = url.upper().replace("HTTPS://", "").replace("HTTP://", "")
for project in project_list:
- cleaned_project=project.upper().replace('HTTPS://','').replace('HTTP://','')
- if cleaned==project:
+ cleaned_project = project.upper().replace("HTTPS://", "").replace("HTTP://", "")
+ if cleaned == cleaned_project:
return True
if cleaned in project:
return True
return False
-def project_list_to_project_list(project_list:List[dict])->List[str]:
+
+
+def project_list_to_project_list(project_list: List[dict]) -> List[str]:
"""
Convert get_project_list into a list of project URLs so we can perform 'in' comparisons
"""
- return_list=[]
+ return_list = []
for project in project_list:
- return_list.append(project['master_url'])
+ return_list.append(project["master_url"])
return return_list
-def boinc_loop(dev_loop:bool=False,rpc_client=None,client_rpc_client=None,time:int=0):
+
+
+def boinc_loop(
+ dev_loop: bool = False, rpc_client=None, client_rpc_client=None, time: int = 0
+):
"""
Main routine which manages BOINC
:param dev_loop: set to True if we are crunching for developer
@@ -1888,7 +2603,7 @@ def boinc_loop(dev_loop:bool=False,rpc_client=None,client_rpc_client=None,time:i
:param time How long to crunch for. Only used by dev mode at the moment
"""
if not client_rpc_client:
- client_rpc_client=rpc_client
+ client_rpc_client = rpc_client
# these variables are referenced outside the loop (or in recursive calls of the loop) so should be made global
global combined_stats
global final_project_weights
@@ -1900,47 +2615,61 @@ def boinc_loop(dev_loop:bool=False,rpc_client=None,client_rpc_client=None,time:i
global DEV_BOINC_PASSWORD
global DEV_LOOP_RUNNING
if dev_loop:
- mode='DEV'
+ mode = "DEV"
else:
- mode='CLIENT'
+ mode = "CLIENT"
if mode not in DATABASE:
- DATABASE[mode]={}
+ DATABASE[mode] = {}
def should_crunch_for_dev() -> bool:
if dev_loop:
- log.debug('Should not start dev crunching bc already in dev loop')
+ log.debug("Should not start dev crunching bc already in dev loop")
return False
if check_sidestake_results:
- log.debug('Should skip dev mode bc check_sidestake_results')
+ log.debug("Should skip dev mode bc check_sidestake_results")
return False
if FORCE_DEV_MODE:
- log.debug('Should start dev crunching bc FORCE_DEV_MODE')
+ log.debug("Should start dev crunching bc FORCE_DEV_MODE")
return True
- dev_time_counter_in_hours=max(DATABASE.get('DEVTIMECOUNTER', 0), 1) / 60
+ dev_time_counter_in_hours = max(DATABASE.get("DEVTIMECOUNTER", 0), 1) / 60
if dev_time_counter_in_hours > 100:
- log.debug('Should start dev crunching due to time counter: {}'.format(dev_time_counter_in_hours))
+ log.debug(
+ "Should start dev crunching due to time counter: {}".format(
+ dev_time_counter_in_hours
+ )
+ )
return True
- log.debug('Should not start dev crunching, current counter is: {}'.format(dev_time_counter_in_hours))
+ log.debug(
+ "Should not start dev crunching, current counter is: {}".format(
+ dev_time_counter_in_hours
+ )
+ )
return False
+
# Note yoyo@home does not support weak auth so it can't be added here
- DEV_PROJECT_DICT={
- 'HTTPS://SECH.ME/BOINC/AMICABLE/':'48989_50328a1561506cd0dcd10476106fda82',
- 'HTTPS://ASTEROIDSATHOME.NET/BOINC/':'476179_e114636a09b4d451daacc9488c1f3b83',
- 'HTTPS://EINSTEINATHOME.ORG/':'1043421_4a19901b420ccc1aab1df9021e59e5ee',
- 'HTTPS://EINSTEIN.PHYS.UWM.EDU/':'1043421_4a19901b420ccc1aab1df9021e59e5ee',
- 'HTTPS://WWW.GPUGRID.NET/':'575631_e05399c996a36746d603d0edcc6fdcb2',
- 'HTTPS://MILKYWAY.CS.RPI.EDU/MILKYWAY/':'3206506_1ff09dd6be13aabc509b535934de40f8',
- 'HTTPS://ESCATTER11.FULLERTON.EDU/NFS/':'2583967_720a4730bb15ae246935cf911d496ba3',
- 'HTTPS://NUMBERFIELDS.ASU.EDU/NUMBERFIELDS/':'860933_e543b27624d04eea09d74f2cf39afd31',
- 'HTTPS://BOINC.MULTI-POOL.INFO/LATINSQUARES/':'33541_de185e7045673c1485d16148683c22d9',
- 'HTTPS://BOINC.BAKERLAB.ORG/ROSETTA/':'2382329_f817670777925b63d7090fa50ac11e0b',
- 'HTTPS://SRBASE.MY-FIREWALL.ORG/SR5/':'2909_3675395fe23cc846696609fc72114b17',
- 'HTTPS://WWW.SIDOCK.SI/SIDOCK':'9302_bfbe2dcf1bc6e6f50fbc4c67841e8624',
- 'HTTPS://GENE.DISI.UNITN.IT/TEST/':'3813_54b95766c943370c5517bce39742b4fe',
- 'HTTPS://UNIVERSEATHOME.PL/UNIVERSE/':'239667_e7bfb47b3f94750632796b03b2bc7954',
- 'HTTPS://WWW.WORLDCOMMUNITYGRID.ORG':'1156028_7f2601c3a6dc1b1b9f7eb99261db96f0',
+ DEV_PROJECT_DICT = {
+ "HTTPS://SECH.ME/BOINC/AMICABLE/": "48989_50328a1561506cd0dcd10476106fda82",
+ "HTTPS://ASTEROIDSATHOME.NET/BOINC/": "476179_e114636a09b4d451daacc9488c1f3b83",
+ "HTTPS://EINSTEINATHOME.ORG/": "1043421_4a19901b420ccc1aab1df9021e59e5ee",
+ "HTTPS://EINSTEIN.PHYS.UWM.EDU/": "1043421_4a19901b420ccc1aab1df9021e59e5ee",
+ "HTTPS://WWW.GPUGRID.NET/": "575631_e05399c996a36746d603d0edcc6fdcb2",
+ "HTTPS://MILKYWAY.CS.RPI.EDU/MILKYWAY/": "3206506_1ff09dd6be13aabc509b535934de40f8",
+ "HTTPS://ESCATTER11.FULLERTON.EDU/NFS/": "2583967_720a4730bb15ae246935cf911d496ba3",
+ "HTTPS://NUMBERFIELDS.ASU.EDU/NUMBERFIELDS/": "860933_e543b27624d04eea09d74f2cf39afd31",
+ "HTTPS://BOINC.MULTI-POOL.INFO/LATINSQUARES/": "33541_de185e7045673c1485d16148683c22d9",
+ "HTTPS://BOINC.BAKERLAB.ORG/ROSETTA/": "2382329_f817670777925b63d7090fa50ac11e0b",
+ "HTTPS://SRBASE.MY-FIREWALL.ORG/SR5/": "2909_3675395fe23cc846696609fc72114b17",
+ "HTTPS://WWW.SIDOCK.SI/SIDOCK": "9302_bfbe2dcf1bc6e6f50fbc4c67841e8624",
+ "HTTPS://GENE.DISI.UNITN.IT/TEST/": "3813_54b95766c943370c5517bce39742b4fe",
+ "HTTPS://UNIVERSEATHOME.PL/UNIVERSE/": "239667_e7bfb47b3f94750632796b03b2bc7954",
+ "HTTPS://WWW.WORLDCOMMUNITYGRID.ORG": "1156028_7f2601c3a6dc1b1b9f7eb99261db96f0",
}
- def update_table(sleep_reason:str=DATABASE.get('TABLE_SLEEP_REASON',''), status:str=DATABASE.get('TABLE_STATUS',''),dev_status:bool=False):
+
+ def update_table(
+ sleep_reason: str = DATABASE.get("TABLE_SLEEP_REASON", ""),
+ status: str = DATABASE.get("TABLE_STATUS", ""),
+ dev_status: bool = False,
+ ):
"""
Function to update table printed to user.
:param status = Most recent status "waiting for xfers, starting crunching on x, etc"
@@ -1948,82 +2677,124 @@ def update_table(sleep_reason:str=DATABASE.get('TABLE_SLEEP_REASON',''), status:
# don't update table in dev loop because all our variables reference dev install not main one
if dev_loop or SKIP_TABLE_UPDATES:
return
- rename_dict={
- 'TOTALTASKS':'TASKS',
- 'TOTALTIME(HRS)':'TIME',
- 'TOTALCPUTIME(HRS)':'CPUTIME',
- 'AVGCREDITPERHOUR':'CREDIT/HR',
- 'AVGMAGPERHOUR':'MAG/HR',
- 'XDAYWALLTIME':'R-WTIME',
- 'AVGWALLTIME': 'ATIME',
- 'AVGCREDITPERTASK':'ACPT',
- 'TOTALWALLTIME':'WTIME',
- 'TOTALCPUTIME': 'CPUTIME',
- 'AVGCPUTIME': 'ACTIME'
+ rename_dict = {
+ "TOTALTASKS": "TASKS",
+ "TOTALTIME(HRS)": "TIME",
+ "TOTALCPUTIME(HRS)": "CPUTIME",
+ "AVGCREDITPERHOUR": "CREDIT/HR",
+ "AVGMAGPERHOUR": "MAG/HR",
+ "XDAYWALLTIME": "R-WTIME",
+ "AVGWALLTIME": "ATIME",
+ "AVGCREDITPERTASK": "ACPT",
+ "TOTALWALLTIME": "WTIME",
+ "TOTALCPUTIME": "CPUTIME",
+ "AVGCPUTIME": "ACTIME",
}
- ignore_list=['MAGPERCREDIT']
+ ignore_list = ["MAGPERCREDIT"]
# generate table to print pretty
- os.system('cls' if os.name == 'nt' else 'clear') # clear terminal
+ os.system("cls" if os.name == "nt" else "clear") # clear terminal
table_dict = {}
for project_url, stats_dict in combined_stats.items():
table_dict[project_url] = {}
- priority_results_extract=get_project_from_dict(project_url,priority_results,'searching priority_results in update_table')
+ priority_results_extract = get_project_from_dict(
+ project_url,
+ priority_results,
+ "searching priority_results in update_table",
+ )
if priority_results_extract:
- table_dict[project_url]['HOURSOFF'] = str(round(float(priority_results_extract), 3))
+ table_dict[project_url]["HOURSOFF"] = str(
+ round(float(priority_results_extract), 3)
+ )
else:
- table_dict[project_url]['HOURSOFF'] = str(round(float(0), 3))
- for stat_name, stat_value in stats_dict['COMPILED_STATS'].items():
+ table_dict[project_url]["HOURSOFF"] = str(round(float(0), 3))
+ for stat_name, stat_value in stats_dict["COMPILED_STATS"].items():
if stat_name in ignore_list:
continue
- renamed=stat_name
+ renamed = stat_name
if stat_name in rename_dict:
- renamed=rename_dict[stat_name]
+ renamed = rename_dict[stat_name]
rounding = 2
- if stat_name == 'MAGPERCREDIT':
+ if stat_name == "MAGPERCREDIT":
rounding = 5
- if stat_name=='AVGMAGPERHOUR':
- rounding=3
- table_dict[project_url][renamed] = str(round(float(stat_value), rounding))
- final_project_weights_extract = get_project_from_dict(project_url, final_project_weights,'searching final_project_weights in update_table')
+ if stat_name == "AVGMAGPERHOUR":
+ rounding = 3
+ table_dict[project_url][renamed] = str(
+ round(float(stat_value), rounding)
+ )
+ final_project_weights_extract = get_project_from_dict(
+ project_url,
+ final_project_weights,
+ "searching final_project_weights in update_table",
+ )
if final_project_weights_extract:
- table_dict[project_url]['WEIGHT']=str(final_project_weights_extract)
+ table_dict[project_url]["WEIGHT"] = str(final_project_weights_extract)
else:
- table_dict[project_url]['WEIGHT'] = 'NA'
- print_table(table_dict, sortby='GRC/HR',sleep_reason=sleep_reason,status=status,dev_status=dev_status)
+ table_dict[project_url]["WEIGHT"] = "NA"
+ print_table(
+ table_dict,
+ sortby="GRC/HR",
+ sleep_reason=sleep_reason,
+ status=status,
+ dev_status=dev_status,
+ )
while True:
# If we have done sufficient crunching in dev mode, exit dev loop
- if DATABASE.get('DEVTIMECOUNTER', 0) < 1 and not FORCE_DEV_MODE and dev_loop:
+ if DATABASE.get("DEVTIMECOUNTER", 0) < 1 and not FORCE_DEV_MODE and dev_loop:
return None
# Re-authorize in case we have become de-authorized since last run. This is put in a try loop b/c sometimes it throws exceptions
while True:
try:
authorize_response = loop.run_until_complete(rpc_client.authorize())
- BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(get_attached_projects(rpc_client)) # we need to re-fetch this as it's different for dev and client
+ BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
+ get_attached_projects(rpc_client)
+ ) # we need to re-fetch this as it's different for dev and client
except Exception as e:
- print_and_log('Transient error connecting to BOINC, sleeping 30s','ERROR')
+ print_and_log(
+ "Transient error connecting to BOINC, sleeping 30s", "ERROR"
+ )
sleep(30)
else:
break
-
# If we haven't re-calculated stats recently enough, do it
- stats_calc_delta = datetime.datetime.now() - DATABASE.get('STATSLASTCALCULATED',datetime.datetime(1997,3,3))
- if ((abs(stats_calc_delta.days)*24*60)+(abs(stats_calc_delta.seconds)/60)) > recalculate_stats_interval: #only re-calculate stats every x minutes
- log.debug('Calculating stats..')
- DATABASE['STATSLASTCALCULATED'] = datetime.datetime.now()
+ stats_calc_delta = datetime.datetime.now() - DATABASE.get(
+ "STATSLASTCALCULATED", datetime.datetime(1997, 3, 3)
+ )
+ if (
+ (abs(stats_calc_delta.days) * 24 * 60)
+ + (abs(stats_calc_delta.seconds) / 60)
+ ) > recalculate_stats_interval: # only re-calculate stats every x minutes
+ log.debug("Calculating stats..")
+ DATABASE["STATSLASTCALCULATED"] = datetime.datetime.now()
combined_stats = config_files_to_stats(boinc_data_dir)
# total_time = combined_stats_to_total_time(combined_stats) # Not sure what this line did but commented out, we'll see if anything breaks
- combined_stats, final_project_weights, total_preferred_weight, total_mining_weight, dev_project_weights = generate_stats(
- APPROVED_PROJECT_URLS=APPROVED_PROJECT_URLS, preferred_projects=preferred_projects,
- ignored_projects=ignored_projects, quiet=True, ignore_unattached=True,
- attached_list=BOINC_PROJECT_LIST,mag_ratios=mag_ratios)
+ (
+ combined_stats,
+ final_project_weights,
+ total_preferred_weight,
+ total_mining_weight,
+ dev_project_weights,
+ ) = generate_stats(
+ APPROVED_PROJECT_URLS=APPROVED_PROJECT_URLS,
+ preferred_projects=preferred_projects,
+ ignored_projects=ignored_projects,
+ quiet=True,
+ ignore_unattached=True,
+ attached_list=BOINC_PROJECT_LIST,
+ mag_ratios=mag_ratios,
+ )
# Get list of projects ordered by priority
- highest_priority_projects, priority_results = get_highest_priority_project(combined_stats=combined_stats,
- project_weights=final_project_weights,
- attached_projects=BOINC_PROJECT_LIST,quiet=True)
- log.debug('Highest priority projects are: '+str(highest_priority_projects))
+ highest_priority_projects, priority_results = get_highest_priority_project(
+ combined_stats=combined_stats,
+ project_weights=final_project_weights,
+ attached_projects=BOINC_PROJECT_LIST,
+ quiet=True,
+ )
+ log.debug(
+ "Highest priority projects are: " + str(highest_priority_projects)
+ )
# print some pretty stats
update_table()
@@ -2031,427 +2802,675 @@ def update_table(sleep_reason:str=DATABASE.get('TABLE_SLEEP_REASON',''), status:
loop.run_until_complete(nnt_all_projects(rpc_client)) # NNT all projects
# If we haven't checked GRC prices in a while, do it
- price_check_delta= datetime.datetime.now() - DATABASE.get('GRCPRICELASTCHECKED',datetime.datetime(1993,3,3))
- price_check_calc=((abs(price_check_delta.days) * 24 * 60) + (abs(price_check_delta.seconds) / 60))
- if price_check_calc > max(price_check_interval,60):
+ price_check_delta = datetime.datetime.now() - DATABASE.get(
+ "GRCPRICELASTCHECKED", datetime.datetime(1993, 3, 3)
+ )
+ price_check_calc = (abs(price_check_delta.days) * 24 * 60) + (
+ abs(price_check_delta.seconds) / 60
+ )
+ if price_check_calc > max(price_check_interval, 60):
grc_price = get_grc_price()
- DATABASE['GRCPRICELASTCHECKED'] = datetime.datetime.now()
- DATABASE['GRCPRICE'] = grc_price
+ DATABASE["GRCPRICELASTCHECKED"] = datetime.datetime.now()
+ DATABASE["GRCPRICE"] = grc_price
else:
- grc_price=DATABASE['GRCPRICE']
+ grc_price = DATABASE["GRCPRICE"]
# Check profitability of all projects, if none profitable (and user doesn't want unprofitable crunching), sleep for 1hr
if only_BOINC_if_profitable and not dev_loop:
- profitability_list=[]
+ profitability_list = []
for project in highest_priority_projects:
- profitability_result=profitability_check(grc_price=grc_price,exchange_fee=exchange_fee,host_power_usage=host_power_usage,grc_sell_price=grc_sell_price,local_kwh=local_kwh,project=project,min_profit_per_hour=min_profit_per_hour,combined_stats=combined_stats)
- benchmarking_result=benchmark_check(project_url=project,combined_stats=combined_stats,benchmarking_minimum_wus=benchmarking_minimum_wus,benchmarking_minimum_time=benchmarking_minimum_time,benchmarking_delay_in_days=benchmarking_delay_in_days,skip_benchmarking=skip_benchmarking)
+ profitability_result = profitability_check(
+ grc_price=grc_price,
+ exchange_fee=exchange_fee,
+ host_power_usage=host_power_usage,
+ grc_sell_price=grc_sell_price,
+ local_kwh=local_kwh,
+ project=project,
+ min_profit_per_hour=min_profit_per_hour,
+ combined_stats=combined_stats,
+ )
+ benchmarking_result = benchmark_check(
+ project_url=project,
+ combined_stats=combined_stats,
+ benchmarking_minimum_wus=benchmarking_minimum_wus,
+ benchmarking_minimum_time=benchmarking_minimum_time,
+ benchmarking_delay_in_days=benchmarking_delay_in_days,
+ skip_benchmarking=skip_benchmarking,
+ )
profitability_list.append(profitability_result)
profitability_list.append(benchmarking_result)
if True not in profitability_list:
- log.info('No projects currently profitable and no benchmarking required, sleeping for 1 hour and killing all non-started tasks')
- tasks_list=get_task_list(rpc_client)
- kill_all_unstarted_tasks(rpc_client=rpc_client,task_list=tasks_list)
+ log.info(
+ "No projects currently profitable and no benchmarking required, sleeping for 1 hour and killing all non-started tasks"
+ )
+ tasks_list = get_task_list(rpc_client)
+ kill_all_unstarted_tasks(rpc_client=rpc_client, task_list=tasks_list)
nnt_all_projects(rpc_client)
- DATABASE['TABLE_SLEEP_REASON']= 'No profitable projects and no benchmarking required, sleeping 1 hr, killing all non-started tasks'
+ DATABASE[
+ "TABLE_SLEEP_REASON"
+ ] = "No profitable projects and no benchmarking required, sleeping 1 hr, killing all non-started tasks"
update_table()
- sleep(60*60)
+ sleep(60 * 60)
continue
# If we have enabled temperature control, verify that crunching is allowed at current temp
if enable_temp_control:
# Get BOINC's starting CPU and GPU modes
- existing_mode_info = loop.run_until_complete(run_rpc_command(rpc_client, 'get_cc_status'))
- existing_cpu_mode = existing_mode_info['task_mode']
- existing_gpu_mode = str(existing_mode_info['gpu_mode'])
+ existing_mode_info = loop.run_until_complete(
+ run_rpc_command(rpc_client, "get_cc_status")
+ )
+ existing_cpu_mode = existing_mode_info["task_mode"]
+ existing_gpu_mode = str(existing_mode_info["gpu_mode"])
if existing_cpu_mode in CPU_MODE_DICT:
existing_cpu_mode = CPU_MODE_DICT[existing_cpu_mode]
else:
- print_and_log('Error: Unknown cpu mode {}'.format(existing_cpu_mode),'ERROR')
+ print_and_log(
+ "Error: Unknown cpu mode {}".format(existing_cpu_mode), "ERROR"
+ )
if existing_gpu_mode in GPU_MODE_DICT:
existing_gpu_mode = GPU_MODE_DICT[existing_gpu_mode]
else:
- print_and_log('Error: Unknown gpu mode {}'.format(existing_gpu_mode),"ERROR")
+ print_and_log(
+ "Error: Unknown gpu mode {}".format(existing_gpu_mode), "ERROR"
+ )
# If temp is too high:
if not temp_check():
- while True: # Keep sleeping until we pass a temp check
- log.debug('Sleeping due to temperature')
+ while True: # Keep sleeping until we pass a temp check
+ log.debug("Sleeping due to temperature")
# Put BOINC into sleep mode, automatically reverting if script closes unexpectedly
- sleep_interval=str(int(((60*temp_sleep_time)+60)))
+ sleep_interval = str(int(((60 * temp_sleep_time) + 60)))
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_run_mode', 'never', sleep_interval))
+ run_rpc_command(
+ rpc_client, "set_run_mode", "never", sleep_interval
+ )
+ )
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_gpu_mode', 'never', sleep_interval))
- DATABASE['TABLE_SLEEP_REASON']= 'Temperature'
+ run_rpc_command(
+ rpc_client, "set_gpu_mode", "never", sleep_interval
+ )
+ )
+ DATABASE["TABLE_SLEEP_REASON"] = "Temperature"
update_table()
- sleep(60*temp_sleep_time)
+ sleep(60 * temp_sleep_time)
if temp_check():
# Reset to initial crunching modes now that temp is satisfied
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_run_mode', existing_cpu_mode))
+ run_rpc_command(
+ rpc_client, "set_run_mode", existing_cpu_mode
+ )
+ )
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_gpu_mode', existing_gpu_mode))
+ run_rpc_command(
+ rpc_client, "set_gpu_mode", existing_gpu_mode
+ )
+ )
break
# If we are due to run under dev account, do it
if should_crunch_for_dev():
- boinc_password=setup_dev_boinc() # Setup and start dev boinc
- DEV_BOINC_PASSWORD=boinc_password
- dev_rpc_client=None
- if boinc_password=='ERROR':
- log.error('Error setting up crunching to developer account')
+ boinc_password = setup_dev_boinc() # Setup and start dev boinc
+ DEV_BOINC_PASSWORD = boinc_password
+ dev_rpc_client = None
+ if boinc_password == "ERROR":
+ log.error("Error setting up crunching to developer account")
else:
# setup dev RPC connection, it may take a few tries while we wait for it to come online
- tries=1
- tries_max=5
- dev_rpc_client=None
- while tries<=tries_max:
+ tries = 1
+ tries_max = 5
+ dev_rpc_client = None
+ while tries <= tries_max:
try:
- dev_rpc_client = loop.run_until_complete(setup_connection(boinc_ip, boinc_password, port=DEV_RPC_PORT)) # setup dev BOINC RPC connection
- authorize_response = loop.run_until_complete(dev_rpc_client.authorize()) # authorize dev RPC connection
+ dev_rpc_client = loop.run_until_complete(
+ setup_connection(
+ boinc_ip, boinc_password, port=DEV_RPC_PORT
+ )
+ ) # setup dev BOINC RPC connection
+ authorize_response = loop.run_until_complete(
+ dev_rpc_client.authorize()
+ ) # authorize dev RPC connection
except Exception as e:
- log.error('Error connecting to BOINC dev client {}'.format(e))
+ log.error("Error connecting to BOINC dev client {}".format(e))
else:
- if tries>1:
- log.info('Finally connected to BOINC dev client {}')
+ if tries > 1:
+ log.info("Finally connected to BOINC dev client after {} tries".format(tries))
break
sleep(30)
- tries+=1
+ tries += 1
if tries > tries_max:
- log.error('Giving up on connecting to BOINC dev client')
+ log.error("Giving up on connecting to BOINC dev client")
if dev_rpc_client:
# Set main BOINC to suspend until we're done crunching in dev mode. It will automatically re-enable itself in 100x the time if nothing is done
# This allows for non-graceful exits of this script to not brick client's BOINC and considerations that dev account may not be crunching full time if client
# is actively using computer.
- existing_mode_info=loop.run_until_complete(run_rpc_command(rpc_client,'get_cc_status'))
- existing_cpu_mode= existing_mode_info['task_mode']
- existing_gpu_mode = str(existing_mode_info['gpu_mode'])
+ existing_mode_info = loop.run_until_complete(
+ run_rpc_command(rpc_client, "get_cc_status")
+ )
+ existing_cpu_mode = existing_mode_info["task_mode"]
+ existing_gpu_mode = str(existing_mode_info["gpu_mode"])
if existing_cpu_mode in CPU_MODE_DICT:
- existing_cpu_mode=CPU_MODE_DICT[existing_cpu_mode]
+ existing_cpu_mode = CPU_MODE_DICT[existing_cpu_mode]
else:
- print('Error: Unknown cpu mode {}'.format(existing_cpu_mode))
- log.error('Error: Unknown cpu mode {}'.format(existing_cpu_mode))
+ print("Error: Unknown cpu mode {}".format(existing_cpu_mode))
+ log.error("Error: Unknown cpu mode {}".format(existing_cpu_mode))
if existing_gpu_mode in GPU_MODE_DICT:
- existing_gpu_mode=GPU_MODE_DICT[existing_gpu_mode]
+ existing_gpu_mode = GPU_MODE_DICT[existing_gpu_mode]
else:
- print('Error: Unknown gpu mode {}'.format(existing_gpu_mode))
- log.error('Error: Unknown gpu mode {}'.format(existing_gpu_mode))
-
- loop.run_until_complete(run_rpc_command(rpc_client,'set_run_mode','never',str(int((DATABASE['DEVTIMECOUNTER']*60)*100))))
- loop.run_until_complete(run_rpc_command(rpc_client, 'set_gpu_mode', 'never', str(int((DATABASE['DEVTIMECOUNTER'] * 60) * 100))))
- log.info('Starting crunching under dev account, entering dev loop')
- DATABASE['TABLE_SLEEP_REASON']= 'Crunching for developer\'s account, {}% of crunching total'.format(dev_fee * 100)
- DEV_LOOP_RUNNING=True
+ print("Error: Unknown gpu mode {}".format(existing_gpu_mode))
+ log.error("Error: Unknown gpu mode {}".format(existing_gpu_mode))
+
+ loop.run_until_complete(
+ run_rpc_command(
+ rpc_client,
+ "set_run_mode",
+ "never",
+ str(int((DATABASE["DEVTIMECOUNTER"] * 60) * 100)),
+ )
+ )
+ loop.run_until_complete(
+ run_rpc_command(
+ rpc_client,
+ "set_gpu_mode",
+ "never",
+ str(int((DATABASE["DEVTIMECOUNTER"] * 60) * 100)),
+ )
+ )
+ log.info("Starting crunching under dev account, entering dev loop")
+ DATABASE[
+ "TABLE_SLEEP_REASON"
+ ] = "Crunching for developer's account, {}% of crunching total".format(
+ dev_fee * 100
+ )
+ DEV_LOOP_RUNNING = True
update_table()
- boinc_loop(dev_loop=True,rpc_client=dev_rpc_client,client_rpc_client=rpc_client,time=DATABASE['DEVTIMECOUNTER']) # run the BOINC loop :)
+ boinc_loop(
+ dev_loop=True,
+ rpc_client=dev_rpc_client,
+ client_rpc_client=rpc_client,
+ time=DATABASE["DEVTIMECOUNTER"],
+ ) # run the BOINC loop :)
update_table()
- authorize_response = loop.run_until_complete(dev_rpc_client.authorize()) # authorize dev RPC connection
- loop.run_until_complete(run_rpc_command(dev_rpc_client, 'quit')) # quit dev client
- DEV_LOOP_RUNNING=False
+ authorize_response = loop.run_until_complete(
+ dev_rpc_client.authorize()
+ ) # authorize dev RPC connection
+ loop.run_until_complete(
+ run_rpc_command(dev_rpc_client, "quit")
+ ) # quit dev client
+ DEV_LOOP_RUNNING = False
# re-enable client BOINC
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_gpu_mode', existing_gpu_mode))
+ run_rpc_command(rpc_client, "set_gpu_mode", existing_gpu_mode)
+ )
loop.run_until_complete(
- run_rpc_command(rpc_client, 'set_run_mode', existing_cpu_mode))
+ run_rpc_command(rpc_client, "set_run_mode", existing_cpu_mode)
+ )
# loop through each project in order of priority and request new tasks if not backed off
# stopping looping if cache becomes full
- dont_nnt=None
+ dont_nnt = None
if dev_loop:
- project_loop=dev_project_weights
+ project_loop = dev_project_weights
else:
- project_loop=highest_priority_projects
+ project_loop = highest_priority_projects
for highest_priority_project in project_loop:
- boincified_url=resolve_boinc_url_new(highest_priority_project)
- benchmark_result=benchmark_check(project_url=highest_priority_project,combined_stats=combined_stats,benchmarking_minimum_wus=benchmarking_minimum_wus,benchmarking_minimum_time=benchmarking_minimum_time,benchmarking_delay_in_days=benchmarking_delay_in_days,skip_benchmarking=skip_benchmarking)
- profitability_result = profitability_check(grc_price=grc_price, exchange_fee=exchange_fee,
- host_power_usage=host_power_usage,
- grc_sell_price=grc_sell_price, local_kwh=local_kwh,
- project=highest_priority_project,
- min_profit_per_hour=min_profit_per_hour,
- combined_stats=combined_stats)
- if only_BOINC_if_profitable and not benchmark_result and not profitability_result and not dev_loop:
- DATABASE['TABLE_STATUS']='No fetch for {} bc not profitable'.format(highest_priority_project)
+ boincified_url = resolve_boinc_url_new(highest_priority_project)
+ benchmark_result = benchmark_check(
+ project_url=highest_priority_project,
+ combined_stats=combined_stats,
+ benchmarking_minimum_wus=benchmarking_minimum_wus,
+ benchmarking_minimum_time=benchmarking_minimum_time,
+ benchmarking_delay_in_days=benchmarking_delay_in_days,
+ skip_benchmarking=skip_benchmarking,
+ )
+ profitability_result = profitability_check(
+ grc_price=grc_price,
+ exchange_fee=exchange_fee,
+ host_power_usage=host_power_usage,
+ grc_sell_price=grc_sell_price,
+ local_kwh=local_kwh,
+ project=highest_priority_project,
+ min_profit_per_hour=min_profit_per_hour,
+ combined_stats=combined_stats,
+ )
+ if (
+ only_BOINC_if_profitable
+ and not benchmark_result
+ and not profitability_result
+ and not dev_loop
+ ):
+ DATABASE["TABLE_STATUS"] = "No fetch for {} bc not profitable".format(
+ highest_priority_project
+ )
update_table()
- log.info('Skipping work fetch for {} bc not profitable and only_boinc_if_profitable is set to true'.format(highest_priority_project))
+ log.info(
+ "Skipping work fetch for {} bc not profitable and only_boinc_if_profitable is set to true".format(
+ highest_priority_project
+ )
+ )
continue
# If user has set to only mine highest mag project if profitable and it's not profitable or in benchmarking mode, skip
- if only_mine_if_profitable and not profitability_result and final_project_weights[highest_priority_project]!=1 and not dev_loop:
- DATABASE['TABLE_STATUS']='Skipping work fetch for {} bc not profitable and only_mine_if_profitable set to true'.format(highest_priority_project)
+ if (
+ only_mine_if_profitable
+ and not profitability_result
+ and final_project_weights[highest_priority_project] != 1
+ and not dev_loop
+ ):
+ DATABASE[
+ "TABLE_STATUS"
+ ] = "Skipping work fetch for {} bc not profitable and only_mine_if_profitable set to true".format(
+ highest_priority_project
+ )
update_table()
- log.info('Skipping work fetch for {} bc not profitable and only_mine_if_profitable set to true'.format(
- highest_priority_project))
+ log.info(
+ "Skipping work fetch for {} bc not profitable and only_mine_if_profitable set to true".format(
+ highest_priority_project
+ )
+ )
continue
- highest_priority_project = resolve_boinc_url(highest_priority_project,
- ALL_BOINC_PROJECTS) # make sure we are using correct URL, BOINC requires capitalization to be exact
+ highest_priority_project = resolve_boinc_url(
+ highest_priority_project, ALL_BOINC_PROJECTS
+ ) # make sure we are using correct URL, BOINC requires capitalization to be exact
if highest_priority_project.upper() not in DATABASE[mode]:
DATABASE[mode][highest_priority_project.upper()] = {}
# skip checking project if we have a backoff counter going and it hasn't been long enough
- time_since_last_project_check=datetime.datetime.now() - DATABASE[mode][highest_priority_project.upper()].get('LAST_CHECKED',datetime.datetime(1997, 6, 21, 18, 25, 30))
- minutes_since_last_project_check = time_since_last_project_check.seconds / 60
- if minutes_since_last_project_check < DATABASE[mode].get(highest_priority_project.upper(), {}).get('BACKOFF', 0):
- DATABASE['TABLE_STATUS']='Skipping {} due to backoff period...'.format({highest_priority_project})
+ time_since_last_project_check = datetime.datetime.now() - DATABASE[mode][
+ highest_priority_project.upper()
+ ].get("LAST_CHECKED", datetime.datetime(1997, 6, 21, 18, 25, 30))
+ minutes_since_last_project_check = (
+ time_since_last_project_check.seconds / 60
+ )
+ if minutes_since_last_project_check < DATABASE[mode].get(
+ highest_priority_project.upper(), {}
+ ).get("BACKOFF", 0):
+ DATABASE[
+ "TABLE_STATUS"
+ ] = "Skipping {} due to backoff period...".format(
+ {highest_priority_project}
+ )
update_table()
- log.debug('Skipping project {} due to backoff period... minutes_since is {}'.format(highest_priority_project,minutes_since_last_project_check))
+ log.debug(
+ "Skipping project {} due to backoff period... minutes_since is {}".format(
+ highest_priority_project, minutes_since_last_project_check
+ )
+ )
continue
- DATABASE['TABLE_STATUS']='Waiting for xfers to complete..'
+ DATABASE["TABLE_STATUS"] = "Waiting for xfers to complete.."
update_table()
- log.info('Waiting for any xfers to complete...')
- dl_response = wait_till_no_xfers(rpc_client) # wait until all network activity has concluded
+ log.info("Waiting for any xfers to complete...")
+ dl_response = wait_till_no_xfers(
+ rpc_client
+ ) # wait until all network activity has concluded
# if in dev_loop, attach to project if needed
if dev_loop:
- get_project_list = loop.run_until_complete(run_rpc_command(rpc_client,'get_project_status'))
+ get_project_list = loop.run_until_complete(
+ run_rpc_command(rpc_client, "get_project_status")
+ )
# on first run, there is no project list
- if isinstance(get_project_list,list):
- converted_project_list=project_list_to_project_list(get_project_list) # convert to simple list of strings so we can check if project URL is in list
+ if isinstance(get_project_list, list):
+ converted_project_list = project_list_to_project_list(
+ get_project_list
+ ) # convert to simple list of strings so we can check if project URL is in list
else:
- log.warning('Dev BOINC shows empty project list, this is normal on first run')
- converted_project_list=[]
-
- if not project_in_list_check(highest_priority_project,converted_project_list):
+ log.warning(
+ "Dev BOINC shows empty project list, this is normal on first run"
+ )
+ converted_project_list = []
+
+ if not project_in_list_check(
+ highest_priority_project, converted_project_list
+ ):
# yoyo will never be in project dict due to not supporting weak auth
- converted_dev_project_url=project_to_dev_project(highest_priority_project,DEV_PROJECT_DICT)
+ converted_dev_project_url = project_to_dev_project(
+ highest_priority_project, DEV_PROJECT_DICT
+ )
if converted_dev_project_url not in DEV_PROJECT_DICT:
- if 'YOYO' not in converted_dev_project_url.upper():
- log.error('Unable to attach dev account to {} bc not in DEV_PROJECT_DICT'.format(highest_priority_project))
+ if "YOYO" not in converted_dev_project_url.upper():
+ log.error(
+ "Unable to attach dev account to {} bc not in DEV_PROJECT_DICT".format(
+ highest_priority_project
+ )
+ )
continue
else:
- log.info('Attaching dev account to {}'.format(boincified_url))
- attach_response = loop.run_until_complete(run_rpc_command(rpc_client, 'project_attach', arg1='project_url',arg1_val=boincified_url, arg2='authenticator',arg2_val=DEV_PROJECT_DICT[converted_dev_project_url])) # update project
- sleep(30) # give it a chance to finish attaching
- BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
- get_attached_projects(
- rpc_client)) # we need to re-fetch this as it's now changed
- highest_priority_project = resolve_boinc_url(highest_priority_project,
- ALL_BOINC_PROJECTS) # this may have changed, so check
- if len(BOINC_PROJECT_LIST)==0: # using this as a proxy for "failed attach"
- log.error('Appears to fail to attach to {}'.format(boincified_url))
+ log.info("Attaching dev account to {}".format(boincified_url))
+ attach_response = loop.run_until_complete(
+ run_rpc_command(
+ rpc_client,
+ "project_attach",
+ arg1="project_url",
+ arg1_val=boincified_url,
+ arg2="authenticator",
+ arg2_val=DEV_PROJECT_DICT[converted_dev_project_url],
+ )
+ ) # update project
+ sleep(30) # give it a chance to finish attaching
+ (
+ BOINC_PROJECT_LIST,
+ BOINC_PROJECT_NAMES,
+ ) = loop.run_until_complete(
+ get_attached_projects(rpc_client)
+ ) # we need to re-fetch this as it's now changed
+ highest_priority_project = resolve_boinc_url(
+ highest_priority_project, ALL_BOINC_PROJECTS
+ ) # this may have changed, so check
+ if (
+ len(BOINC_PROJECT_LIST) == 0
+ ): # using this as a proxy for "failed attach"
+ log.error(
+ "Appears to fail to attach to {}".format(boincified_url)
+ )
continue
- print('')
+ print("")
project_name = ALL_BOINC_PROJECTS[highest_priority_project]
- DATABASE['TABLE_STATUS']='Allowing new tasks & updating {}'.format(project_name)
- log.info('Allowing new tasks and updating {}'.format(highest_priority_project))
+ DATABASE["TABLE_STATUS"] = "Allowing new tasks & updating {}".format(
+ project_name
+ )
+ log.info(
+ "Allowing new tasks and updating {}".format(highest_priority_project)
+ )
update_table()
- allow_response=loop.run_until_complete(run_rpc_command(rpc_client,'project_allowmorework','project_url',boincified_url))
- update_response = loop.run_until_complete(run_rpc_command(rpc_client, 'project_update', 'project_url', boincified_url)) # update project
- log.debug('Requesting work from {} added to debug no new tasks bug' + str(
- boincified_url))
- log.debug('Update response is {}'.format(update_response))
- sleep(15) # give BOINC time to update w project, I don't know a less hacky way to do this, suggestions are welcome
- DATABASE[mode][highest_priority_project.upper()]['LAST_CHECKED'] = datetime.datetime.now()
+ allow_response = loop.run_until_complete(
+ run_rpc_command(
+ rpc_client, "project_allowmorework", "project_url", boincified_url
+ )
+ )
+ update_response = loop.run_until_complete(
+ run_rpc_command(
+ rpc_client, "project_update", "project_url", boincified_url
+ )
+ ) # update project
+ log.debug(
+ "Requesting work from {} added to debug no new tasks bug"
+ + str(boincified_url)
+ )
+ log.debug("Update response is {}".format(update_response))
+ sleep(
+ 15
+    ) # give BOINC time to update with project, I don't know a less hacky way to do this, suggestions are welcome
+ DATABASE[mode][highest_priority_project.upper()][
+ "LAST_CHECKED"
+ ] = datetime.datetime.now()
# check if project should be backed off. If so, back it off.
# This is an exponentially increasing backoff with a maximum time of 1 day
# Projects are backed off if they request it, if they are unresponsive/down, or if no work is available
- backoff_response = loop.run_until_complete(check_log_entries_for_backoff(rpc_client, project_name=project_name))
+ backoff_response = loop.run_until_complete(
+ check_log_entries_for_backoff(rpc_client, project_name=project_name)
+ )
if backoff_response:
- if DATABASE[mode][highest_priority_project.upper()].get('BACKOFF'):
- DATABASE[mode][highest_priority_project.upper()]['BACKOFF']=min(DATABASE[mode][highest_priority_project.upper()]['BACKOFF']*2,1440)
+ if DATABASE[mode][highest_priority_project.upper()].get("BACKOFF"):
+ DATABASE[mode][highest_priority_project.upper()]["BACKOFF"] = min(
+ DATABASE[mode][highest_priority_project.upper()]["BACKOFF"] * 2,
+ 1440,
+ )
else:
- DATABASE[mode][highest_priority_project.upper()]['BACKOFF']=min_recheck_time
+ DATABASE[mode][highest_priority_project.upper()][
+ "BACKOFF"
+ ] = min_recheck_time
else:
- DATABASE[mode][highest_priority_project.upper()]['BACKOFF'] = 0
- log.debug('Waiting for any xfers to complete...')
- dl_response = wait_till_no_xfers(rpc_client) # wait until all network activity has concluded
+ DATABASE[mode][highest_priority_project.upper()]["BACKOFF"] = 0
+ log.debug("Waiting for any xfers to complete...")
+ dl_response = wait_till_no_xfers(
+ rpc_client
+ ) # wait until all network activity has concluded
- if not dont_nnt: # if we didn't get a backoff signal and we haven't picked a project to leave non-NNTed during sleeping of loop, pick this one for that purpose
- dont_nnt=highest_priority_project.upper()
+ if (
+ not dont_nnt
+ ): # if we didn't get a backoff signal and we haven't picked a project to leave non-NNTed during sleeping of loop, pick this one for that purpose
+ dont_nnt = highest_priority_project.upper()
# re-NNT all projects
- nnt_response = loop.run_until_complete(nnt_all_projects(rpc_client)) # NNT all projects
+ nnt_response = loop.run_until_complete(
+ nnt_all_projects(rpc_client)
+ ) # NNT all projects
# Check logs to see if both work caches are full
- cache_full = loop.run_until_complete(check_log_entries(rpc_client,project_name=project_name))
- log.debug('checking log response for work cache status....')
+ cache_full = loop.run_until_complete(
+ check_log_entries(rpc_client, project_name=project_name)
+ )
+ log.debug("checking log response for work cache status....")
# If BOINC job cache is full, stop asking projects for work
if cache_full:
- DATABASE['TABLE_SLEEP_REASON']='BOINC work cache full...'
+ DATABASE["TABLE_SLEEP_REASON"] = "BOINC work cache full..."
update_table()
break
# Allow highest non-backedoff project to be non-NNTd.
# This enables BOINC to fetch work if it's needed before our sleep period elapses
if dont_nnt:
- allow_this_project=resolve_boinc_url_new(dont_nnt)
+ allow_this_project = resolve_boinc_url_new(dont_nnt)
allow_response = loop.run_until_complete(
- run_rpc_command(rpc_client, 'project_allowmorework', 'project_url', allow_this_project))
- custom_sleep(30,rpc_client,dev_loop=dev_loop) # There's no reason to loop through all projects more than once every 30 minutes
-def print_and_log(msg:str,log_level:str)->None:
+ run_rpc_command(
+ rpc_client,
+ "project_allowmorework",
+ "project_url",
+ allow_this_project,
+ )
+ )
+ custom_sleep(
+ 30, rpc_client, dev_loop=dev_loop
+ ) # There's no reason to loop through all projects more than once every 30 minutes
+
+
+def print_and_log(msg: str, log_level: str) -> None:
"""
Print a message and add it to the log at log_level. Valid log_levels are DEBUG, INFO, WARNING, ERROR
"""
print(msg)
- if log_level=='DEBUG':
+ if log_level == "DEBUG":
log.debug(msg)
- elif log_level=='WARNING':
+ elif log_level == "WARNING":
log.warning(msg)
- elif log_level=='INFO':
+ elif log_level == "INFO":
log.info(msg)
- elif log_level=='ERROR':
+ elif log_level == "ERROR":
log.error(msg)
else:
- print('Being asked to log at an unknown level: {}'.format(log_level))
-def create_default_database()->Dict[str,Any]:
+ print("Being asked to log at an unknown level: {}".format(log_level))
+
+
+def create_default_database() -> Dict[str, Any]:
DATABASE: Dict[str, Any] = {}
- DATABASE['DEVTIMECOUNTER'] = 0
- DATABASE['FTMTOTAL'] = 0
- DATABASE['DEVTIMETOTAL'] = 0
- DATABASE['TABLE_STATUS'] = ''
- DATABASE['TABLE_SLEEP_REASON'] = ''
+ DATABASE["DEVTIMECOUNTER"] = 0
+ DATABASE["FTMTOTAL"] = 0
+ DATABASE["DEVTIMETOTAL"] = 0
+ DATABASE["TABLE_STATUS"] = ""
+ DATABASE["TABLE_SLEEP_REASON"] = ""
return DATABASE
-if __name__ == '__main__':
- wallet_running=True # switches to false if we have issues connecting
+
+if __name__ == "__main__":
+ wallet_running = True # switches to false if we have issues connecting
# Verify we are in appropriate python environment
- python_major=sys.version_info.major
- python_minor=sys.version_info.minor
- if python_major<3:
- print('Error: This program requires python 3.6 or higher to run, you are running it as Python {}'.format(platform.python_version()))
- input('Press enter to exit')
+ python_major = sys.version_info.major
+ python_minor = sys.version_info.minor
+ if python_major < 3:
+ print(
+ "Error: This program requires python 3.6 or higher to run, you are running it as Python {}".format(
+ platform.python_version()
+ )
+ )
+ input("Press enter to exit")
quit()
- elif python_major==3 and python_minor<6:
- print('Error: This program requires python 3.6 or higher to run, you are running it as Python {}'.format(platform.python_version()))
- input('Some things may not work as expected. Press enter to continue')
+ elif python_major == 3 and python_minor < 6:
+ print(
+ "Error: This program requires python 3.6 or higher to run, you are running it as Python {}".format(
+ platform.python_version()
+ )
+ )
+ input("Some things may not work as expected. Press enter to continue")
del python_minor
del python_major
- log.debug('Python version {}'.format(platform.python_version()))
+ log.debug("Python version {}".format(platform.python_version()))
- shutdown_dev_client(quiet=True) # shut down dev client is it's running. This is useful if program shuts down unexpectedly
+ shutdown_dev_client(
+ quiet=True
+    ) # shut down dev client if it's running. This is useful if program shuts down unexpectedly
# Load long-term stats
- if os.path.exists('stats.json'):
+ if os.path.exists("stats.json"):
try:
- with open('stats.json') as json_file:
- DATABASE:Dict[str,Any] = json.load(json_file,object_hook=object_hook)
+ with open("stats.json") as json_file:
+ DATABASE: Dict[str, Any] = json.load(json_file, object_hook=object_hook)
except Exception as e:
- if os.path.exists('stats.json.backup'):
- print('Error opening stats file, trying backup...')
- log.error('Error opening stats file, trying backup...')
+ if os.path.exists("stats.json.backup"):
+ print("Error opening stats file, trying backup...")
+ log.error("Error opening stats file, trying backup...")
try:
- with open('stats.json.backup') as json_file:
- DATABASE:Dict[str,Any] = json.load(json_file,object_hook=object_hook)
+ with open("stats.json.backup") as json_file:
+ DATABASE: Dict[str, Any] = json.load(
+ json_file, object_hook=object_hook
+ )
except:
- print_and_log('Error opening stats file, making new one...','ERROR')
+ print_and_log(
+ "Error opening stats file, making new one...", "ERROR"
+ )
DATABASE = create_default_database()
save_stats(DATABASE)
else:
- print_and_log('Error loading stats file. Making new one...','ERROR')
- shutil.copy('stats.json','stats.json.corrupted')
+ print_and_log("Error loading stats file. Making new one...", "ERROR")
+ shutil.copy("stats.json", "stats.json.corrupted")
DATABASE = create_default_database()
save_stats(DATABASE)
else:
- DATABASE=create_default_database()
+ DATABASE = create_default_database()
save_stats(DATABASE)
# These vars should reset and/or checked each run
- DATABASE['TABLE_STATUS']=''
- DATABASE['TABLE_SLEEP_REASON'] = ''
- if 'FTMTOTAL' not in DATABASE:
- DATABASE['FTMTOTAL']=0
- if 'DEVTIMETOTAL' not in DATABASE:
- DATABASE['DEVTIMETOTAL']=0
-
- signal.signal(signal.SIGINT, safe_exit) # Capture ctrl+c from client to exit gracefully
- update_check() # Check for updates to FTM
+ DATABASE["TABLE_STATUS"] = ""
+ DATABASE["TABLE_SLEEP_REASON"] = ""
+ if "FTMTOTAL" not in DATABASE:
+ DATABASE["FTMTOTAL"] = 0
+ if "DEVTIMETOTAL" not in DATABASE:
+ DATABASE["DEVTIMETOTAL"] = 0
+
+ signal.signal(
+ signal.SIGINT, safe_exit
+ ) # Capture ctrl+c from client to exit gracefully
+ update_check() # Check for updates to FTM
combined_stats = {}
APPROVED_PROJECT_URLS = []
# combined_stats has format:
-# COMBINED_STATS_EXAMPLE = {
-# 'HTTP://PROJECT.COM/PROJECT': {
-# 'COMPILED_STATS': {
-# 'AVGWALLTIME': 30.01, 'AVGCPUTIME': 10.02, 'TOTALTASKS': 51, 'TOTALWALLTIME': 223311.34,
-# 'AVGCREDITPERHOUR': 31.2, 'XDAYWALLTIME': 30, 'AVGCREDITPERTASK': 32.12, 'AVGMAGPERHOUR': 32.1, 'TOTALCPUTIME':300010.10},
-# 'CREDIT_HISTORY': {
-# '11-29-21': {'CREDITAWARDED':100.54},
-# '11-28-21': {'CREDITAWARDED':100.21},
-# },
-# 'WU_HISTORY': {
-# '07-31-2021':{'STARTTIME': '1627765997', 'ESTTIME': '6128.136145', 'CPUTIME': '3621.724000',
-# 'ESTIMATEDFLOPS': '30000000000000', 'TASKNAME': 'wu_sf3_DS-16x271-9_Grp218448of1000000_0',
-# 'WALLTIME': '3643.133927', 'EXITCODE': '0'},
-# '07-29-2021': {'STARTTIME': '1627765996', 'ESTTIME': '6128.136145', 'CPUTIME': '3621.724000',
-# 'ESTIMATEDFLOPS': '30000000000000',
-# 'TASKNAME': 'wu_sf3_DS-16x271-9_Grp218448of1000000_0',
-# 'WALLTIME': '3643.133927', 'EXITCODE': '0'},
-# }
-# },
-# }
+ # COMBINED_STATS_EXAMPLE = {
+ # 'HTTP://PROJECT.COM/PROJECT': {
+ # 'COMPILED_STATS': {
+ # 'AVGWALLTIME': 30.01, 'AVGCPUTIME': 10.02, 'TOTALTASKS': 51, 'TOTALWALLTIME': 223311.34,
+ # 'AVGCREDITPERHOUR': 31.2, 'XDAYWALLTIME': 30, 'AVGCREDITPERTASK': 32.12, 'AVGMAGPERHOUR': 32.1, 'TOTALCPUTIME':300010.10},
+ # 'CREDIT_HISTORY': {
+ # '11-29-21': {'CREDITAWARDED':100.54},
+ # '11-28-21': {'CREDITAWARDED':100.21},
+ # },
+ # 'WU_HISTORY': {
+ # '07-31-2021':{'STARTTIME': '1627765997', 'ESTTIME': '6128.136145', 'CPUTIME': '3621.724000',
+ # 'ESTIMATEDFLOPS': '30000000000000', 'TASKNAME': 'wu_sf3_DS-16x271-9_Grp218448of1000000_0',
+ # 'WALLTIME': '3643.133927', 'EXITCODE': '0'},
+ # '07-29-2021': {'STARTTIME': '1627765996', 'ESTTIME': '6128.136145', 'CPUTIME': '3621.724000',
+ # 'ESTIMATEDFLOPS': '30000000000000',
+ # 'TASKNAME': 'wu_sf3_DS-16x271-9_Grp218448of1000000_0',
+ # 'WALLTIME': '3643.133927', 'EXITCODE': '0'},
+ # }
+ # },
+ # }
# Define starting parameters
- found_platform=platform.system()
+ found_platform = platform.system()
if not boinc_data_dir:
- if found_platform=='Linux':
- if os.path.isdir('/var/lib/boinc-client'):
- boinc_data_dir='/var/lib/boinc-client'
+ if found_platform == "Linux":
+ if os.path.isdir("/var/lib/boinc-client"):
+ boinc_data_dir = "/var/lib/boinc-client"
else:
- boinc_data_dir=os.path.join(Path.home(), 'BOINC/')
- elif found_platform=='Darwin':
- boinc_data_dir=os.path.join('/Library/Application Support/BOINC Data/')
+ boinc_data_dir = os.path.join(Path.home(), "BOINC/")
+ elif found_platform == "Darwin":
+ boinc_data_dir = os.path.join("/Library/Application Support/BOINC Data/")
else:
- boinc_data_dir = 'C:\ProgramData\BOINC\\'
+ boinc_data_dir = "C:\ProgramData\BOINC\\"
if not gridcoin_data_dir:
- if found_platform=='Linux':
- gridcoin_data_dir=os.path.join(Path.home(),'.GridcoinResearch/')
- elif found_platform=='Darwin':
- gridcoin_data_dir = os.path.join(Path.home(), 'Library/Application Support/GridcoinResearch/')
+ if found_platform == "Linux":
+ gridcoin_data_dir = os.path.join(Path.home(), ".GridcoinResearch/")
+ elif found_platform == "Darwin":
+ gridcoin_data_dir = os.path.join(
+ Path.home(), "Library/Application Support/GridcoinResearch/"
+ )
else:
- gridcoin_data_dir=os.path.join(Path.home(),'AppData\Roaming\GridcoinResearch\\')
+ gridcoin_data_dir = os.path.join(
+ Path.home(), "AppData\Roaming\GridcoinResearch\\"
+ )
# check that directories exist
- log.info('Guessing BOINC data dir is ' + str(boinc_data_dir))
+ log.info("Guessing BOINC data dir is " + str(boinc_data_dir))
if not os.path.isdir(boinc_data_dir):
- print('BOINC data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look')
+ print(
+ "BOINC data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look"
+ )
log.error(
- 'BOINC data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look')
- input('Press enter to exit')
+ "BOINC data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look"
+ )
+ input("Press enter to exit")
quit()
- log.info('Guessing Gridcoin data dir is ' + str(gridcoin_data_dir))
+ log.info("Guessing Gridcoin data dir is " + str(gridcoin_data_dir))
if not os.path.isdir(gridcoin_data_dir):
- print('Gridcoin data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look')
+ print(
+ "Gridcoin data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look"
+ )
log.error(
- 'Gridcoin data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look')
- input('Press enter to continue or CTRL+C to quit')
- wallet_running=False
- override_path = os.path.join(boinc_data_dir, 'global_prefs_override.xml')
- override_dest_path=os.path.join(os.getcwd(),'global_prefs_override_backup.xml')
+ "Gridcoin data dir does not appear to exist. If you have it in a non-standard location, please edit config.py so we know where to look"
+ )
+ input("Press enter to continue or CTRL+C to quit")
+ wallet_running = False
+ override_path = os.path.join(boinc_data_dir, "global_prefs_override.xml")
+ override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml")
try:
os.access(override_path, os.W_OK)
except Exception as e:
- print_and_log('This program does not have write access to your BOINC config file, meaning it can\'t reset settings back to your original ones upon close','ERROR')
- print_and_log("Linux users try 'sudo chown your_username {}' to fix this error".format(override_path),'INFO')
+ print_and_log(
+ "This program does not have write access to your BOINC config file, meaning it can't reset settings back to your original ones upon close",
+ "ERROR",
+ )
+ print_and_log(
+ "Linux users try 'sudo chown your_username {}' to fix this error".format(
+ override_path
+ ),
+ "INFO",
+ )
if not SCRIPTED_RUN:
- input('Press enter to continue')
+ input("Press enter to continue")
# auto-detect password for BOINC RPC if it exists and user didn't know
# BOINC on Windows automatically generates an RPC password
- auth_location = os.path.join(boinc_data_dir, 'gui_rpc_auth.cfg')
+ auth_location = os.path.join(boinc_data_dir, "gui_rpc_auth.cfg")
if not boinc_password:
try:
if os.path.exists(auth_location):
- with open(auth_location, 'r') as file:
+ with open(auth_location, "r") as file:
data = file.read().rstrip()
- if data != '':
+ if data != "":
boinc_password = data
except Exception as e:
# This error can generally be disregarded on Linux/OSX
- if 'WINDOWS' in found_platform.upper():
- print('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
- log.error('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
+ if "WINDOWS" in found_platform.upper():
+ print("Error reading boinc RPC file at {}: {}".format(auth_location, e))
+ log.error(
+ "Error reading boinc RPC file at {}: {}".format(auth_location, e)
+ )
else:
- log.debug('Error reading boinc RPC file at {}: {}'.format(auth_location, e))
-
+ log.debug(
+ "Error reading boinc RPC file at {}: {}".format(auth_location, e)
+ )
# Check that project weights make sense
total_found_values = 0
for url, found_value in preferred_projects.items():
- total_found_values+=found_value
- if total_found_values!=100 and len(preferred_projects)>0:
- print('Warning: The weights of your preferred projects do not add up to 100! Quitting.')
- log.error('Warning: The weights of your preferred projects do not add up to 100! Quitting.')
- input('Press enter to exit')
+ total_found_values += found_value
+ if total_found_values != 100 and len(preferred_projects) > 0:
+ print(
+ "Warning: The weights of your preferred projects do not add up to 100! Quitting."
+ )
+ log.error(
+ "Warning: The weights of your preferred projects do not add up to 100! Quitting."
+ )
+ input("Press enter to exit")
quit()
# Establish connections to BOINC and Gridcoin clients, get basic info
@@ -2462,194 +3481,316 @@ def create_default_database()->Dict[str,Any]:
try:
boinc_client = BoincClientConnection(config_dir=boinc_data_dir)
except Exception as e:
- print('Unable to connect to a BOINC client. Are you sure BOINC is running? Error ' + str(e))
- log.error('Unable to connect to a BOINC client. Are you sure BOINC is running? Error ' + str(e))
- input('Press enter to exit')
+ print(
+ "Unable to connect to a BOINC client. Are you sure BOINC is running? Error "
+ + str(e)
+ )
+ log.error(
+ "Unable to connect to a BOINC client. Are you sure BOINC is running? Error "
+ + str(e)
+ )
+ input("Press enter to exit")
quit()
if wallet_running:
try:
gridcoin_conf = get_config_parameters(gridcoin_data_dir)
except Exception as e:
- print('Error parsing gridcoin config file in directory: '+gridcoin_data_dir+' Error: '+str(e))
- log.error('Error parsing gridcoin config file in directory: ' + gridcoin_data_dir + ' Error: ' + str(e))
- wallet_running=False
+ print(
+ "Error parsing gridcoin config file in directory: "
+ + gridcoin_data_dir
+ + " Error: "
+ + str(e)
+ )
+ log.error(
+ "Error parsing gridcoin config file in directory: "
+ + gridcoin_data_dir
+ + " Error: "
+ + str(e)
+ )
+ wallet_running = False
rpc_user = None
gridcoin_rpc_password = None
rpc_port = None
else:
- #Get project lists from Gridcoin wallet
- rpc_user = gridcoin_conf.get('rpcuser')
- gridcoin_rpc_password = gridcoin_conf.get('rpcpassword')
- rpc_port = gridcoin_conf.get('rpcport')
+ # Get project lists from Gridcoin wallet
+ rpc_user = gridcoin_conf.get("rpcuser")
+ gridcoin_rpc_password = gridcoin_conf.get("rpcpassword")
+ rpc_port = gridcoin_conf.get("rpcport")
if not rpc_user or not gridcoin_rpc_password or not rpc_port:
- print('Error: Gridcoin wallet is not configured to accept RPC commands based on config file from ' + str(
- gridcoin_data_dir))
- log.error('Error: Gridcoin wallet is not configured to accept RPC commands based on config file from ' + str(
- gridcoin_data_dir))
print(
- 'RPC commands enable us to talk to the Gridcoin client and get information about project magnitude ratios')
- print('Would you like us to automatically configure your Gridcoin client to accept RPC commands?')
- print('It will be configured to only accept commands from your machine.')
- print('If you do not enable this, this script can only update its information about project magnitudes once a day through an external website')
- print('This can cause inefficient crunching and is not advised')
+ "Error: Gridcoin wallet is not configured to accept RPC commands based on config file from "
+ + str(gridcoin_data_dir)
+ )
+ log.error(
+ "Error: Gridcoin wallet is not configured to accept RPC commands based on config file from "
+ + str(gridcoin_data_dir)
+ )
+ print(
+ "RPC commands enable us to talk to the Gridcoin client and get information about project magnitude ratios"
+ )
+ print(
+ "Would you like us to automatically configure your Gridcoin client to accept RPC commands?"
+ )
+ print("It will be configured to only accept commands from your machine.")
+ print(
+ "If you do not enable this, this script can only update its information about project magnitudes once a day through an external website"
+ )
+ print("This can cause inefficient crunching and is not advised")
print('Please answer "Y" or "N" without quotes. Then press the enter key')
answer = input("")
- log.debug('User input: '+answer)
- while answer not in ['Y', 'N']:
- print('Error: Y or N not entered. Try again please :)')
+ log.debug("User input: " + answer)
+ while answer not in ["Y", "N"]:
+ print("Error: Y or N not entered. Try again please :)")
answer = input("")
if answer == "N":
- print('Ok, we won\'t')
+ print("Ok, we won't")
else:
- with open(os.path.join(gridcoin_data_dir, 'gridcoinresearch.conf'), "a") as myfile:
+ with open(
+ os.path.join(gridcoin_data_dir, "gridcoinresearch.conf"), "a"
+ ) as myfile:
from random import choice
from string import ascii_uppercase
from string import ascii_lowercase
from string import digits
- rpc_user = ''.join(choice(ascii_uppercase) for i in range(8))
- gridcoin_rpc_password = ''.join(choice(ascii_uppercase+ascii_lowercase+digits) for i in range(12))
+
+ rpc_user = "".join(choice(ascii_uppercase) for i in range(8))
+ gridcoin_rpc_password = "".join(
+ choice(ascii_uppercase + ascii_lowercase + digits)
+ for i in range(12)
+ )
rpc_port = 9876
- print('Your RPC username is: ' + rpc_user)
- print('Your RPC password is: ' + gridcoin_rpc_password)
- print('You don\'t need to remember these.')
- print('Modifying config file...')
+ print("Your RPC username is: " + rpc_user)
+ print("Your RPC password is: " + gridcoin_rpc_password)
+ print("You don't need to remember these.")
+ print("Modifying config file...")
myfile.write("rpcport=9876\n")
myfile.write("server=1\n")
- myfile.write("rpcuser=" + rpc_user + '\n')
- myfile.write("rpcpassword=" + gridcoin_rpc_password + '\n')
- print('Alright, we\'ve modified the config file. Please restart the gridcoin wallet.')
- print('Once it\'s loaded and --fully-- synced, press enter to continue')
- input('')
-
- #Get project list from BOINC
- rpc_client=None
+ myfile.write("rpcuser=" + rpc_user + "\n")
+ myfile.write("rpcpassword=" + gridcoin_rpc_password + "\n")
+ print(
+ "Alright, we've modified the config file. Please restart the gridcoin wallet."
+ )
+ print("Once it's loaded and --fully-- synced, press enter to continue")
+ input("")
+
+ # Get project list from BOINC
+ rpc_client = None
try:
- rpc_client = loop.run_until_complete(setup_connection(boinc_ip,boinc_password,boinc_port)) # setup BOINC RPC connection
+ rpc_client = loop.run_until_complete(
+ setup_connection(boinc_ip, boinc_password, boinc_port)
+ ) # setup BOINC RPC connection
except Exception as e:
- print_and_log('Error: Unable to connect to BOINC client, quitting now','ERROR')
+ print_and_log("Error: Unable to connect to BOINC client, quitting now", "ERROR")
quit()
- if not rpc_client: # this was just added so pycharm would stop complaining about rpc_client not being declared
- print_and_log('Error connecting to BOINC client, quitting now', 'ERROR')
+ if (
+ not rpc_client
+ ): # this was just added so pycharm would stop complaining about rpc_client not being declared
+ print_and_log("Error connecting to BOINC client, quitting now", "ERROR")
quit()
- BOINC_PROJECT_LIST,BOINC_PROJECT_NAMES = loop.run_until_complete(get_attached_projects(rpc_client)) # get project list from BOINC client directly. This is needed for correct capitalization
- ALL_BOINC_PROJECTS=loop.run_until_complete(get_all_projects(rpc_client))
+ BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
+ get_attached_projects(rpc_client)
+ ) # get project list from BOINC client directly. This is needed for correct capitalization
+ ALL_BOINC_PROJECTS = loop.run_until_complete(get_all_projects(rpc_client))
# Get project list from Gridcoin wallet and/or gridcoinstats
- check_sidestake_results=False
- foundation_address = 'bc3NA8e8E3EoTL1qhRmeprbjWcmuoZ26A2'
- developer_address = 'RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K'
+ check_sidestake_results = False
+ foundation_address = "bc3NA8e8E3EoTL1qhRmeprbjWcmuoZ26A2"
+ developer_address = "RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K"
try:
- grc_client = GridcoinClientConnection(rpc_user=rpc_user,rpc_port=rpc_port,rpc_password=gridcoin_rpc_password)
+ grc_client = GridcoinClientConnection(
+ rpc_user=rpc_user, rpc_port=rpc_port, rpc_password=gridcoin_rpc_password
+ )
APPROVED_PROJECT_URLS = grc_client.get_approved_project_urls()
- mag_ratios = get_project_mag_ratios(grc_client,lookback_period)
+ mag_ratios = get_project_mag_ratios(grc_client, lookback_period)
except Exception as e:
- print_and_log('Unable to connect to Gridcoin wallet. Assuming it doesn\'t exist. Error: ','ERROR')
- log.error('{}'.format(e))
- print('It is suggested to install the Gridcoin wallet for the most up-to-date magnitude information')
- print('Otherwise, we will fetch data from gridcoinstats.eu which is limited to once per day')
- log.warning('Unable to connect to gridcoin wallet! {} Trying web-based option...'.format(e))
- wallet_running=False
+ print_and_log(
+ "Unable to connect to Gridcoin wallet. Assuming it doesn't exist. Error: ",
+ "ERROR",
+ )
+ log.error("{}".format(e))
+ print(
+ "It is suggested to install the Gridcoin wallet for the most up-to-date magnitude information"
+ )
+ print(
+ "Otherwise, we will fetch data from gridcoinstats.eu which is limited to once per day"
+ )
+ log.warning(
+ "Unable to connect to gridcoin wallet! {} Trying web-based option...".format(
+ e
+ )
+ )
+ wallet_running = False
try:
- APPROVED_PROJECT_URLS, project_resolver_dict = get_approved_project_urls_web()
- mag_ratios=get_project_mag_ratios_from_url(project_resolver_dict=project_resolver_dict)
+ (
+ APPROVED_PROJECT_URLS,
+ project_resolver_dict,
+ ) = get_approved_project_urls_web()
+ mag_ratios = get_project_mag_ratios_from_url(
+ project_resolver_dict=project_resolver_dict
+ )
except Exception as e:
- print_and_log('Error getting project URL list from URL. Are you sure it\'s open? Error: '+str(e),'ERROR')
- input('Press enter to exit')
+ print_and_log(
+ "Error getting project URL list from URL. Are you sure it's open? Error: "
+ + str(e),
+ "ERROR",
+ )
+ input("Press enter to exit")
quit()
else:
if not SCRIPTED_RUN:
# Check sidestakes
- check_sidestake_results = check_sidestake(gridcoin_conf, foundation_address, 1)
- sidestake_check(check_sidestake_results, 'FOUNDATION', foundation_address)
- check_sidestake_results = check_sidestake(gridcoin_conf, developer_address, 1)
- sidestake_check(check_sidestake_results, 'DEVELOPER', developer_address)
+ check_sidestake_results = check_sidestake(
+ gridcoin_conf, foundation_address, 1
+ )
+ sidestake_check(check_sidestake_results, "FOUNDATION", foundation_address)
+ check_sidestake_results = check_sidestake(
+ gridcoin_conf, developer_address, 1
+ )
+ sidestake_check(check_sidestake_results, "DEVELOPER", developer_address)
print(
- 'Welcome to FindTheMag and thank you for trying out this tool. Your feedback and suggestions are welcome on the github page : )')
- check_sidestake_results = check_sidestake(gridcoin_conf, developer_address, 1)
+ "Welcome to FindTheMag and thank you for trying out this tool. Your feedback and suggestions are welcome on the github page : )"
+ )
+ check_sidestake_results = check_sidestake(
+ gridcoin_conf, developer_address, 1
+ )
# Get project list from BOINC
try:
ALL_PROJECT_URLS = boinc_client.get_project_list()
except Exception as e:
- print_and_log('Error getting project URL list from BOINC '+str(e),'ERROR')
-
- combined_stats,final_project_weights,total_preferred_weight,total_mining_weight,dev_project_weights=generate_stats(APPROVED_PROJECT_URLS=APPROVED_PROJECT_URLS,preferred_projects=preferred_projects,ignored_projects=ignored_projects,quiet=True,mag_ratios=mag_ratios)
- log.debug('Printing pretty stats...')
+ print_and_log("Error getting project URL list from BOINC " + str(e), "ERROR")
+
+ (
+ combined_stats,
+ final_project_weights,
+ total_preferred_weight,
+ total_mining_weight,
+ dev_project_weights,
+ ) = generate_stats(
+ APPROVED_PROJECT_URLS=APPROVED_PROJECT_URLS,
+ preferred_projects=preferred_projects,
+ ignored_projects=ignored_projects,
+ quiet=True,
+ mag_ratios=mag_ratios,
+ )
+ log.debug("Printing pretty stats...")
# calculate starting efficiency stats
- if 'STARTMAGHR' not in DATABASE:
- DATABASE['STARTMAGHR']=get_avg_mag_hr(combined_stats)
+ if "STARTMAGHR" not in DATABASE:
+ DATABASE["STARTMAGHR"] = get_avg_mag_hr(combined_stats)
else:
- original_avg_mag_hr=DATABASE['STARTMAGHR']
- current_avg_mag_hr=get_avg_mag_hr(combined_stats)
- if current_avg_mag_hr>original_avg_mag_hr and original_avg_mag_hr!=0:
- percent_increase=((current_avg_mag_hr-original_avg_mag_hr)/original_avg_mag_hr)*100
- print('When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}, a {}% increase!'.format(
- original_avg_mag_hr, current_avg_mag_hr,percent_increase))
+ original_avg_mag_hr = DATABASE["STARTMAGHR"]
+ current_avg_mag_hr = get_avg_mag_hr(combined_stats)
+ if current_avg_mag_hr > original_avg_mag_hr and original_avg_mag_hr != 0:
+ percent_increase = (
+ (current_avg_mag_hr - original_avg_mag_hr) / original_avg_mag_hr
+ ) * 100
+ print(
+ "When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}, a {}% increase!".format(
+ original_avg_mag_hr, current_avg_mag_hr, percent_increase
+ )
+ )
else:
- print('When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}'.format(
- original_avg_mag_hr, current_avg_mag_hr))
- #generate table to print pretty
- table_dict={}
- for project_url,stats_dict in combined_stats.items():
- table_dict[project_url]={}
- for stat_name,stat_value in stats_dict['COMPILED_STATS'].items():
- rounding=2
- if stat_name=='MAGPERCREDIT':
- rounding=5
- table_dict[project_url][stat_name]=str(round(float(stat_value),rounding))
- print('')
- if len(table_dict)>0:
- print('SOME PRETTY STATS JUST FOR YOU, SORTED BY AVERAGE MAG/HOUR')
- print_table(table_dict,sortby='AVGMAGPERHOUR')
+ print(
+ "When you started using this tool, your average mag/hr was: {:.4f} now it is {:.4f}".format(
+ original_avg_mag_hr, current_avg_mag_hr
+ )
+ )
+ # generate table to print pretty
+ table_dict = {}
+ for project_url, stats_dict in combined_stats.items():
+ table_dict[project_url] = {}
+ for stat_name, stat_value in stats_dict["COMPILED_STATS"].items():
+ rounding = 2
+ if stat_name == "MAGPERCREDIT":
+ rounding = 5
+ table_dict[project_url][stat_name] = str(round(float(stat_value), rounding))
+ print("")
+ if len(table_dict) > 0:
+ print("SOME PRETTY STATS JUST FOR YOU, SORTED BY AVERAGE MAG/HOUR")
+ print_table(table_dict, sortby="AVGMAGPERHOUR")
else:
- print('Not enough stats to print a table of them yet, guessing this is a new BOINC install?')
- print('Total project weight will be 1000. We will reserve a minimum .01% of processing power for monitoring each project')
- print_and_log('Total weight for preferred projects is ' + str(round(float(total_preferred_weight),2)),'INFO')
- print_and_log('Total weight for mining projects is ' + str(round(float(total_mining_weight),2)),'INFO')
- print_and_log('FINAL SUGGESTED PROJECT WEIGHTS','INFO')
- for project,weight in final_project_weights.items():
- print_and_log(project.lower()+': '+str(weight),'INFO')
+ print(
+ "Not enough stats to print a table of them yet, guessing this is a new BOINC install?"
+ )
+ print(
+ "Total project weight will be 1000. We will reserve a minimum .01% of processing power for monitoring each project"
+ )
+ print_and_log(
+ "Total weight for preferred projects is "
+ + str(round(float(total_preferred_weight), 2)),
+ "INFO",
+ )
+ print_and_log(
+ "Total weight for mining projects is "
+ + str(round(float(total_mining_weight), 2)),
+ "INFO",
+ )
+ print_and_log("FINAL SUGGESTED PROJECT WEIGHTS", "INFO")
+ for project, weight in final_project_weights.items():
+ print_and_log(project.lower() + ": " + str(weight), "INFO")
if check_sidestake_results:
- print('~~---***Wow THANK YOU for sidestaking to our development. You rock!***---~~~')
- print('Yeeeehaw! We\'re going to the pony store!')
- print('This also means 100% of the crunching time on this machine will be under your account, no need to crunch for developer')
- print("""
+ print(
+ "~~---***Wow THANK YOU for sidestaking to our development. You rock!***---~~~"
+ )
+ print("Yeeeehaw! We're going to the pony store!")
+ print(
+ "This also means 100% of the crunching time on this machine will be under your account, no need to crunch for developer"
+ )
+ print(
+ """
--- ,--,
---- _ ___/ /\|
----- ;( )__, )
----- ; // '--;
---- \ |
---- v v""")
+--- v v"""
+ )
else:
- print('If you\'d like to say thank you to the developers of this tool, please help us buy our next round of energy drinks by sending GRC to:')
+ print(
+ "If you'd like to say thank you to the developers of this tool, please help us buy our next round of energy drinks by sending GRC to:"
+ )
print(developer_address)
if not control_boinc and not SCRIPTED_RUN:
- input('Press enter key or CTRL+C to quit')
+ input("Press enter key or CTRL+C to quit")
quit()
else:
if not SCRIPTED_RUN:
- print('Press enter key to start controlling BOINC. Press Ctrl+C to quit')
+ print("Press enter key to start controlling BOINC. Press Ctrl+C to quit")
if not SCRIPTED_RUN:
answer = input("")
- print_and_log('Starting control of BOINC...','DEBUG')
+ print_and_log("Starting control of BOINC...", "DEBUG")
if "DARWIN" in found_platform.upper() and not check_sidestake_results:
- print_and_log('Sidestaking must be setup for BOINC control on OS X as "crunch for dev" is not an option. Re-run the script to set this up.','ERROR')
+ print_and_log(
+ 'Sidestaking must be setup for BOINC control on OS X as "crunch for dev" is not an option. Re-run the script to set this up.',
+ "ERROR",
+ )
quit()
# Backup user preferences.
try:
- shutil.copy(override_path,override_dest_path)
+ shutil.copy(override_path, override_dest_path)
except Exception as e:
- log.warning('global_prefs_override.xml does not appear to exist, not backing up. Some users may not have one. Error: {}'.format(e))
+ log.warning(
+ "global_prefs_override.xml does not appear to exist, not backing up. Some users may not have one. Error: {}".format(
+ e
+ )
+ )
verification_result = loop.run_until_complete(verify_boinc_connection(rpc_client))
if not verification_result:
- print_and_log('Error connecting to BOINC client, does your gui_rpc_auth.cfg specify a password or a non-standard port?\n If so, be sure to include it in your config.py','ERROR')
- print('You can find your gui_rpc_auth.cfg at {}'.format(auth_location))
- print('Linux users: make sure your username is in the BOINC group so FTM can access your BOINC config file')
- print('sudo usermod -aG boinc your_username_here')
- print('Note that you will need to restart your computer after changing your group permissions')
- answer=input('Press enter to quit')
+ print_and_log(
+ "Error connecting to BOINC client, does your gui_rpc_auth.cfg specify a password or a non-standard port?\n If so, be sure to include it in your config.py",
+ "ERROR",
+ )
+ print("You can find your gui_rpc_auth.cfg at {}".format(auth_location))
+ print(
+ "Linux users: make sure your username is in the BOINC group so FTM can access your BOINC config file"
+ )
+ print("sudo usermod -aG boinc your_username_here")
+ print(
+ "Note that you will need to restart your computer after changing your group permissions"
+ )
+ answer = input("Press enter to quit")
quit()
loop.run_until_complete(prefs_check(rpc_client))
# NNT all projects
@@ -2657,12 +3798,16 @@ def create_default_database()->Dict[str,Any]:
# Abort unstarted tasks if the user requested it
if abort_unstarted_tasks:
tasks_list = loop.run_until_complete(get_task_list(rpc_client))
- loop.run_until_complete(kill_all_unstarted_tasks(rpc_client,task_list=tasks_list))
+ loop.run_until_complete(
+ kill_all_unstarted_tasks(rpc_client, task_list=tasks_list)
+ )
priority_results = {}
- highest_priority_project=''
- highest_priority_projects=[]
- DATABASE['STATSLASTCALCULATED']=datetime.datetime(1997, 3, 3) # force calculation of stats at first run since they are not cached in DB
+ highest_priority_project = ""
+ highest_priority_projects = []
+ DATABASE["STATSLASTCALCULATED"] = datetime.datetime(
+ 1997, 3, 3
+ ) # force calculation of stats at first run since they are not cached in DB
# While we don't have enough tasks, continue cycling through project list and updating. If we have cycled through all projects, get_highest_priority_project will stall to prevent requesting too often
- boinc_loop(False,rpc_client)
+ boinc_loop(False, rpc_client)
# Restore user prefs
- safe_exit(None,None)
\ No newline at end of file
+ safe_exit(None, None)
From 69e6484157a5cc63d11c1ef6a5c697b191e1cedc Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Tue, 16 May 2023 22:34:44 -0700
Subject: [PATCH 03/23] Initial formatting of comments to Black line length
limit.
---
main.py | 118 +++++++++++++++++++++++++++++++++++++++-----------------
1 file changed, 82 insertions(+), 36 deletions(-)
diff --git a/main.py b/main.py
index beae69c..946768e 100644
--- a/main.py
+++ b/main.py
@@ -42,7 +42,9 @@
boinc_port: int = 31416
boinc_username: Union[str, None] = None
boinc_password: Union[str, None] = None
-min_recheck_time: int = 30 # minimum time in minutes before re-asking a project for work who previously said they were out
+# minimum time in minutes before re-asking a project for work who previously said
+# they were out
+min_recheck_time: int = 30
abort_unstarted_tasks: bool = False
recalculate_stats_interval: int = 60
price_check_interval: int = 720
@@ -86,7 +88,8 @@
SCRIPTED_RUN: bool = False
SKIP_TABLE_UPDATES: bool = False
HOST_COST_PER_HOUR = (host_power_usage / 1000) * local_kwh
-# Translates BOINC's CPU and GPU Mode replies into English. Note difference between keys integer vs string.
+# Translates BOINC's CPU and GPU Mode replies into English. Note difference between
+# keys integer vs string.
CPU_MODE_DICT = {1: "always", 2: "auto", 3: "never"}
GPU_MODE_DICT = {"1": "always", "2": "auto", "3": "never"}
DEV_BOINC_PASSWORD = "" # this is only used for printing to table, not used elsewhere
@@ -237,9 +240,11 @@ def safe_exit(arg1, arg2) -> None:
arg1/2 required by the signal handler library, but aren't used for anything inside this function
"""
+ # This is needed in case this function is called while main loop is still waiting
+ # for an RPC command etc
new_loop = (
asyncio.get_event_loop()
- ) # this is needed in case this function is called while main loop is still waiting for an RPC command etc
+ )
print_and_log("Program exiting gracefully", "INFO")
# Backup most recent database save then save database to json file
@@ -320,7 +325,8 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
reply = await run_rpc_command(rpc_client, "get_cc_status")
task_suspend_reason = int(reply["task_suspend_reason"])
if task_suspend_reason != 0:
- # These are documented at https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt
+ # These are documented at
+ # https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt
log.debug(
"Determined BOINC client is not crunching task_suspend_reason: {}".format(
task_suspend_reason
@@ -616,7 +622,8 @@ def xfers_happening(xfer_list: list) -> bool:
log.warning("Found xfer with unknown status: " + str(xfer))
return False
- # Every ten seconds we will request the list of file transfers from BOINC until there are none left
+ # Every ten seconds we will request the list of file transfers from BOINC until
+ # there are none left
while current_loops < max_loops:
current_loops += 1
# Ask BOINC for a list of file transfers
@@ -1023,7 +1030,8 @@ def config_files_to_stats(
)
stat_list = stat_file_to_list(statsfile)
log.debug("In statsfile for " + project_url)
- # Compute the first and last date in the stats file. Currently not used but does work
+ # Compute the first and last date in the stats file. Currently not used but
+ # does work
startdate = str(
datetime.datetime.fromtimestamp(float(stat_list[0]["STARTTIME"])).strftime(
"%m-%d-%Y"
@@ -1881,7 +1889,9 @@ def cache_full(project_name: str, messages) -> bool:
else:
if (
not gpu_full
- ): # if GPU is not mentioned in log, this would always happen so using this to stop erroneous messages
+ ):
+ # if GPU is not mentioned in log, this would always
+ # happen so using this to stop erroneous messages
# print('GPU cache appears not full {}'.format(message['body']))
log.debug(
"GPU cache appears not full {}".format(
@@ -1938,7 +1948,8 @@ def project_backoff(project_name: str, messages) -> bool:
Returns TRUE if project should be backed off. False otherwise or if unable to determine
"""
# Phrases which indicate project SHOULD be backed off
- # removed 'project requested delay' from positive phrases because projects always provide this, even if work was provided!
+ # - removed 'project requested delay' from positive phrases because
+ # projects always provide this, even if work was provided!
positive_phrases = [
"project has no tasks available",
"scheduler request failed",
@@ -2052,7 +2063,9 @@ async def get_attached_projects(
found_projects.append(project.master_url)
if isinstance(
project.project_name, bool
- ): # this happens if project is "attached" but unable to communicate w project due to it being down or some other issue
+ ):
+ # this happens if project is "attached" but unable to communicate with
+ # the project due to it being down or some other issue
project_names[project.master_url] = project.master_url
else:
project_names[project.master_url] = project.project_name
@@ -2604,7 +2617,8 @@ def boinc_loop(
"""
if not client_rpc_client:
client_rpc_client = rpc_client
- # these variables are referenced outside the loop (or in recursive calls of the loop) so should be made global
+ # These variables are referenced outside the loop
+ # (or in recursive calls of the loop) so should be made global
global combined_stats
global final_project_weights
global total_preferred_weight
@@ -2674,7 +2688,8 @@ def update_table(
Function to update table printed to user.
:param status = Most recent status "waiting for xfers, starting crunching on x, etc"
"""
- # don't update table in dev loop because all our variables reference dev install not main one
+ # don't update table in dev loop because all our variables reference
+ # dev install not main one
if dev_loop or SKIP_TABLE_UPDATES:
return
rename_dict = {
@@ -2743,7 +2758,8 @@ def update_table(
if DATABASE.get("DEVTIMECOUNTER", 0) < 1 and not FORCE_DEV_MODE and dev_loop:
return None
- # Re-authorize in case we have become de-authorized since last run. This is put in a try loop b/c sometimes it throws exceptions
+ # Re-authorize in case we have become de-authorized since last run. This is
+ # put in a try loop b/c sometimes it throws exceptions
while True:
try:
authorize_response = loop.run_until_complete(rpc_client.authorize())
@@ -2769,7 +2785,8 @@ def update_table(
log.debug("Calculating stats..")
DATABASE["STATSLASTCALCULATED"] = datetime.datetime.now()
combined_stats = config_files_to_stats(boinc_data_dir)
- # total_time = combined_stats_to_total_time(combined_stats) # Not sure what this line did but commented out, we'll see if anything breaks
+ # Not sure what this line did but commented out, we'll see if anything breaks
+ #total_time = combined_stats_to_total_time(combined_stats)
(
combined_stats,
final_project_weights,
@@ -2814,7 +2831,8 @@ def update_table(
DATABASE["GRCPRICE"] = grc_price
else:
grc_price = DATABASE["GRCPRICE"]
- # Check profitability of all projects, if none profitable (and user doesn't want unprofitable crunching), sleep for 1hr
+ # Check profitability of all projects, if none profitable
+ # (and user doesn't want unprofitable crunching), sleep for 1hr
if only_BOINC_if_profitable and not dev_loop:
profitability_list = []
for project in highest_priority_projects:
@@ -2852,7 +2870,8 @@ def update_table(
sleep(60 * 60)
continue
- # If we have enabled temperature control, verify that crunching is allowed at current temp
+ # If we have enabled temperature control, verify that crunching is
+ # allowed at current temp
if enable_temp_control:
# Get BOINC's starting CPU and GPU modes
existing_mode_info = loop.run_until_complete(
@@ -2877,7 +2896,8 @@ def update_table(
if not temp_check():
while True: # Keep sleeping until we pass a temp check
log.debug("Sleeping due to temperature")
- # Put BOINC into sleep mode, automatically reverting if script closes unexpectedly
+ # Put BOINC into sleep mode, automatically reverting if
+ # script closes unexpectedly
sleep_interval = str(int(((60 * temp_sleep_time) + 60)))
loop.run_until_complete(
run_rpc_command(
@@ -2913,7 +2933,8 @@ def update_table(
if boinc_password == "ERROR":
log.error("Error setting up crunching to developer account")
else:
- # setup dev RPC connection, it may take a few tries while we wait for it to come online
+ # setup dev RPC connection, it may take a few tries while we
+ # wait for it to come online
tries = 1
tries_max = 5
dev_rpc_client = None
@@ -2938,9 +2959,12 @@ def update_table(
if tries > tries_max:
log.error("Giving up on connecting to BOINC dev client")
if dev_rpc_client:
- # Set main BOINC to suspend until we're done crunching in dev mode. It will automatically re-enable itself in 100x the time if nothing is done
- # This allows for non-graceful exits of this script to not brick client's BOINC and considerations that dev account may not be crunching full time if client
- # is actively using computer.
+ # Set main BOINC to suspend until we're done crunching in dev mode.
+ # It will automatically re-enable itself in 100x the time if nothing
+ # is done.
+ # This allows for non-graceful exits of this script to not brick
+ # client's BOINC and considerations that dev account may not be
+ # crunching full time if client is actively using computer.
existing_mode_info = loop.run_until_complete(
run_rpc_command(rpc_client, "get_cc_status")
)
@@ -3003,8 +3027,8 @@ def update_table(
run_rpc_command(rpc_client, "set_run_mode", existing_cpu_mode)
)
- # loop through each project in order of priority and request new tasks if not backed off
- # stopping looping if cache becomes full
+ # loop through each project in order of priority and request new tasks if
+ # not backed off, stopping looping if cache becomes full
dont_nnt = None
if dev_loop:
project_loop = dev_project_weights
@@ -3046,7 +3070,8 @@ def update_table(
)
)
continue
- # If user has set to only mine highest mag project if profitable and it's not profitable or in benchmarking mode, skip
+ # If user has set to only mine highest mag project if profitable and
+ # it's not profitable or in benchmarking mode, skip
if (
only_mine_if_profitable
and not profitability_result
@@ -3066,12 +3091,15 @@ def update_table(
)
continue
+ # make sure we are using correct URL, BOINC requires capitalization to
+ # be exact
highest_priority_project = resolve_boinc_url(
highest_priority_project, ALL_BOINC_PROJECTS
- ) # make sure we are using correct URL, BOINC requires capitalization to be exact
+ )
if highest_priority_project.upper() not in DATABASE[mode]:
DATABASE[mode][highest_priority_project.upper()] = {}
- # skip checking project if we have a backoff counter going and it hasn't been long enough
+ # skip checking project if we have a backoff counter going and it
+ # hasn't been long enough
time_since_last_project_check = datetime.datetime.now() - DATABASE[mode][
highest_priority_project.upper()
].get("LAST_CHECKED", datetime.datetime(1997, 6, 21, 18, 25, 30))
@@ -3107,9 +3135,11 @@ def update_table(
# on first run, there is no project list
if isinstance(get_project_list, list):
+ # convert to simple list of strings so we can check if
+ # project URL is in list
converted_project_list = project_list_to_project_list(
get_project_list
- ) # convert to simple list of strings so we can check if project URL is in list
+ )
else:
log.warning(
"Dev BOINC shows empty project list, this is normal on first run"
@@ -3184,15 +3214,18 @@ def update_table(
+ str(boincified_url)
)
log.debug("Update response is {}".format(update_response))
+ # give BOINC time to update w project, I don't know a less hacky way to
+ # do this, suggestions are welcome
sleep(
15
- ) # give BOINC time to update w project, I don't know a less hacky way to do this, suggestions are welcome
+ )
DATABASE[mode][highest_priority_project.upper()][
"LAST_CHECKED"
] = datetime.datetime.now()
# check if project should be backed off. If so, back it off.
# This is an exponentially increasing backoff with a maximum time of 1 day
- # Projects are backed off if they request it, if they are unresponsive/down, or if no work is available
+ # Projects are backed off if they request it, if they are
+ # unresponsive/down, or if no work is available
backoff_response = loop.run_until_complete(
check_log_entries_for_backoff(rpc_client, project_name=project_name)
)
@@ -3215,7 +3248,10 @@ def update_table(
if (
not dont_nnt
- ): # if we didn't get a backoff signal and we haven't picked a project to leave non-NNTed during sleeping of loop, pick this one for that purpose
+ ):
+ # if we didn't get a backoff signal and we haven't picked
+ # a project to leave non-NNTed during sleeping of loop,
+ # pick this one for that purpose
dont_nnt = highest_priority_project.upper()
# re-NNT all projects
@@ -3236,7 +3272,8 @@ def update_table(
break
# Allow highest non-backedoff project to be non-NNTd.
- # This enables BOINC to fetch work if it's needed before our sleep period elapses
+ # This enables BOINC to fetch work if it's needed before our
+ # sleep period elapses
if dont_nnt:
allow_this_project = resolve_boinc_url_new(dont_nnt)
allow_response = loop.run_until_complete(
@@ -3247,9 +3284,10 @@ def update_table(
allow_this_project,
)
)
+ # There's no reason to loop through all projects more than once every 30 minutes
custom_sleep(
30, rpc_client, dev_loop=dev_loop
- ) # There's no reason to loop through all projects more than once every 30 minutes
+ )
def print_and_log(msg: str, log_level: str) -> None:
@@ -3304,9 +3342,11 @@ def create_default_database() -> Dict[str, Any]:
del python_major
log.debug("Python version {}".format(platform.python_version()))
+    # shut down dev client if it's running. This is useful if program shuts
+ # down unexpectedly
shutdown_dev_client(
quiet=True
- ) # shut down dev client is it's running. This is useful if program shuts down unexpectedly
+ )
# Load long-term stats
if os.path.exists("stats.json"):
@@ -3584,12 +3624,15 @@ def create_default_database() -> Dict[str, Any]:
quit()
if (
not rpc_client
- ): # this was just added so pycharm would stop complaining about rpc_client not being declared
+ ): # this was just added so pycharm would stop complaining about
+ # rpc_client not being declared
print_and_log("Error connecting to BOINC client, quitting now", "ERROR")
quit()
+ # get project list from BOINC client directly. This is needed for
+ # correct capitalization
BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
get_attached_projects(rpc_client)
- ) # get project list from BOINC client directly. This is needed for correct capitalization
+ )
ALL_BOINC_PROJECTS = loop.run_until_complete(get_all_projects(rpc_client))
# Get project list from Gridcoin wallet and/or gridcoinstats
@@ -3804,10 +3847,13 @@ def create_default_database() -> Dict[str, Any]:
priority_results = {}
highest_priority_project = ""
highest_priority_projects = []
+ # force calculation of stats at first run since they are not cached in DB
DATABASE["STATSLASTCALCULATED"] = datetime.datetime(
1997, 3, 3
- ) # force calculation of stats at first run since they are not cached in DB
- # While we don't have enough tasks, continue cycling through project list and updating. If we have cycled through all projects, get_highest_priority_project will stall to prevent requesting too often
+ )
+ # While we don't have enough tasks, continue cycling through project list and
+ # updating. If we have cycled through all projects, get_highest_priority_project
+ # will stall to prevent requesting too often
boinc_loop(False, rpc_client)
# Restore user prefs
safe_exit(None, None)
From d17e62f6b2c6f56c177b404bbb81b84f9a409a48 Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Fri, 19 May 2023 22:28:21 -0700
Subject: [PATCH 04/23] Tweaked and formatted comments
---
main.py | 230 ++++++++++++++++++++++++++++----------------------------
1 file changed, 115 insertions(+), 115 deletions(-)
diff --git a/main.py b/main.py
index 946768e..ab39816 100644
--- a/main.py
+++ b/main.py
@@ -26,7 +26,7 @@
from typing import List, Union, Dict, Tuple, Any
import sys, signal
-# ignore deprecation warnings in Windows
+# Ignore deprecation warnings in Windows
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
@@ -42,7 +42,7 @@
boinc_port: int = 31416
boinc_username: Union[str, None] = None
boinc_password: Union[str, None] = None
-# minimum time in minutes before re-asking a project for work who previously said
+# Minimum time in minutes before re-asking a project for work who previously said
# they were out
min_recheck_time: int = 30
abort_unstarted_tasks: bool = False
@@ -75,16 +75,16 @@
# Some globals we need. I try to have all globals be ALL CAPS
FORCE_DEV_MODE = (
- False # used for debugging purposes to force crunching under dev account
+ False # Used for debugging purposes to force crunching under dev account
)
BOINC_PROJECT_NAMES = {}
DATABASE = {}
DATABASE[
"TABLE_SLEEP_REASON"
-] = "" # sleep reason printed in table, must be reset at script start
+] = "" # Sleep reason printed in table, must be reset at script start
DATABASE[
"TABLE_STATUS"
-] = "" # info status printed in table, must be reset at script start
+] = "" # Info status printed in table, must be reset at script start
SCRIPTED_RUN: bool = False
SKIP_TABLE_UPDATES: bool = False
HOST_COST_PER_HOUR = (host_power_usage / 1000) * local_kwh
@@ -92,19 +92,19 @@
# keys integer vs string.
CPU_MODE_DICT = {1: "always", 2: "auto", 3: "never"}
GPU_MODE_DICT = {"1": "always", "2": "auto", "3": "never"}
-DEV_BOINC_PASSWORD = "" # this is only used for printing to table, not used elsewhere
+DEV_BOINC_PASSWORD = "" # This is only used for printing to table, not used elsewhere
DEV_LOOP_RUNNING = False
-# import user settings from config
+# Import user settings from config
try:
from config import *
except Exception as e:
print("Error opening config.py, using defaults! Error is: {}".format(e))
-# if user has no preferred projects, their % of crunching should be 0
+# If user has no preferred projects, their % of crunching should be 0
if len(preferred_projects) == 0:
preferred_projects_percent: float = 0
-# setup logging
+# Setup logging
log = logging.getLogger()
if log_level == "NONE":
log.addHandler(logging.NullHandler())
@@ -133,7 +133,7 @@ def __init__(
rpc_user: str = None,
rpc_password: str = None,
):
- self.configfile = config_file # absolute path to the client config file
+ self.configfile = config_file # Absolute path to the client config file
self.ipaddress = ip_address
self.rpc_port = rpc_port
self.rpcuser = rpc_user
@@ -198,7 +198,7 @@ def __init__(
if config_dir is None:
self.config_dir = "/var/lib/boinc-client"
else:
- self.config_dir = config_dir # absolute path to the client config dir
+ self.config_dir = config_dir # Absolute path to the client config dir
self.ip_address = ip_address
self.port = port
self.rpc_user = rpc_user
@@ -223,10 +223,10 @@ def shutdown_dev_client(quiet: bool = False) -> None:
try:
dev_rpc_client = exit_loop.run_until_complete(
setup_connection(boinc_ip, boinc_password, port=DEV_RPC_PORT)
- ) # setup dev BOINC RPC connection
+ ) # Setup dev BOINC RPC connection
authorize_response = exit_loop.run_until_complete(
dev_rpc_client.authorize()
- ) # authorize dev RPC connection
+ ) # Authorize dev RPC connection
shutdown_response = exit_loop.run_until_complete(
run_rpc_command(dev_rpc_client, "quit")
)
@@ -602,7 +602,7 @@ def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None:
"""
max_loops = 30
current_loops = 0
- loop_wait_in_seconds = 30 # wait this long between loops
+ loop_wait_in_seconds = 30 # Wait this long between loops
def xfers_happening(xfer_list: list) -> bool:
"""
@@ -616,7 +616,7 @@ def xfers_happening(xfer_list: list) -> bool:
if str(xfer["status"]) == "0":
if "persistent_file_xfer" in xfer:
if float(xfer["persistent_file_xfer"].get("num_retries", 0)) > 1:
- continue # assume xfers with multiple retries are stalled
+ continue # Assume xfers with multiple retries are stalled
return True
else:
log.warning("Found xfer with unknown status: " + str(xfer))
@@ -751,12 +751,12 @@ def projecturlfromstatsfile(
"""
Guess a project url from the name of a stats file
"""
- # remove extraneous information from name
+ # Remove extraneous information from name
statsfilename = statsfilename.replace("job_log_", "")
statsfilename = statsfilename.split("_")[0]
statsfilename = statsfilename.replace(".txt", "")
- # check if name is in any known URLs
+ # Check if name is in any known URLs
for knownurl in approved_project_urls:
if statsfilename.upper() in knownurl:
return knownurl
@@ -767,7 +767,7 @@ def projecturlfromstatsfile(
if statsfilename.upper() in knownurl.upper():
return (
knownurl.upper()
- ) # we have to upper these as they are not uppered by default
+ ) # We have to upper these as they are not uppered by default
print(
"WARNING: Found stats file "
+ statsfilename
@@ -801,7 +801,7 @@ def project_url_from_credit_history_file(
return knownurl
for knownurl in boinc_projects_list:
if filename.upper() in knownurl.upper():
- return knownurl.upper() # have to upper as this list is not uppered
+ return knownurl.upper() # Have to upper as this list is not uppered
print(
"WARNING: Found credit history file "
+ filename
@@ -955,7 +955,7 @@ async def run_rpc_command(
"""
full_command = "{} {} {} {}".format(
command, arg1, arg1_val, arg2, arg2_val
- ) # added for debugging purposes
+ ) # Added for debugging purposes
log.debug("Running BOINC rpc request " + full_command)
req = ET.Element(command)
if arg1 is not None:
@@ -1011,7 +1011,7 @@ def config_files_to_stats(
credit_history_files: List[str] = []
return_stats = {}
- # find files to search through, add them to lists
+ # Find files to search through, add them to lists
for file in os.listdir(config_dir_abs_path):
if "job_log" in file:
stats_files.append(os.path.join(config_dir_abs_path, file))
@@ -1065,7 +1065,7 @@ def config_files_to_stats(
wu_history[date]["TOTALWUS"] += 1
wu_history[date]["total_wall_time"] += float(wu["WALLTIME"])
wu_history[date]["total_cpu_time"] += float(wu["CPUTIME"])
- # process credit logs
+ # Process credit logs
for credit_history_file in credit_history_files:
project_url = project_url_from_credit_history_file(
os.path.basename(credit_history_file),
@@ -1118,7 +1118,7 @@ def config_files_to_stats(
if "CREDITAWARDED" not in credit_history[date]:
credit_history[date]["CREDITAWARDED"] = 0
credit_history[date]["CREDITAWARDED"] += delta_credits
- # find averages
+ # Find averages
for project_url, parent_dict in return_stats.items():
total_wus = 0
total_credit = 0
@@ -1147,9 +1147,9 @@ def config_files_to_stats(
avg_credit_per_task = 0
credits_per_hour = 0
else:
- total_cpu_time = total_cpu_time / 60 / 60 # convert to hours
- total_wall_time = total_wall_time / 60 / 60 # convert to hours
- x_day_wall_time = x_day_wall_time / 60 / 60 # convert to hours
+ total_cpu_time = total_cpu_time / 60 / 60 # Convert to hours
+ total_wall_time = total_wall_time / 60 / 60 # Convert to hours
+ x_day_wall_time = x_day_wall_time / 60 / 60 # Convert to hours
avg_wall_time = total_wall_time / total_wus
avg_cpu_time = total_cpu_time / total_wus
avg_credit_per_task = total_credit / total_wus
@@ -1235,7 +1235,7 @@ def is_eligible(project_url: str, project_stats: dict):
try:
highest_project = next(
iter(combinedstats)
- ) # first project is the "highest project" until we test others against it
+ ) # First project is the "highest project" until we test others against it
except Exception as e:
if not quiet:
print(
@@ -1248,7 +1248,7 @@ def is_eligible(project_url: str, project_stats: dict):
)
return []
- # find the highest project
+ # Find the highest project
for project_url, project_stats in combinedstats.items():
current_mag_per_hour = project_stats["COMPILED_STATS"]["AVGMAGPERHOUR"]
highest_mag_per_hour = combinedstats[highest_project]["COMPILED_STATS"][
@@ -1274,7 +1274,7 @@ def is_eligible(project_url: str, project_stats: dict):
)
return_list.append(highest_project)
- # then compare other projects to it to see if any are within 10% of it
+ # Then compare other projects to it to see if any are within 10% of it
highest_avg_mag = combinedstats[highest_project]["COMPILED_STATS"]["AVGMAGPERHOUR"]
minimum_for_inclusion = highest_avg_mag - (highest_avg_mag * 0.10)
for project_url, project_stats in combinedstats.items():
@@ -1373,7 +1373,7 @@ def get_project_mag_ratios(
if i == 0:
projects[project_name] = []
else:
- continue # skip projects which are on greylist
+ continue # Skip projects which are on greylist
projects[project_name].append(project_stats["rac"])
for project_name, project_racs in projects.items():
average_rac = sum(project_racs) / len(project_racs)
@@ -1428,10 +1428,10 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
if len(table_dict) == 0:
return
headings = []
- heading_length: Dict[str, int] = {} # length of each heading column
+ heading_length: Dict[str, int] = {} # Length of each heading column
values = {}
working_dict = copy.deepcopy(table_dict)
- # convert urls to nice names, add USD/GRC/hr
+ # Convert urls to nice names, add USD/GRC/hr
for url in list(working_dict.keys()):
name = project_url_to_name(url, ALL_BOINC_PROJECTS)
if not name:
@@ -1440,7 +1440,7 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
working_dict[name] = stats
if name != url:
del working_dict[url]
- # add usd/grc/hr to each project
+ # Add usd/grc/hr to each project
if working_dict[name].get("MAG/HR"):
grc_per_hour = float(working_dict[name].get("MAG/HR", 0)) / 4
grc_per_day = (float(working_dict[name].get("MAG/HR", 0)) / 4) * 24
@@ -1460,7 +1460,7 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
working_dict[name]["USD/HR R/P"] = "0"
del working_dict[name]["MAG/HR"]
- # figure out table headings
+ # Figure out table headings
for url, stats in working_dict.items():
for key, value in stats.items():
if key not in headings:
@@ -1475,20 +1475,20 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
longest_url = len(max(working_dict.keys(), key=len))
table_width = longest_url + len(str(values.keys()))
- # print header
- ## print first line
+ # Print header
+ ## Print first line
print("*" * table_width)
print("*" + center_align("FINDTHEMAG V2.0", table_width - 2) + "*")
print("*" * table_width)
- ## print rest of header
+ ## Print rest of header
padding_str = " " * (longest_url + 1)
print("*" + padding_str, end="|")
for heading in headings:
print(center_align(heading, heading_length[heading]) + "|", end="")
print("")
- # print contents
+ # Print contents
sortedprojects = sorted(
working_dict.keys(),
key=lambda a: float(working_dict[a].get(sortby, 0)),
@@ -1504,7 +1504,7 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
print(left_align(value, heading_length[heading]), end="|")
print("")
- # print bottom bar
+ # Print bottom bar
print("*" * table_width)
if not sleep_reason:
sleep_reason = "NONE"
@@ -1531,7 +1531,7 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
DEV_BOINC_PASSWORD
)
)
- # print improved stats
+ # Print improved stats
addl = ""
curr_avg_mag = get_avg_mag_hr(combined_stats)
if curr_avg_mag > DATABASE["STARTMAGHR"] and DATABASE["STARTMAGHR"] > 0:
@@ -1548,7 +1548,7 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
DATABASE["FTMTOTAL"] / 60, DATABASE["DEVTIMETOTAL"] / 60
)
)
- # print final line
+ # Print final line
if not check_sidestake_results:
print(
"Consider donating to this app's development directly or via sidestake: RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K. Sidestaking means you can skip crunching for dev"
@@ -1564,7 +1564,7 @@ def in_list(str, list) -> bool:
search_str = search_str.replace("WWW.", "")
search_str = search_str.replace(
"WORLDCOMMUNITYGRID.ORG/BOINC/", "WORLDCOMMUNITYGRID.ORG"
- ) # fix for WCG
+ ) # Fix for WCG
for item in list:
if search_str == item.upper() or search_str in item.upper():
return True
@@ -1600,8 +1600,8 @@ def generate_stats(
preferred_projects[url.upper()] = weight
ignored_projects = [
x.upper() for x in ignored_projects
- ] # uppercase ignored project url list
- # ignore unattached projects if requested
+ ] # Uppercase ignored project url list
+ # Ignore unattached projects if requested
if ignore_unattached:
for project in APPROVED_PROJECT_URLS:
if not in_list(project, attached_list):
@@ -1642,13 +1642,13 @@ def generate_stats(
total_preferred_weight = (preferred_projects_percent / 100) * 1000
total_mining_weight = 1000 - total_preferred_weight
total_mining_weight_remaining = total_mining_weight
- # assign weight of 1 to all projects which didn't make the cut
+ # Assign weight of 1 to all projects which didn't make the cut
for project_url in APPROVED_PROJECT_URLS:
preferred_extract = get_project_from_dict(
project_url, preferred_projects, "IGNOREME"
)
if preferred_extract:
- continue # exclude preferred projects
+ continue # Exclude preferred projects
if project_url in ignored_projects:
final_project_weights[project_url] = 0
dev_project_weights[project_url] = 0
@@ -1767,7 +1767,7 @@ async def kill_all_unstarted_tasks(
b = ET.SubElement(req, "name")
b.text = name
response = await rpc_client._request(req)
- parsed = parse_generic(response) # returns True if successful
+ parsed = parse_generic(response) # Returns True if successful
a = "21"
else:
# print('Keeping task {}'.format(task))
@@ -1784,7 +1784,7 @@ async def nnt_all_projects(rpc_client: libs.pyboinc.rpc_client):
a = ET.SubElement(req, "project_url")
a.text = project
response = await rpc_client._request(req)
- parsed = parse_generic(response) # returns True if successful
+ parsed = parse_generic(response) # Returns True if successful
async def check_log_entries(
@@ -1845,7 +1845,7 @@ def cache_full(project_name: str, messages) -> bool:
if project_name.upper() not in str(message).upper():
continue
difference = datetime.datetime.now() - message["time"]
- if difference.seconds > 60 * 5: # if message is > 5 min old, skip
+ if difference.seconds > 60 * 5: # If message is > 5 min old, skip
continue
if project_name.upper() == message["project"].upper():
if (
@@ -1853,7 +1853,7 @@ def cache_full(project_name: str, messages) -> bool:
in message["body"].upper()
):
if "GPU" not in message["body"].upper():
- gpu_full = True # if no GPU, GPU cache is always full
+ gpu_full = True # If no GPU, GPU cache is always full
if (
"CPU: job cache full".upper() in message["body"].upper()
or "Not requesting tasks: don't need (job cache full)".upper()
@@ -1890,7 +1890,7 @@ def cache_full(project_name: str, messages) -> bool:
if (
not gpu_full
):
- # if GPU is not mentioned in log, this would always
+ # If GPU is not mentioned in log, this would always
# happen so using this to stop erroneous messages
# print('GPU cache appears not full {}'.format(message['body']))
log.debug(
@@ -1913,9 +1913,9 @@ def cache_full(project_name: str, messages) -> bool:
message_count = int(parse_generic(msg_count_response))
req = ET.Element("get_messages")
a = ET.SubElement(req, "seqno")
- a.text = str(message_count - 50) # get ten most recent messages
+ a.text = str(message_count - 50) # Get the 50 most recent messages
messages_response = await rpc_client._request(req)
- messages = parse_generic(messages_response) # returns True if successful
+ messages = parse_generic(messages_response) # Returns True if successful
if cache_full(project_name, messages):
return True
return False
@@ -1985,7 +1985,7 @@ def project_backoff(project_name: str, messages) -> bool:
if project_name.upper() not in str(message).upper():
continue
difference = datetime.datetime.now() - message["time"]
- if difference.seconds > 60 * 5: # if message is > 5 min old, skip
+ if difference.seconds > 60 * 5: # If message is > 5 min old, skip
continue
if ignore_message(message, ignore_phrases):
continue
@@ -2024,12 +2024,12 @@ def project_backoff(project_name: str, messages) -> bool:
message_count = int(parse_generic(msg_count_response))
req = ET.Element("get_messages")
a = ET.SubElement(req, "seqno")
- a.text = str(message_count - 50) # get ten most recent messages
+ a.text = str(message_count - 50) # Get the 50 most recent messages
messages_response = await rpc_client._request(req)
- messages = parse_generic(messages_response) # returns True if successful
+ messages = parse_generic(messages_response) # Returns True if successful
if project_name.upper() == "GPUGRID.NET":
project_name = (
- "GPUGRID" # fix for log entries which show up under different name
+ "GPUGRID" # Fix for log entries which show up under different name
)
return project_backoff(project_name, messages)
@@ -2042,14 +2042,14 @@ async def get_all_projects(rpc_client: libs.pyboinc.rpc_client) -> Dict[str, str
messages_response = await rpc_client._request(req)
project_status_reply = parse_generic(
messages_response
- ) # returns True if successful
+ ) # Returns True if successful
found_projects = []
project_names = {}
for project in project_status_reply:
project_names[project["url"]] = project["name"]
project_names[
"https://gene.disi.unitn.it/test/"
- ] = "TN-Grid" # added bc BOINC client does not list this project for some reason
+ ] = "TN-Grid" # Added bc BOINC client does not list this project for some reason
return project_names
@@ -2064,7 +2064,7 @@ async def get_attached_projects(
if isinstance(
project.project_name, bool
):
- # this happens if project is "attached" but unable to communicate with
+ # This happens if project is "attached" but unable to communicate with
# the project due to it being down or some other issue
project_names[project.master_url] = project.master_url
else:
@@ -2086,16 +2086,16 @@ async def verify_boinc_connection(rpc_client: libs.pyboinc.rpc_client) -> bool:
async def prefs_check(rpc_client: libs.pyboinc.rpc_client) -> dict:
- # authorize BOINC client
+ # Authorize BOINC client
authorize_response = await rpc_client.authorize()
- # get prefs
+ # Get prefs
req = ET.Element("get_global_prefs_working")
response = await rpc_client._request(req)
- parsed = parse_generic(response) # returns True if successful
- # get actual disk usage
+ parsed = parse_generic(response) # Returns True if successful
+ # Get actual disk usage
req = ET.Element("get_disk_usage")
response = await rpc_client._request(req)
- usage = parse_generic(response) # returns True if successful
+ usage = parse_generic(response) # Returns True if successful
max_gb = int(float(parsed.get("disk_max_used_gb", 0)))
used_max_gb = int(int(usage["d_allowed"]) / 1024 / 1024 / 1024)
if (max_gb < 10 and max_gb != 0) or used_max_gb < 9.5:
@@ -2145,7 +2145,7 @@ def get_highest_priority_project(
if not attached_projects:
attached_projects = []
priority_dict = {}
- # calculate total time from stats
+ # Calculate total time from stats
total_xday_time = 0
total_time = 0
for found_key, projectstats in combined_stats.items():
@@ -2178,7 +2178,7 @@ def get_highest_priority_project(
else:
if (
weight == 1
- ): # benchmarking projects should be over ALL time not just recent time
+ ): # Benchmarking projects should be over ALL time not just recent time
existing_time = combined_stats_extract["COMPILED_STATS"][
"TOTALWALLTIME"
]
@@ -2251,7 +2251,7 @@ def project_name_to_url(
if i == 0:
projects[project_name] = []
else:
- continue # skip projects which are on greylist
+ continue # Skip projects which are on greylist
projects[project_name].append(project_stats["rac"])
for project_name, project_racs in projects.items():
average_rac = sum(project_racs) / len(project_racs)
@@ -2437,7 +2437,7 @@ def custom_sleep(sleep_time: float, boinc_rpc_client, dev_loop: bool = False):
else:
DATABASE["DEVTIMECOUNTER"] += max(dev_fee, 0.01)
DATABASE["FTMTOTAL"] += 1
- # save database every ten minutes or at end of routine
+ # Save database every ten minutes or at end of routine
if str(elapsed).endswith("0") or elapsed + 1 >= sleep_time:
save_stats(DATABASE)
elapsed += 1
@@ -2488,9 +2488,9 @@ def setup_dev_boinc() -> str:
"""
Do initial setup of and start dev boinc client. Returns RPC password or 'ERROR' if unable to start BOINC
"""
- # check if dev BOINC directory exists
- ## create if it doesn't
- # start BOINC
+ # Check if dev BOINC directory exists
+ ## Create if it doesn't
+ # Start BOINC
dev_path = os.path.abspath("DEVACCOUNT")
boinc_executable = "/usr/bin/boinc"
if "WINDOWS" in found_platform.upper():
@@ -2499,7 +2499,7 @@ def setup_dev_boinc() -> str:
boinc_executable = "/Applications/BOINCManager.app/Contents/resources/boinc"
if not os.path.exists("DEVACCOUNT"):
os.mkdir(dev_path)
- # update settings to match user settings from main BOINC install
+ # Update settings to match user settings from main BOINC install
global_settings_path = os.path.join(boinc_data_dir, "global_prefs.xml")
override_path = os.path.join(boinc_data_dir, "global_prefs_override.xml")
override_dest_path = os.path.join(
@@ -2688,7 +2688,7 @@ def update_table(
Function to update table printed to user.
:param status = Most recent status "waiting for xfers, starting crunching on x, etc"
"""
- # don't update table in dev loop because all our variables reference
+ # Don't update table in dev loop because all our variables reference
# dev install not main one
if dev_loop or SKIP_TABLE_UPDATES:
return
@@ -2706,8 +2706,8 @@ def update_table(
"AVGCPUTIME": "ACTIME",
}
ignore_list = ["MAGPERCREDIT"]
- # generate table to print pretty
- os.system("cls" if os.name == "nt" else "clear") # clear terminal
+ # Generate table to print pretty
+ os.system("cls" if os.name == "nt" else "clear") # Clear terminal
table_dict = {}
for project_url, stats_dict in combined_stats.items():
table_dict[project_url] = {}
@@ -2765,7 +2765,7 @@ def update_table(
authorize_response = loop.run_until_complete(rpc_client.authorize())
BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
get_attached_projects(rpc_client)
- ) # we need to re-fetch this as it's different for dev and client
+ ) # We need to re-fetch this as it's different for dev and client
except Exception as e:
print_and_log(
"Transient error connecting to BOINC, sleeping 30s", "ERROR"
@@ -2781,7 +2781,7 @@ def update_table(
if (
(abs(stats_calc_delta.days) * 24 * 60)
+ (abs(stats_calc_delta.seconds) / 60)
- ) > recalculate_stats_interval: # only re-calculate stats every x minutes
+ ) > recalculate_stats_interval: # Only re-calculate stats every x minutes
log.debug("Calculating stats..")
DATABASE["STATSLASTCALCULATED"] = datetime.datetime.now()
combined_stats = config_files_to_stats(boinc_data_dir)
@@ -2812,7 +2812,7 @@ def update_table(
log.debug(
"Highest priority projects are: " + str(highest_priority_projects)
)
- # print some pretty stats
+ # Print some pretty stats
update_table()
log.info("Highest priority project is {}".format(highest_priority_projects[0]))
@@ -2933,7 +2933,7 @@ def update_table(
if boinc_password == "ERROR":
log.error("Error setting up crunching to developer account")
else:
- # setup dev RPC connection, it may take a few tries while we
+ # Setup dev RPC connection, it may take a few tries while we
# wait for it to come online
tries = 1
tries_max = 5
@@ -2944,10 +2944,10 @@ def update_table(
setup_connection(
boinc_ip, boinc_password, port=DEV_RPC_PORT
)
- ) # setup dev BOINC RPC connection
+ ) # Setup dev BOINC RPC connection
authorize_response = loop.run_until_complete(
dev_rpc_client.authorize()
- ) # authorize dev RPC connection
+ ) # Authorize dev RPC connection
except Exception as e:
log.error("Error connecting to BOINC dev client {}".format(e))
else:
@@ -3010,16 +3010,16 @@ def update_table(
rpc_client=dev_rpc_client,
client_rpc_client=rpc_client,
time=DATABASE["DEVTIMECOUNTER"],
- ) # run the BOINC loop :)
+ ) # Run the BOINC loop :)
update_table()
authorize_response = loop.run_until_complete(
dev_rpc_client.authorize()
- ) # authorize dev RPC connection
+ ) # Authorize dev RPC connection
loop.run_until_complete(
run_rpc_command(dev_rpc_client, "quit")
- ) # quit dev client
+ ) # Quit dev client
DEV_LOOP_RUNNING = False
- # re-enable client BOINC
+ # Re-enable client BOINC
loop.run_until_complete(
run_rpc_command(rpc_client, "set_gpu_mode", existing_gpu_mode)
)
@@ -3091,14 +3091,14 @@ def update_table(
)
continue
- # make sure we are using correct URL, BOINC requires capitalization to
+ # Make sure we are using correct URL, BOINC requires capitalization to
# be exact
highest_priority_project = resolve_boinc_url(
highest_priority_project, ALL_BOINC_PROJECTS
)
if highest_priority_project.upper() not in DATABASE[mode]:
DATABASE[mode][highest_priority_project.upper()] = {}
- # skip checking project if we have a backoff counter going and it
+ # Skip checking project if we have a backoff counter going and it
# hasn't been long enough
time_since_last_project_check = datetime.datetime.now() - DATABASE[mode][
highest_priority_project.upper()
@@ -3126,16 +3126,16 @@ def update_table(
log.info("Waiting for any xfers to complete...")
dl_response = wait_till_no_xfers(
rpc_client
- ) # wait until all network activity has concluded
- # if in dev_loop, attach to project if needed
+ ) # Wait until all network activity has concluded
+ # If in dev_loop, attach to project if needed
if dev_loop:
get_project_list = loop.run_until_complete(
run_rpc_command(rpc_client, "get_project_status")
)
- # on first run, there is no project list
+ # On first run, there is no project list
if isinstance(get_project_list, list):
- # convert to simple list of strings so we can check if
+ # Convert to simple list of strings so we can check if
# project URL is in list
converted_project_list = project_list_to_project_list(
get_project_list
@@ -3149,7 +3149,7 @@ def update_table(
if not project_in_list_check(
highest_priority_project, converted_project_list
):
- # yoyo will never be in project dict due to not supporting weak auth
+ # Yoyo will never be in project dict due to not supporting weak auth
converted_dev_project_url = project_to_dev_project(
highest_priority_project, DEV_PROJECT_DICT
)
@@ -3172,20 +3172,20 @@ def update_table(
arg2="authenticator",
arg2_val=DEV_PROJECT_DICT[converted_dev_project_url],
)
- ) # update project
- sleep(30) # give it a chance to finish attaching
+ ) # Update project
+ sleep(30) # Give it a chance to finish attaching
(
BOINC_PROJECT_LIST,
BOINC_PROJECT_NAMES,
) = loop.run_until_complete(
get_attached_projects(rpc_client)
- ) # we need to re-fetch this as it's now changed
+ ) # We need to re-fetch this as it's now changed
highest_priority_project = resolve_boinc_url(
highest_priority_project, ALL_BOINC_PROJECTS
- ) # this may have changed, so check
+ ) # This may have changed, so check
if (
len(BOINC_PROJECT_LIST) == 0
- ): # using this as a proxy for "failed attach"
+ ): # Using this as a proxy for "failed attach"
log.error(
"Appears to fail to attach to {}".format(boincified_url)
)
@@ -3208,13 +3208,13 @@ def update_table(
run_rpc_command(
rpc_client, "project_update", "project_url", boincified_url
)
- ) # update project
+ ) # Update project
log.debug(
"Requesting work from {} added to debug no new tasks bug"
+ str(boincified_url)
)
log.debug("Update response is {}".format(update_response))
- # give BOINC time to update w project, I don't know a less hacky way to
+ # Give BOINC time to update w project, I don't know a less hacky way to
# do this, suggestions are welcome
sleep(
15
@@ -3222,7 +3222,7 @@ def update_table(
DATABASE[mode][highest_priority_project.upper()][
"LAST_CHECKED"
] = datetime.datetime.now()
- # check if project should be backed off. If so, back it off.
+ # Check if project should be backed off. If so, back it off.
# This is an exponentially increasing backoff with a maximum time of 1 day
# Projects are backed off if they request it, if they are
# unresponsive/down, or if no work is available
@@ -3244,17 +3244,17 @@ def update_table(
log.debug("Waiting for any xfers to complete...")
dl_response = wait_till_no_xfers(
rpc_client
- ) # wait until all network activity has concluded
+ ) # Wait until all network activity has concluded
if (
not dont_nnt
):
- # if we didn't get a backoff signal and we haven't picked
+ # If we didn't get a backoff signal and we haven't picked
# a project to leave non-NNTed during sleeping of loop,
# pick this one for that purpose
dont_nnt = highest_priority_project.upper()
- # re-NNT all projects
+ # Re-NNT all projects
nnt_response = loop.run_until_complete(
nnt_all_projects(rpc_client)
) # NNT all projects
@@ -3318,7 +3318,7 @@ def create_default_database() -> Dict[str, Any]:
if __name__ == "__main__":
- wallet_running = True # switches to false if we have issues connecting
+ wallet_running = True # Switches to false if we have issues connecting
# Verify we are in appropriate python environment
python_major = sys.version_info.major
@@ -3342,7 +3342,7 @@ def create_default_database() -> Dict[str, Any]:
del python_major
log.debug("Python version {}".format(platform.python_version()))
- # shut down dev client is it's running. This is useful if program shuts
+ # Shut down dev client if it's running. This is useful if program shuts
# down unexpectedly
shutdown_dev_client(
quiet=True
@@ -3391,7 +3391,7 @@ def create_default_database() -> Dict[str, Any]:
update_check() # Check for updates to FTM
combined_stats = {}
APPROVED_PROJECT_URLS = []
- # combined_stats has format:
+ # combined_stats has format:
# COMBINED_STATS_EXAMPLE = {
# 'HTTP://PROJECT.COM/PROJECT': {
# 'COMPILED_STATS': {
@@ -3437,7 +3437,7 @@ def create_default_database() -> Dict[str, Any]:
Path.home(), "AppData\Roaming\GridcoinResearch\\"
)
- # check that directories exist
+ # Check that directories exist
log.info("Guessing BOINC data dir is " + str(boinc_data_dir))
if not os.path.isdir(boinc_data_dir):
print(
@@ -3477,7 +3477,7 @@ def create_default_database() -> Dict[str, Any]:
if not SCRIPTED_RUN:
input("Press enter to continue")
- # auto-detect password for BOINC RPC if it exists and user didn't know
+ # Auto-detect password for BOINC RPC if it exists and user didn't know
# BOINC on Windows automatically generates an RPC password
auth_location = os.path.join(boinc_data_dir, "gui_rpc_auth.cfg")
if not boinc_password:
@@ -3618,17 +3618,17 @@ def create_default_database() -> Dict[str, Any]:
try:
rpc_client = loop.run_until_complete(
setup_connection(boinc_ip, boinc_password, boinc_port)
- ) # setup BOINC RPC connection
+ ) # Setup BOINC RPC connection
except Exception as e:
print_and_log("Error: Unable to connect to BOINC client, quitting now", "ERROR")
quit()
if (
not rpc_client
- ): # this was just added so pycharm would stop complaining about
+ ): # This was just added so pycharm would stop complaining about
# rpc_client not being declared
print_and_log("Error connecting to BOINC client, quitting now", "ERROR")
quit()
- # get project list from BOINC client directly. This is needed for
+ # Get project list from BOINC client directly. This is needed for
# correct capitalization
BOINC_PROJECT_LIST, BOINC_PROJECT_NAMES = loop.run_until_complete(
get_attached_projects(rpc_client)
@@ -3717,7 +3717,7 @@ def create_default_database() -> Dict[str, Any]:
mag_ratios=mag_ratios,
)
log.debug("Printing pretty stats...")
- # calculate starting efficiency stats
+ # Calculate starting efficiency stats
if "STARTMAGHR" not in DATABASE:
DATABASE["STARTMAGHR"] = get_avg_mag_hr(combined_stats)
else:
@@ -3738,7 +3738,7 @@ def create_default_database() -> Dict[str, Any]:
original_avg_mag_hr, current_avg_mag_hr
)
)
- # generate table to print pretty
+ # Generate table to print pretty
table_dict = {}
for project_url, stats_dict in combined_stats.items():
table_dict[project_url] = {}
@@ -3847,7 +3847,7 @@ def create_default_database() -> Dict[str, Any]:
priority_results = {}
highest_priority_project = ""
highest_priority_projects = []
- # force calculation of stats at first run since they are not cached in DB
+ # Force calculation of stats at first run since they are not cached in DB
DATABASE["STATSLASTCALCULATED"] = datetime.datetime(
1997, 3, 3
)
From 8e2918889909ed15dcb1b57a9051ddd3d06ab337 Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Sat, 20 May 2023 16:14:03 -0700
Subject: [PATCH 05/23] Added SPDX ID for AGPL v3.0. Started docstring
reformatting and fleshing out.
---
main.py | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/main.py b/main.py
index ab39816..f644fa9 100644
--- a/main.py
+++ b/main.py
@@ -1,4 +1,15 @@
# DO NOT EDIT THIS FILE, EDIT CONFIG.PY INSTEAD
+# SPDX-License-Identifier: AGPL-3.0-only
+
+"""FindTheMag is a powerful utility which assists in optimizing BOINC crunching.
+
+FindTheMag uses your own BOINC client's stats to determine which projects get the
+most credit per hour on your machine. If you ask it to, it will also control BOINC
+to crunch projects according to your preferences. It then figures out which projects
+earn you the most Gridcoin. It even has options to only crunch according to
+profitability and/or temperature.
+
+"""
from itertools import chain
from math import floor, ceil
import copy
@@ -121,8 +132,10 @@
class GridcoinClientConnection:
- """
- A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently quite barebones.
+ """Allows connecting to a Gridcoin wallet and issuing RPC commands.
+
+ A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently
+ quite barebones.
"""
def __init__(
From c11f2bc22c715f51cf9f41ceb3e1f14915077673 Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Tue, 23 May 2023 23:33:22 -0700
Subject: [PATCH 06/23] More docstring changes
---
main.py | 63 ++++++++++++++++++++++++++++++++++++++++++++++++---------
1 file changed, 54 insertions(+), 9 deletions(-)
diff --git a/main.py b/main.py
index f644fa9..edf375e 100644
--- a/main.py
+++ b/main.py
@@ -136,6 +136,13 @@ class GridcoinClientConnection:
A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently
quite barebones.
+
+ Args:
+ config_file:
+ ip_address:
+ rpc_port:
+ rpc_user:
+ rpc_password:
"""
def __init__(
@@ -155,6 +162,18 @@ def __init__(
def run_command(
self, command: str, arguments: List[Union[str, bool]] = None
) -> dict:
+ """Send command to local Gridcoin wallet
+
+ Sends specified Gridcoin command to the Gridcoin wallet instance and
+ retrieves result of the command execution.
+
+ Args:
+ command:
+ arguments:
+
+ Returns:
+ Response from command execution.
+ """
if not arguments:
arguments = []
credentials = None
@@ -175,8 +194,13 @@ def run_command(
return response.json()
def get_approved_project_urls(self) -> List[str]:
- """
- :return: A list of UPPERCASED project URLs using gridcoin command listprojects
+ """Retrieves list of projects approved for Gridcoin.
+
+ Retrieves the list of projects from the local Gridcoin wallet that are
+ approved for earning Gridcoin.
+
+ Returns:
+ A list of UPPERCASED project URLs using gridcoin command listprojects
"""
return_list = []
all_projects = self.run_command("listprojects")
@@ -185,8 +209,15 @@ def get_approved_project_urls(self) -> List[str]:
return return_list
def project_name_to_url(self, searchname: str) -> Union[str, None]:
- """
+ """Return project URL.
+
Convert a project name into its project url, then UPPERCASE it
+
+ Args:
+ searchname:
+
+ Returns:
+ An uppercase project URL.
"""
all_projects = self.run_command("listprojects")
for found_project_name, project_dict in all_projects["result"].items():
@@ -196,7 +227,8 @@ def project_name_to_url(self, searchname: str) -> Union[str, None]:
class BoincClientConnection:
- """
+ """Access to BOINC client configuration files.
+
A simple class for grepping BOINC config files etc. Doesn't do any RPC communication
"""
@@ -218,8 +250,9 @@ def __init__(
self.rpc_password = rpc_password
def get_project_list(self) -> List[str]:
- """
- :return: UPPERCASED list of project URLs. This is all of them, not just ones which are attached
+ """Retrieve the list of projects supported by the BOINC client
+
+ Returns: UPPERCASED list of project URLs. This is all of them, not just ones which are attached
"""
project_list_file = os.path.join(self.config_dir, "all_projects_list.xml")
return_list = []
@@ -231,6 +264,13 @@ def get_project_list(self) -> List[str]:
def shutdown_dev_client(quiet: bool = False) -> None:
+ """Shutdown developer BOINC client.
+
+ Sends RPC quit command to running dev BOINC client.
+
+ Raises:
+ Exception: An error occured shutting down the dev BOINC client.
+ """
exit_loop = asyncio.get_event_loop()
log.info("Attempting to shut down dev client at safe_exit...")
try:
@@ -248,9 +288,14 @@ def shutdown_dev_client(quiet: bool = False) -> None:
def safe_exit(arg1, arg2) -> None:
- """
- Function to safely exit tool by saving database, restoring original user preferences, and quitting dev BOINC client.
- arg1/2 required by the signal handler library, but aren't used for anything inside this function
+ """Safely exit Find The Mag.
+
+ Safely exit tool by saving database, restoring original user preferences,
+ and quitting dev BOINC client.
+
+ Args: arg1 and arg2:
+ Required by the signal handler library,
+ but aren't used for anything inside this function
"""
# This is needed in case this function is called while main loop is still waiting
From 5890f8bb4d72b6d1a47b106f4b07adf279c564dc Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Sat, 27 May 2023 13:52:20 -0700
Subject: [PATCH 07/23] More Docstrings and comment tweaks
---
main.py | 166 +++++++++++++++++++++++++++++++++++++++++++++++---------
1 file changed, 139 insertions(+), 27 deletions(-)
diff --git a/main.py b/main.py
index edf375e..4ecbfc5 100644
--- a/main.py
+++ b/main.py
@@ -137,7 +137,7 @@ class GridcoinClientConnection:
A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently
quite barebones.
- Args:
+ Attributes:
config_file:
ip_address:
rpc_port:
@@ -153,6 +153,15 @@ def __init__(
rpc_user: str = None,
rpc_password: str = None,
):
+ """Initializes the instance based on the connection attributes.
+
+ Attributes:
+ config_file:
+ ip_address:
+ rpc_port:
+ rpc_user:
+ rpc_password:
+ """
self.configfile = config_file # Absolute path to the client config file
self.ipaddress = ip_address
self.rpc_port = rpc_port
@@ -230,6 +239,13 @@ class BoincClientConnection:
"""Access to BOINC client configuration files.
A simple class for grepping BOINC config files etc. Doesn't do any RPC communication
+
+ Attributes:
+ config_dir:
+ ip_address:
+ port:
+ rpc_user:
+ rpc_password:
"""
def __init__(
@@ -240,6 +256,15 @@ def __init__(
rpc_user: str = boinc_username,
rpc_password: str = None,
):
+ """Initializes the instance based on connection attributes.
+
+ Args:
+ config_dir:
+ ip_address:
+ port:
+ rpc_user:
+ rpc_password:
+ """
if config_dir is None:
self.config_dir = "/var/lib/boinc-client"
else:
@@ -268,6 +293,9 @@ def shutdown_dev_client(quiet: bool = False) -> None:
Sends RPC quit command to running dev BOINC client.
+ Args:
+ quiet:
+
Raises:
Exception: An error occured shutting down the dev BOINC client.
"""
@@ -355,17 +383,25 @@ def safe_exit(arg1, arg2) -> None:
async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list:
- """
+ """List of active, waiting, or paused BOINC tasks.
+
Return list of tasks from BOINC client which are not completed/failed. These
can be active tasks, tasks waiting to be started, or paused tasks.
+
+ Args:
+ rpc_client:
+
+ Returns:
+ List of BOINC tasks.
+
"""
- # Known task states
- # 2: Active
return_value = []
reply = await run_rpc_command(rpc_client, "get_results")
if isinstance(reply, str):
log.info("BOINC appears to have no tasks...")
return return_value
+ # Known task states:
+ # 2: Active
for task in reply:
if task["state"] in [2]:
return_value.append(task)
@@ -377,8 +413,17 @@ async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list:
async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
- """
- Returns True is boinc is crunching, false otherwise
+ """Check if BOINC is actively crunching tasks.
+
+ Queries BOINC client as to crunching status. Returns True is BOINC client
+ is crunching, false otherwise.
+
+ Args:
+ rpc_client:
+
+ Returns:
+ True if crunching, or False if not crunching or unsure.
+
"""
reply = await run_rpc_command(rpc_client, "get_cc_status")
task_suspend_reason = int(reply["task_suspend_reason"])
@@ -405,16 +450,32 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
async def setup_connection(
boinc_ip: str = boinc_ip, boinc_password: str = boinc_password, port: int = 31416
) -> libs.pyboinc.rpc_client:
- """
+ """Create BOINC RPC client connection.
+
Sets up a BOINC RPC client connection
+
+ Args:
+ boinc_ip:
+ boinc_password:
+ port:
+
+ Returns:
+
"""
rpc_client = await init_rpc_client(boinc_ip, boinc_password, port=port)
return rpc_client
def temp_check() -> bool:
- """
- Returns True if we should keep crunching based on temperature, False otherwise
+ """Checks if temperature is within acceptable limit.
+
+ Confirms if we should keep crunching based on temperature, or not.
+
+ Raises:
+ Exception: An error occured attempting to read the temperature.
+
+ Returns:
+ True if we should keep crunching, False otherwise.
"""
if not enable_temp_control:
return True
@@ -455,8 +516,13 @@ def temp_check() -> bool:
def update_check() -> None:
- """
- Check for updates to the FindTheMag tool
+ """Check if FindTheMag updates are available.
+
+ Check with FindTheMag repository on GitHub whether or not an update is
+ available. If available, inform the user and provide some information.
+
+ Update checks are performed no more often than once per week. Check times are
+ stored in the database for future reference.
"""
# If we've checked for updates in the last week, ignore
delta = datetime.datetime.now() - DATABASE.get(
@@ -518,8 +584,19 @@ def update_check() -> None:
def get_grc_price() -> float:
- """
- Gets average GRC price from three online sources.
+ """Retrieve current average Gridcoin price.
+
+ Calculates the average GRC price based on values from three online sources.
+
+ Note: Retrieving the prices is dependent on the target website formatting. If the
+ source website changes significantly, retrieval may fail until the relevant
+ search pattern is updated.
+
+ Raises:
+ Exception: An error occurred accessing an online GRC price source.
+
+ Returns:
+ Average GCR price in decimal, or 0 if unable to determine price.
"""
import requests as req
@@ -597,8 +674,17 @@ def get_grc_price() -> float:
def get_approved_project_urls_web() -> Tuple[List[str], Dict[str, str]]:
- """
- Gets current whitelist from Gridcoinstats
+ """List of projects currently whitelisted by Gridcoin.
+
+ Gets current whitelist from the Gridcoinstats website. Limits fetching
+ from website to once every 24 hours through caching list in database.
+
+ Raises:
+ Exception: An error occurred parsing data from the source website.
+
+ Returns:
+ A tuple consisting of a list of project base URLs, and a dictionary
+ mapping base URLs to project names.
"""
# Return cached version if we have it and requested it < 24 hrs ago
@@ -655,16 +741,25 @@ def get_approved_project_urls_web() -> Tuple[List[str], Dict[str, str]]:
def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None:
- """
- Wait for BOINC to finish all pending xfers, return None when done
+ """Wait on BOINC client to finish all pending transfers.
+
+ Wait for BOINC to finish all pending xfers, return None when done.
"""
max_loops = 30
current_loops = 0
loop_wait_in_seconds = 30 # Wait this long between loops
def xfers_happening(xfer_list: list) -> bool:
- """
- Returns True if any active xfers are happening, false if none are happening or if only stalled xfers exist
+ """Confirms whether or not the BOINC client has any active transfers.
+
+ Checks list of transfers for any that are active.
+
+ Args:
+ xfer_list: List of transfers.
+
+ Returns:
+ True if any active xfers are happening, False if none are happening or
+ if only stalled xfers exist.
"""
# Known statuses:
# 0 = Active
@@ -714,9 +809,21 @@ def xfers_happening(xfer_list: list) -> bool:
def get_config_parameters(gridcoin_dir: str) -> Dict[str, str]:
- """
- :param gridcoin_dir: Absolute path to a gridcoin config directory
- :return: All config parameters found, preferring those in the json file to the conf. Note that sidestakes become a list as there may be multiple
+ """Retrieve Gridcoin wallet configuration.
+
+ Parses Gridcoin configuration .json and .conf file for configuration parameters.
+ Preference is given to those in the json file over those in the conf file.
+
+ Note that sidestakes become a list as there may be multiple.
+
+ Args:
+ gridcoin_dir: Absolute path to a gridcoin config directory.
+
+ Raises:
+ Exception: An error occurred while parsing the config file.
+
+ Returns:
+ A dictionary of all config parameters found,
"""
return_dict = dict()
if "gridcoinsettings.json" in os.listdir(gridcoin_dir):
@@ -778,12 +885,17 @@ def get_config_parameters(gridcoin_dir: str) -> Dict[str, str]:
def check_sidestake(
config_params: Dict[str, Union[str, List[str]]], address: str, minval: float
) -> bool:
- """
+ """Confirms whether or not the given address is being adequately sidestaked.
+
Checks if a given address is being sidestaked to or not. Returns False if value < minval
- :param config_params: config_params from get_config_parameters
- :param address: address to check
- :param minval: minimum value to pass check
- :return: True or False
+
+ Args:
+ config_params: config_params from get_config_parameters
+ address: address to check
+ minval: minimum value to pass check
+
+ Returns:
+ True if given address is sidestaked for more than the given minimum.
"""
if "enablesidestaking" not in config_params:
return False
From 0d40dbd6b5440b1ada3e8d5050afe6f4fe5eb6a6 Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Wed, 12 Jul 2023 22:12:09 -0700
Subject: [PATCH 08/23] More document string updates
---
main.py | 243 ++++++++++++++++++++++++++++++++++++++++++++++----------
1 file changed, 203 insertions(+), 40 deletions(-)
diff --git a/main.py b/main.py
index 4ecbfc5..fe09460 100644
--- a/main.py
+++ b/main.py
@@ -918,8 +918,18 @@ def projecturlfromstatsfile(
approved_project_urls: List[str],
boinc_projects_list: List[str],
) -> str:
- """
- Guess a project url from the name of a stats file
+ """Guess a project URL using stats file name.
+
+ Guess a project URL from the name of a stats file.
+
+ Args:
+ statsfilename:
+ all_project_urls:
+ approved_project_urls:
+ boinc_projects_list:
+
+ Returns:
+ URL for project associated with stats file, or stats file name if URL unknown.
"""
# Remove extraneous information from name
statsfilename = statsfilename.replace("job_log_", "")
@@ -957,8 +967,19 @@ def project_url_from_credit_history_file(
all_project_urls: List[str],
boinc_projects_list: List[str],
) -> str:
- """
- Guess a project url from credit history file name
+ """Guess a project URL using credit history file name
+
+ Guess a project URL from credit history file name.
+
+ Args:
+ filename:
+ all_project_urls:
+ approved_project_urls:
+ boinc_projects_list:
+
+ Returns:
+ URL for project associated with credit history file, or credit history
+ file name if URL unknown.
"""
filename = filename.replace("statistics_", "")
filename = filename.replace(".xml", "")
@@ -986,20 +1007,32 @@ def project_url_from_credit_history_file(
def stat_file_to_list(stat_file_abs_path: str) -> List[Dict[str, str]]:
- """
- Turns a BOINC job log into list of dicts we can use, each dict is a task. Dicts have keys below:
- STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE
- Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
- All values and keys in dicts are strings.
+ """Retrieve a list of tasks and related stats from log file.
+
+ Turns a BOINC job log into list of dictionaries we can use, each dictionary
+ is a task.
+ Dictionaries have the following keys:
+ STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE
+
+ Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
+ All values and keys in dicts are strings.
- BOINC's job log format is:
+ BOINC's job log format is:
+ [ue] Estimated runtime BOINC Client estimate (seconds)
+ [ct] CPU time Measured CPU runtime at completion (seconds)
+ [fe] Estimated FLOPs count From project (integer)
+ [nm] Task name From project
+ [et] Elapsed time Wallclock runtime at completion (seconds)
- [ue] Estimated runtime BOINC Client estimate (seconds)
- [ct] CPU time Measured CPU runtime at completion (seconds)
- [fe] Estimated FLOPs count From project (integer)
- [nm] Task name From project
- [et] Elapsed time Wallclock runtime at completion (seconds)
+ Args:
+ stat_file_abs_path:
+
+ Raises:
+ Exception: An error occurred when attempting to read a BOINC job log file.
+ Exception: An error occurred when attempting to parse a BOINC job log file.
+ Returns:
+ List dictionaries, each a BOINC task with statistics.
"""
stats_list = []
try:
@@ -1119,9 +1152,21 @@ async def run_rpc_command(
arg2: Union[str, None] = None,
arg2_val: Union[str, None] = None,
) -> Union[str, Dict[Any, Any]]:
- """
+ """Send command to BOINC client via RPC
+
Runs command on BOINC client via RPC
Example: run_rpc_command(rpc_client,'project_nomorework','http://project.com/project')
+
+ Args:
+ rpc_client:
+ command:
+ arg1:
+ arg1_val:
+ arg2:
+ arg2_val:
+
+ Returns:
+
"""
full_command = "{} {} {} {}".format(
command, arg1, arg1_val, arg2, arg2_val
@@ -1145,10 +1190,21 @@ async def run_rpc_command(
def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str, str]]:
- """
- Turns a BOINC credit history file into list of dicts we can use. Dicts have keys below:
+ """Retrieve BOINC credit history
+
+ Turns a BOINC credit history file into list of dicts we can use.
+
+ Dicts have keys below:
TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC
+
Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
+
+ Args:
+ credithistoryfileabspath: Filename with full path.
+
+ Returns:
+ List of dicionaries with the following keys:
+ TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC
"""
statslist = []
with open(
@@ -1173,9 +1229,15 @@ def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str,
def config_files_to_stats(
config_dir_abs_path: str,
) -> Dict[str, Dict[str, Union[int, float, Dict[str, Union[float, str]]]]]:
- """
- :param config_dir_abs_path: Absolute path to BOINC data directory
- :return: Dict of stats in format COMBINEDSTATSEXAMPLE in main.py
+ """Extract BOINC statistics from all available log and stats files.
+
+ Identifies all job log and statistics files in the specified directory. Extracts
+ all stats from found files and constructs dictionaries of them.
+
+ Args:
+ config_dir_abs_path: Absolute path to BOINC data directory.
+
+ Returns: Dictionary of stats in format COMBINED_STATS_EXAMPLE in main.py
"""
stats_files: List[str] = []
credit_history_files: List[str] = []
@@ -1352,10 +1414,17 @@ def add_mag_to_combined_stats(
approved_projects: List[str],
preferred_projects: List[str],
) -> Tuple[dict, List[str]]:
- """
- :param combined_stats: combined_stats from main.py
- :param mag_ratios: mag ratios returned from get_project_mag_ratios. A dict with project URL as key and mag ratio as value
- :return: combined_stats w/ mag ratios added to us, list of projects which are being crunched but not on approved projects list
+ """Adds magnitude ratios to combined statistics
+
+ Args:
+ combined_stats: Combined_stats from main.py.
+ mag_ratios: Magnitude ratios returned from get_project_mag_ratios.
+ A dict with project URL as key and mag ratio as value
+ approved_projects:
+ preferred_projects:
+
+ Returns: combined_stats w/ mag ratios added to us, list of projects
+ which are being crunched but not on approved projects list
"""
unapproved_list = []
for project_url, project_stats in combined_stats.items():
@@ -1385,11 +1454,18 @@ def get_most_mag_efficient_projects(
percentdiff: int = 10,
quiet: bool = False,
) -> List[str]:
- """
- Given combinedstats, return most mag efficient project(s). This is the #1 most efficient project and any other projects which are within percentdiff of that number.
- :param combinedstats: combinedstats dict
- :param percentdiff: Maximum percent diff
- :return: List of project URLs
+ """Determines most magnitude efficient project(s).
+
+ Given combinedstats, determines most mag efficient project(s). This is the #1
+ most efficient project and any other projects which are within percentdiff of
+ that number.
+
+ Args:
+ combinedstats: combinedstats dict
+ percentdiff: Maximum percent diff
+
+ Returns:
+ List of project URLs
"""
def is_eligible(project_url: str, project_stats: dict):
@@ -1479,6 +1555,29 @@ def is_eligible(project_url: str, project_stats: dict):
def sidestake_check(
check_sidestake_results: bool, check_type: str, address: str
) -> None:
+ """Enable sidestaking if approved by user.
+
+ If sidestaking has not been enabled for the specified check_type, then prompt the user
+ for enabling sidestaking, and enable in the Gridcoin wallet for the specified address
+ and the entered percentage.
+
+ Args:
+ check_sidestake_results:
+ True - sidestaking is currently enabled.
+ False - sidestaking currently not enabled.
+ check_type:
+ 'FOUNDATION' - sidestaking to the Gridcoin foundation.
+ 'DEVELOPER' - sidestaking to the FTM developer.
+ address: Gridcoin address of the check_type.
+
+ Raises:
+ An exception occurred while parsing the user's entered answer.
+
+ TODO:
+ Issue #27 logic error
+ - Return statement in answer == N block only covers 1 if statement.
+ - User will still be asked % if answer no to foundation check.
+ """
if check_type == "FOUNDATION":
message1 = 'It appears that you have not enabled sidestaking to the Gridcoin foundation in your wallet. We believe it is only fair that people benefiting from the Gridcoin network contribute back to it\nSidestaking enables you to contribute a small % of your staking profits (you can choose the %)\nWould you like to enable sidestaking?. \nPlease answer "Y" or "N" (without quotes)'
message2 = "What percent would you like to donate to the Gridcoin foundation? Donations go towards software development, promotion, and growth of the coin. Enter a number like 5 for 5%. Please enter whole numbers only"
@@ -1521,10 +1620,19 @@ def sidestake_check(
def get_project_mag_ratios(
grc_client: GridcoinClientConnection, lookback_period: int = 30
) -> Dict[str, float]:
- """
- :param grc_client:
- :param lookback_period: number of superblocks to look back to determine average
- :return: Dictionary w/ key as project URL and value as project mag ratio (mag per unit of RAC)
+ """Retrieve magnitude to RAC ratios for each project from Gridcoin client.
+
+ Calculate the ratio of magnitude to RAC for each project the Gridcoin client
+ is aware of. Look back the number of specified superblocks for calculating the
+ average.
+
+ Args:
+ grc_client: Connection to Gridcoin client.
+ lookback_period: Number of superblocks to look back to determine average.
+
+ Returns:
+ A dictionary with the key as project URL and value as project magnitude ratio
+ (mag per unit of RAC).
"""
projects = {}
return_dict = {}
@@ -1553,6 +1661,17 @@ def get_project_mag_ratios(
def project_url_to_name(url: str, project_names: dict = None):
+ """Attempt to convert specified project URL to the project name.
+
+ Args:
+ url: URL of desired project.
+ project_names: Dictionary of project names with the key as the project URL.
+
+ Returns:
+ The project name associated with the specified URL, or the converted
+ specified URL if the project is not found.
+
+ """
if not project_names:
project_names = BOINC_PROJECT_NAMES
search = (
@@ -1572,10 +1691,39 @@ def print_table(
status: str = DATABASE["TABLE_STATUS"],
dev_status: bool = False,
):
+ """Outputs to console a text based table with current status and statistics.
+
+ This is the main display of the program. It is refreshed automatically at set
+ intervals. Statistics are displayed for each project as well as general information
+ regarding the performance of FTM.
+
+ Args:
+ table_dict: Dictionary of project statistics.
+ sortby: The table column attribute to sort the table rows by.
+ sleep_reason: Reason to sleep.
+ status: Most recent BOINC client status.
+ dev_status: Whether or not crunching is being done for the FTM developer.
+
+ """
def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
- """
- Return left-aligned string with a total len of X and min_padding (extra space on right side) of min_pad, cutting off string if needed
- If min_pad==1, it looks like this ' yourstring '
+ """Left-aligns specified string using given length and padding.
+
+ Constructs a string of length total_len with yourstring left-aligned and
+ padded with spaces on the right. Padding includes at least min_pad spaces,
+ cutting off yourstring if required.
+
+ Example: ("examplestring", 15, 1) will create a string that looks like
+ this: 'examplestring '.
+
+ Returns:
+ Left-aligned string of total_len with min_pad padding of spaces on the
+ right of the text.
+
+ TODO:
+ Confirm that returned string should be shorter than total_len based on
+ the value of min_pad, or should the length always be total_len.
+ Example ("yourstring",15,1) returns 'yourstring ' where the length
+ is actually 14 instead of 15.
"""
if len(yourstring) >= total_len - min_pad:
yourstring = yourstring[0 : total_len - (min_pad)]
@@ -1584,9 +1732,24 @@ def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
return yourstring + right_pad
def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
- """
- Return center-aligned string with a total len of X and min_padding (extra space on right & left side) of min_pad, cutting off string if needed
- If min_pad==1, it looks like this ' yourstring '
+ """Center-aligns specified string using given length and padding.
+
+ Constructs a string of length total_len with yourstring center-aligned and
+ padded with spaces on the left and right. Padding includes at least min_pad
+ spaces, cutting off yourstring if required.
+
+ Example: ("examplestring", 15, 1) will create a string that looks like
+ this: ' examplestring '.
+
+ Returns:
+ Center-aligned string of total_len with min_pad padding of spaces on the
+ left and right of the text.
+
+ TODO:
+ Confirm that returned string should be shorter than total_len based on
+ the value of min_pad, or should the length always be total_len.
+ Example ("yourstring",15,1) returns ' yourstring ' where the length
+ is actually 14 instead of 15.
"""
if len(yourstring) >= total_len - min_pad:
yourstring = yourstring[0 : total_len - (min_pad)]
From c51749feebc95a7fe73b391a07d5ff1203ec5c5f Mon Sep 17 00:00:00 2001
From: HeyMerlin <26638413+HeyMerlin@users.noreply.github.com>
Date: Fri, 13 Oct 2023 16:51:40 -0700
Subject: [PATCH 09/23] Changed location of Raises clause in docstrings to be
compliant with Google Python Style Guide.
---
main.py | 75 ++++++++++++++++++++++++++++-----------------------------
1 file changed, 37 insertions(+), 38 deletions(-)
diff --git a/main.py b/main.py
index fe5d3a3..2252bb5 100644
--- a/main.py
+++ b/main.py
@@ -736,15 +736,14 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
Queries BOINC client as to crunching status. Returns True is BOINC client
is crunching, false otherwise.
- Raises:
- Exception: An error occured attempting to check the BOINC client crunching status.
-
Args:
rpc_client:
Returns:
True if crunching, or False if not crunching or unsure.
-
+
+ Raises:
+ Exception: An error occured attempting to check the BOINC client crunching status.
"""
try:
reply = await run_rpc_command(rpc_client, "get_cc_status")
@@ -803,12 +802,12 @@ def temp_check() -> bool:
"""Checks if temperature is within acceptable limit.
Confirms if we should keep crunching based on temperature, or not.
-
- Raises:
- Exception: An error occured attempting to read the temperature.
Returns:
True if we should keep crunching, False otherwise.
+
+ Raises:
+ Exception: An error occured attempting to read the temperature.
"""
if not ENABLE_TEMP_CONTROL:
return True
@@ -866,14 +865,14 @@ def update_fetch(
update_text: Used for testing purposes. Default: None
current_ver: Added for testing purposes. Default: None
- Raises:
- Exception: An error occured when attempting to parse the retrieved update file.
-
Returns:
A tuple consisting of:
A bool, set to True if and update is available.
A bool, set to True if the update is a security update.
A string containing update related information.
+
+ Raises:
+ Exception: An error occured when attempting to parse the retrieved update file.
"""
update_return = False
return_string = ""
@@ -982,11 +981,11 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]:
sample_text: Used for testing.
Typicaly a "view source" of all pages added together.
- Raises:
- Exception: An error occurred accessing an online GRC price source.
-
Returns:
Average GCR price in decimal, or None if unable to determine price.
+
+ Raises:
+ Exception: An error occurred accessing an online GRC price source.
"""
import requests as req
@@ -1036,12 +1035,12 @@ def get_approved_project_urls_web(query_result: str = None) -> Dict[str, str]:
Args:
query_result: Used for testing.
+ Returns:
+ A dictionary mapping base URLs to project names.
+
Raises:
Exception: An error occurred fetching stats data from the website.
Exception: An error occurred parsing data from the source website.
-
- Returns:
- A dictionary mapping base URLs to project names.
"""
# Check if cache is available
if "GSPROJECTLIST" in DATABASE and "GSRESOLVERDICT" in DATABASE:
@@ -1131,12 +1130,12 @@ def xfers_happening(xfer_list: list) -> bool:
Args:
xfer_list: List of transfers.
- Raises:
- Exception: An error occurred parsing entry in transfer list.
-
Returns:
True if any active xfers are happening, False if none are happening, or
if only stalled xfers exist, or if unable to determine.
+
+ Raises:
+ Exception: An error occurred parsing entry in transfer list.
"""
# Known statuses:
# 0 = Active
@@ -1214,12 +1213,12 @@ def get_gridcoin_config_parameters(gridcoin_dir: str) -> Dict[str, str]:
Args:
gridcoin_dir: Absolute path to a gridcoin config directory.
-
- Raises:
- Exception: An error occurred while parsing the config file.
Returns:
A dictionary of all config parameters found,
+
+ Raises:
+ Exception: An error occurred while parsing the config file.
"""
return_dict = dict()
dupes = {}
@@ -1376,12 +1375,12 @@ def stat_file_to_list(
stat_file_abs_path: BOINC client statistics log file with absolute path
content: Added for testing purposes.
+ Returns:
+ List dictionaries, each a BOINC task with statistics.
+
Raises:
Exception: An error occurred when attempting to read a BOINC job log file.
Exception: An error occurred when attempting to parse a BOINC job log file.
-
- Returns:
- List dictionaries, each a BOINC task with statistics.
"""
stats_list = []
try:
@@ -1460,11 +1459,11 @@ async def run_rpc_command(
arg2: Optional parameter for BOINC command.
arg2_val: Value for optional parameter.
- Raises:
- Exception: An error occurred attempting to communicated with the BOINC client.
-
Returns:
Response from BOINC client, or None if unsuccessful.
+
+ Raises:
+ Exception: An error occurred attempting to communicated with the BOINC client.
"""
max_retries = 3
retry_wait = 5
@@ -1517,12 +1516,12 @@ def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str,
Args:
credithistoryfileabspath: Filename with absolute path.
- Raises:
- Exception: An error occurred attempting to read and parse the credit history file.
-
Returns:
List of dicionaries with the following keys:
TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC
+
+ Raises:
+ Exception: An error occurred attempting to read and parse the credit history file.
"""
statslist = []
try:
@@ -1777,13 +1776,13 @@ def config_files_to_stats(
Args:
config_dir_abs_path: Absolute path to BOINC data directory.
- Raises:
- Exception: An error occurred retrieving list of statistics files.
- Exception: An error occurred parsing credit history files.
-
Returns:
Dictionary of statistics in format COMBINED_STATS_EXAMPLE in main.py, or
an empty dictionary if unable to retrieve a list of statistics files.
+
+ Raises:
+ Exception: An error occurred retrieving list of statistics files.
+ Exception: An error occurred parsing credit history files.
"""
stats_files: List[str] = []
credit_history_files: List[str] = []
@@ -2123,14 +2122,14 @@ def get_project_mag_ratios(
response: Used for testing purposes.
grc_projects: Set to None, unless for testing purposes. When testing
This is the output of the 'listprojects' command run on the Gridcoin client.
-
- Raises:
- Exception: An error occurred attempting to communicate with the Gridcoin client.
Returns:
A dictionary with the key as project URL and value as project magnitude ratio
(mag per unit of RAC).
A value of None is returned in the event of an exception and no cached data.
+
+ Raises:
+ Exception: An error occurred attempting to communicate with the Gridcoin client.
"""
global PROJECT_MAG_RATIOS_CACHE
projects = {}
From c066c7ce48dbdb1851e8acf8d5519d0691a5880f Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Fri, 13 Oct 2023 17:55:22 -0700
Subject: [PATCH 10/23] Test fix for errors fetching Gridcoinstats
---
main.py | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/main.py b/main.py
index c40cb10..fa600b8 100644
--- a/main.py
+++ b/main.py
@@ -2962,11 +2962,21 @@ def get_project_mag_ratios_from_url(
return None
try:
loaded_json = json.loads(resp.text)
+ if not loaded_json:
+ raise Exception
+ if len(loaded_json)==0:
+ raise Exception
response = get_project_mag_ratios_from_response(
loaded_json, lookback_period, project_resolver_dict
)
except Exception as e:
log.error("E in get_project_mag_ratios_from_url:{}".format(e))
+ if len(PROJECT_MAG_RATIOS_CACHE) > 0:
+ print_and_log(
+ "Error communicating with gridcoinstats for magnitude info, using cached data",
+ "ERROR",
+ )
+ return PROJECT_MAG_RATIOS_CACHE
return None
else:
return response
@@ -4388,9 +4398,6 @@ def create_default_database() -> Dict[str, Any]:
# Get project list from Gridcoin wallet and/or gridcoinstats, check sidestakes
foundation_address = "bc3NA8e8E3EoTL1qhRmeprbjWcmuoZ26A2"
developer_address = "RzUgcntbFm8PeSJpauk6a44qbtu92dpw3K"
- MAG_RATIOS = (
- {}
- ) # added to prevent pycharm "may be undefined". Can't be though because the app quits if it can't be found
try:
grc_client = GridcoinClientConnection(
rpc_user=rpc_user, rpc_port=rpc_port, rpc_password=gridcoin_rpc_password
From 9557b3bff27870c4937e940034b201c80d31e541 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Wed, 25 Oct 2023 17:45:02 -0700
Subject: [PATCH 11/23] Add option to dump database
---
config.py | 1 +
main.py | 3 +++
2 files changed, 4 insertions(+)
diff --git a/config.py b/config.py
index da18f25..0c02ad2 100644
--- a/config.py
+++ b/config.py
@@ -73,6 +73,7 @@ def TEMP_FUNCTION():
DUMP_PROJECT_WEIGHTS: bool = False # Dump weights assigned to projects
DUMP_PROJECT_PRIORITY: bool = False # Dump weights adjusted after considering current and past crunching time
DUMP_RAC_MAG_RATIOS: bool = False # Dump the RAC:MAG ratios from each Gridcoin project
+DUMP_DATABASE:bool = False # Dump the DATABASE
# how many decimal places to round each stat to which is printed in the output table
ROUNDING_DICT = {
"MAGPERCREDIT": 5,
diff --git a/main.py b/main.py
index fa600b8..73fa6ea 100644
--- a/main.py
+++ b/main.py
@@ -86,6 +86,7 @@
False # Dump weights adjusted after considering current and past crunching time
)
DUMP_RAC_MAG_RATIOS: bool = False # Dump the RAC:MAG ratios from each Gridcoin project
+DUMP_DATABASE: bool = False # Dump the DATABASE
DEV_FEE_MODE: str = "CRUNCH" # valid values: CRUNCH|SIDESTAKE
CRUNCHING_FOR_DEV: bool = False
DEV_EXIT_TEST: bool = False # only used for testing
@@ -3453,6 +3454,8 @@ def boinc_loop(
CRUNCHING_FOR_DEV = False
if mode not in DATABASE:
DATABASE[mode] = {}
+ if DUMP_DATABASE:
+ save_stats(DATABASE,'DATABASE_DUMP')
# Note yoyo@home does not support weak auth so it can't be added here
# URLs must be in canonicalized database format
From 9899e8c659d2190dcd1358043148a80a110cd923 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Wed, 25 Oct 2023 17:48:23 -0700
Subject: [PATCH 12/23] Disable auto-clear at start of app
---
main.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/main.py b/main.py
index 73fa6ea..7a5263d 100644
--- a/main.py
+++ b/main.py
@@ -1929,6 +1929,7 @@ def print_table(
sleep_reason: str = DATABASE["TABLE_SLEEP_REASON"],
status: str = DATABASE["TABLE_STATUS"],
dev_status: bool = False,
+ clear: bool = False,
):
if len(table_dict) == 0:
print(
@@ -3352,6 +3353,7 @@ def update_table(
status: str = None,
dev_status: bool = False,
dev_loop: bool = False,
+ clear:bool = True,
):
"""
Function to update table printed to user.
@@ -3379,7 +3381,8 @@ def update_table(
}
ignore_list = ["MAGPERCREDIT"]
# generate table to print pretty
- os.system("cls" if os.name == "nt" else "clear") # clear terminal
+ if clear:
+ os.system("cls" if os.name == "nt" else "clear") # clear terminal
table_dict = {}
for project_url, stats_dict in COMBINED_STATS.items():
table_dict[project_url] = {}
@@ -3409,6 +3412,7 @@ def update_table(
sleep_reason=sleep_reason,
status=status,
dev_status=dev_status,
+ clear=clear,
)
@@ -4508,7 +4512,7 @@ def create_default_database() -> Dict[str, Any]:
if len(table_dict) > 0:
print("SOME PRETTY STATS JUST FOR YOU, SORTED BY AVG GRC/DAY")
priority_results = {}
- update_table()
+ update_table(clear=False)
del priority_results # this is only created temporarily as update_table expects it
else:
print(
From c7e2a4930e7a9cb81e04e6dcd8d8fcf21fb97d58 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Thu, 26 Oct 2023 19:02:43 -0700
Subject: [PATCH 13/23] Simplify save_stats function, hopefully fix issue with
updates not being saved.
---
main.py | 23 +++++++++++++----------
1 file changed, 13 insertions(+), 10 deletions(-)
diff --git a/main.py b/main.py
index 6aef8cf..77d6ab3 100644
--- a/main.py
+++ b/main.py
@@ -3500,19 +3500,22 @@ def actual_save_stats(database: Any, path: str = None) -> None:
def save_stats(database: Any, path: str = None) -> None:
+ """
+ Caching function to save a database. If the database
+ has changed, save it, otherwise don't.
+ """
+ if not path:
+ path='DATABASE'
try:
- if not path:
- if "DATABASE" in SAVE_STATS_DB:
- if database != SAVE_STATS_DB["DATABASE"]:
- actual_save_stats(database, path)
- else:
+ if path in SAVE_STATS_DB:
+ if SAVE_STATS_DB[path] != database:
+ log.debug('Saving DB {}'.format(path))
actual_save_stats(database, path)
- else:
- if path in SAVE_STATS_DB:
- if SAVE_STATS_DB[path] != database:
- actual_save_stats(database, path)
else:
- actual_save_stats(database, path)
+ log.debug('Skipping save of DB {}'.format(path))
+ else:
+ log.debug('Saving DB bc not in SAVE_STATS_DB {}'.format(path))
+ actual_save_stats(database, path)
except Exception as e:
log.error("Error saving db {}{}".format(path, e))
From 001ca683422bbbb2d11691a8c64a6abcac233396 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Thu, 26 Oct 2023 19:05:50 -0700
Subject: [PATCH 14/23] Add debug note if stats file not found
---
main.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/main.py b/main.py
index 77d6ab3..72c1aec 100644
--- a/main.py
+++ b/main.py
@@ -4569,6 +4569,7 @@ def create_default_database() -> Dict[str, Any]:
DATABASE = create_default_database()
save_stats(DATABASE)
else:
+ log.warning('No stats file found, making new one...')
DATABASE = create_default_database()
save_stats(DATABASE)
From 5a73d5a93f2e09165cf6f62af3958a4d9e546ef8 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sat, 28 Oct 2023 18:05:31 -0700
Subject: [PATCH 15/23] Small fix to bug where display included extra curly
braces
---
main.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/main.py b/main.py
index 72c1aec..c415a90 100644
--- a/main.py
+++ b/main.py
@@ -4318,7 +4318,7 @@ def boinc_loop(
DATABASE[
"TABLE_STATUS"
] = "Skipping {} due to backoff period...".format(
- {highest_priority_project}
+ highest_priority_project
)
update_table(dev_loop=dev_loop)
log.debug(
From 4a69e9669e6856a07205d504ce219152455d5dd5 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sun, 29 Oct 2023 15:07:04 -0700
Subject: [PATCH 16/23] Fixes issue #40 so python 3.12 won't throw syntax errors
---
main.py | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/main.py b/main.py
index c415a90..e110210 100644
--- a/main.py
+++ b/main.py
@@ -148,16 +148,16 @@
PRICE_URL_DICT: Dict[str, Tuple[str, Union[str, re.Pattern]]] = {
"https://coinmarketcap.com/currencies/gridcoin/": (
"coinmarketcap.com",
- '("low24h":)(\d*.\d*)',
+ r'("low24h":)(\d*.\d*)',
),
"https://finance.yahoo.com/quote/GRC-USD/": (
"yahoo.com",
- '(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)',
+ r'(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)',
),
"https://www.coingecko.com/en/coins/gridcoin-research": (
"coingecko",
re.compile(
- '(data-coin-id="243" data-coin-symbol="grc" data-target="price.price">\$)(\d*\.\d*)()',
+ r'(data-coin-id="243" data-coin-symbol="grc" data-target="price.price">\$)(\d*\.\d*)()',
flags=re.MULTILINE | re.IGNORECASE,
),
),
@@ -274,7 +274,7 @@ def resolve_url_database(url: str) -> str:
elif FOUND_PLATFORM == "Darwin":
BOINC_DATA_DIR = os.path.join("/Library/Application Support/BOINC Data/")
else:
- BOINC_DATA_DIR = "C:\ProgramData\BOINC\\"
+ BOINC_DATA_DIR = "C:\\ProgramData\\BOINC\\"
if not GRIDCOIN_DATA_DIR:
if FOUND_PLATFORM == "Linux":
GRIDCOIN_DATA_DIR = os.path.join(Path.home(), ".GridcoinResearch/")
@@ -284,7 +284,7 @@ def resolve_url_database(url: str) -> str:
)
else:
GRIDCOIN_DATA_DIR = os.path.join(
- Path.home(), "AppData\Roaming\GridcoinResearch\\"
+ Path.home(), "AppData\\Roaming\\GridcoinResearch\\"
)
@@ -1193,7 +1193,7 @@ def wait_till_no_xfers(rpc_client: libs.pyboinc.rpc_client) -> None:
sleep(loop_wait_in_seconds)
continue
if isinstance(allow_response, str):
- cleaned_response = re.sub("\s*", "", allow_response)
+ cleaned_response = re.sub(r"\s*", "", allow_response)
if cleaned_response == "": # There are no transfers, yay!
return
if xfers_happening(allow_response):
@@ -2305,7 +2305,7 @@ def print_table(
"No projects have any assigned credit yet, a pretty table will appear here once you have earned some credit."
)
# fmt: off
- print("""
+ print(r"""
WNNXXXKKXW
WNK0OkkxxkkkkdoK
WX0kxdolx0XNNWWWNkoK
From e0668244a8b9523d81da969d12086d90ed89e1a3 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sun, 29 Oct 2023 15:10:34 -0700
Subject: [PATCH 17/23] Fixes issue #41
---
main.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/main.py b/main.py
index e110210..52e0153 100644
--- a/main.py
+++ b/main.py
@@ -4536,6 +4536,10 @@ def create_default_database() -> Dict[str, Any]:
del python_minor
del python_major
log.debug("Python version {}".format(platform.python_version()))
+ # These must be declared early in case the user ctrl+Cs the script.
+ # This way, safe_exit can use these paths
+ override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml")
+ override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml")
# Shut down dev client is it's running. This is useful if program shuts
# down unexpectedly
@@ -4626,8 +4630,6 @@ def create_default_database() -> Dict[str, Any]:
)
input("Press enter to continue or CTRL+C to quit")
wallet_running = False
- override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml")
- override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml")
try:
os.access(override_path, os.W_OK)
From 07e1639e2cfe9f0f3a40a9c1e0c654c95d4d3835 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sun, 29 Oct 2023 15:15:47 -0700
Subject: [PATCH 18/23] Hopefully fixes stats DB issue
---
main.py | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/main.py b/main.py
index 52e0153..27e6ca3 100644
--- a/main.py
+++ b/main.py
@@ -3486,13 +3486,20 @@ def benchmark_check(
def actual_save_stats(database: Any, path: str = None) -> None:
+ """
+ Save a JSON database file. Normally saves to given path.txt unless the path is "stats"
+ in which case it saves to stats.json
+ """
+ if path:
+ if path=='stats':
+ path='stats.json'
try:
if not path:
- with open("stats.json", "w") as fp:
+ with open(path+'.txt', "w") as fp:
json.dump(database, fp, default=json_default)
SAVE_STATS_DB["DATABASE"] = DATABASE
else:
- with open(path + ".txt", "w") as fp:
+ with open(path, "w") as fp:
json.dump(database, fp, default=json_default)
SAVE_STATS_DB[path] = database
finally:
@@ -3505,7 +3512,7 @@ def save_stats(database: Any, path: str = None) -> None:
has changed, save it, otherwise don't.
"""
if not path:
- path='DATABASE'
+ path='stats'
try:
if path in SAVE_STATS_DB:
if SAVE_STATS_DB[path] != database:
From 28ce3d8ec755f87ed6a726339b4eeb03a03ef1f6 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sun, 29 Oct 2023 15:18:29 -0700
Subject: [PATCH 19/23] Hopefully fixes stats DB issue
---
main.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/main.py b/main.py
index 27e6ca3..68676c0 100644
--- a/main.py
+++ b/main.py
@@ -3525,6 +3525,7 @@ def save_stats(database: Any, path: str = None) -> None:
actual_save_stats(database, path)
except Exception as e:
log.error("Error saving db {}{}".format(path, e))
+ SAVE_STATS_DB[path] = copy.deepcopy(database)
def custom_sleep(sleep_time: float, boinc_rpc_client, dev_loop: bool = False):
From 9f7ed95f7b208c5aa1aa9438b6ff8e67205d556e Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Sun, 29 Oct 2023 15:23:08 -0700
Subject: [PATCH 20/23] formatting
---
main.py | 254 +++++++++++++++++++++++++++-----------------------------
1 file changed, 123 insertions(+), 131 deletions(-)
diff --git a/main.py b/main.py
index 68676c0..bddfd1b 100644
--- a/main.py
+++ b/main.py
@@ -89,7 +89,7 @@
False # Dump weights adjusted after considering current and past crunching time
)
DUMP_RAC_MAG_RATIOS: bool = False # Dump the RAC:MAG ratios from each Gridcoin project
-DUMP_DATABASE: bool = False # Dump the DATABASE
+DUMP_DATABASE: bool = False # Dump the DATABASE
DEV_FEE_MODE: str = "CRUNCH" # valid values: CRUNCH|SIDESTAKE
CRUNCHING_FOR_DEV: bool = False
DEV_EXIT_TEST: bool = False # Only used for testing
@@ -130,7 +130,7 @@
MAG_RATIO_SOURCE: Union[str, None] = None # Valid values: WALLET|WEB
CHECK_SIDESTAKE_RESULTS = False
loop = asyncio.get_event_loop()
-# Translates BOINC's CPU and GPU Mode replies into English. Note difference between
+# Translates BOINC's CPU and GPU Mode replies into English. Note difference between
# keys integer vs string.
CPU_MODE_DICT = {1: "always", 2: "auto", 3: "never"}
GPU_MODE_DICT = {"1": "always", "2": "auto", "3": "never"}
@@ -143,7 +143,7 @@
SAVE_STATS_DB = (
{}
) # Keeps cache of saved stats databases so we don't write more often than we need too
-# Dictionary for places we query in format key=url, value=Tuple[nickname,regex].
+# Dictionary for places we query in format key=url, value=Tuple[nickname,regex].
# Note they all must match group 2
PRICE_URL_DICT: Dict[str, Tuple[str, Union[str, re.Pattern]]] = {
"https://coinmarketcap.com/currencies/gridcoin/": (
@@ -291,7 +291,7 @@ def resolve_url_database(url: str) -> str:
class GridcoinClientConnection:
"""Allows connecting to a Gridcoin wallet and issuing RPC commands.
- A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently
+ A class for connecting to a Gridcoin wallet and issuing RPC commands. Currently
quite barebones.
Attributes:
@@ -345,8 +345,8 @@ def run_command(
command:
arguments:
- Returns:
- Response from command exectution as a dictionary of json, or None if
+ Returns:
+ Response from command exectution as a dictionary of json, or None if
an error was encounted while connecting to the Gridcoin wallet instance.
"""
if not arguments:
@@ -384,7 +384,7 @@ def get_approved_project_urls(self) -> List[str]:
Retrieves the list of projects from the local Gridcoin wallet that are
approved for earning Gridcoin.
- Returns:
+ Returns:
A list of UPPERCASED project URLs using gridcoin command listprojects
"""
return_list = []
@@ -399,7 +399,7 @@ class BoincClientConnection:
A simple class for grepping BOINC config files etc. Doesn't do any RPC communication
- Note: Usage of it should be wrapped in try/except clauses as it does not
+ Note: Usage of it should be wrapped in try/except clauses as it does not
do any error handling internally.
Attributes:
@@ -419,13 +419,13 @@ def __init__(self, config_dir: str = None):
def get_project_list(self) -> List[str]:
"""Retrieve the list of projects supported by the BOINC client
-
+
Constructs a list of all projects known by the BOINC client. This may include
more projects than those currently attached to the BOINC client. This may also
- not include some projects currently attached, if they are projects not included
+ not include some projects currently attached, if they are projects not included
with BOINC by default.
- Returns: List of project URLs.
+ Returns: List of project URLs.
"""
project_list_file = os.path.join(self.config_dir, "all_projects_list.xml")
return_list = []
@@ -562,17 +562,15 @@ def shutdown_dev_client(quiet: bool = False) -> None:
Sends RPC quit command to running dev BOINC client.
- Args:
+ Args:
quiet:
- Raises:
+ Raises:
Exception: An error occured shutting down the dev BOINC client.
"""
- # This is needed in case this function is called while main loop is still
+ # This is needed in case this function is called while main loop is still
# waiting for an RPC command etc
- new_loop = (
- asyncio.get_event_loop()
- )
+ new_loop = asyncio.get_event_loop()
log.info("Attempting to shut down dev client at safe_exit...")
try:
dev_rpc_client = new_loop.run_until_complete(
@@ -591,11 +589,11 @@ def shutdown_dev_client(quiet: bool = False) -> None:
def safe_exit(arg1, arg2) -> None:
"""Safely exit Find The Mag.
- Safely exit tool by saving database, restoring original user preferences,
+ Safely exit tool by saving database, restoring original user preferences,
and quitting dev BOINC client.
-
- Args: arg1 and arg2:
- Required by the signal handler library,
+
+ Args: arg1 and arg2:
+ Required by the signal handler library,
but aren't used for anything inside this function
"""
print_and_log(
@@ -704,8 +702,8 @@ async def get_task_list(rpc_client: libs.pyboinc.rpc_client) -> list:
Return list of tasks from BOINC client which are not completed/failed. These
can be active tasks, tasks waiting to be started, or paused tasks.
-
- Args:
+
+ Args:
rpc_client:
Returns:
@@ -739,7 +737,7 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
Args:
rpc_client:
-
+
Returns:
True if crunching, or False if not crunching or unsure.
@@ -750,7 +748,7 @@ async def is_boinc_crunching(rpc_client: libs.pyboinc.rpc_client) -> bool:
reply = await run_rpc_command(rpc_client, "get_cc_status")
task_suspend_reason = int(reply["task_suspend_reason"])
if task_suspend_reason != 0:
- # These are documented at
+ # These are documented at
# https://github.com/BOINC/boinc/blob/73a7754e7fd1ae3b7bf337e8dd42a7a0b42cf3d2/android/BOINC/app/src/main/java/edu/berkeley/boinc/utils/BOINCDefs.kt
log.debug(
"Determined BOINC client is not crunching task_suspend_reason: {}".format(
@@ -783,14 +781,14 @@ async def setup_connection(
"""Create BOINC RPC client connection.
Sets up a BOINC RPC client connection
-
+
Args:
boinc_ip:
boinc_password:
port:
Returns:
-
+
"""
rpc_client = None
if not boinc_ip:
@@ -806,7 +804,7 @@ def temp_check() -> bool:
Returns:
True if we should keep crunching, False otherwise.
-
+
Raises:
Exception: An error occured attempting to read the temperature.
"""
@@ -1030,7 +1028,7 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]:
def get_approved_project_urls_web(query_result: str = None) -> Dict[str, str]:
"""List of projects currently witelised by Gridcoin.
- Gets current whitelist from the Gridcoinstats website. Limits fetching
+ Gets current whitelist from the Gridcoinstats website. Limits fetching
from website to once every 24 hours through caching list in database.
Args:
@@ -1209,16 +1207,16 @@ def get_gridcoin_config_parameters(gridcoin_dir: str) -> Dict[str, str]:
Parses Gridcoin configuration .json and .conf file for configuration parameters.
Preference is given to those in the json file over those in the to the conf file.
-
+
Note that sidestakes become a list as there may be multiple.
Args:
gridcoin_dir: Absolute path to a gridcoin config directory.
Returns:
- A dictionary of all config parameters found,
-
- Raises:
+ A dictionary of all config parameters found,
+
+ Raises:
Exception: An error occurred while parsing the config file.
"""
return_dict = dict()
@@ -1297,7 +1295,7 @@ def check_sidestake(
config_params: config_params from get_config_parameters
address: address to check
minval: minimum value to pass check
-
+
Returns:
True if given address is sidestaked for more than the given minium.
"""
@@ -1320,7 +1318,7 @@ def project_url_from_stats_file(statsfilename: str) -> str:
"""Guess a projec url using stats file name.
Guess a project URL from the name of a stats file.
-
+
Args:
statsfilename:
@@ -1343,7 +1341,7 @@ def project_url_from_credit_history_file(filename: str) -> str:
filename:
Returns:
- URL for project associated with stats file, or credit history
+ URL for project associated with stats file, or credit history
file name if URL unknown.
"""
filename = filename.replace("statistics_", "")
@@ -1357,11 +1355,11 @@ def stat_file_to_list(
) -> List[Dict[str, str]]:
"""Retrieve a list of tasks and related stats from BOINC client log file.
- Turns a BOINC job log into list of dictionaries we can use, each dictionary
- is a task.
+ Turns a BOINC job log into list of dictionaries we can use, each dictionary
+ is a task.
Dictionaries have the following keys:
STARTTIME,ESTTIME,CPUTIME,ESTIMATEDFLOPS,TASKNAME,WALLTIME,EXITCODE
-
+
Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
All values and keys in dicts are strings.
@@ -1377,7 +1375,7 @@ def stat_file_to_list(
content: Added for testing purposes.
Returns:
- List dictionaries, each a BOINC task with statistics.
+ List dictionaries, each a BOINC task with statistics.
Raises:
Exception: An error occurred when attempting to read a BOINC job log file.
@@ -1451,8 +1449,8 @@ async def run_rpc_command(
Attempts to communicate with the BOINC client multiple times based on internal
parameters.
-
- Args:
+
+ Args:
rpc_client: Connection to BOINC client instance.
command: Command to be executed by the BOINC client.
arg1: Optional parameter for BOINC command.
@@ -1460,7 +1458,7 @@ async def run_rpc_command(
arg2: Optional parameter for BOINC command.
arg2_val: Value for optional parameter.
- Returns:
+ Returns:
Response from BOINC client, or None if unsuccessful.
Raises:
@@ -1507,13 +1505,13 @@ async def run_rpc_command(
def credit_history_file_to_list(credithistoryfileabspath: str) -> List[Dict[str, str]]:
"""Retrieve BOINC credit history
- Turns a BOINC credit history file into list of dictionaries we can use.
+ Turns a BOINC credit history file into list of dictionaries we can use.
Dictionaries have keys below:
TIME,USERTOTALCREDIT,USERRAC,HOSTTOTALCREDIT,HOSTRAC
-
+
Note that ESTIMATEDFLOPS comes from the project and EXITCODE will always be zero.
-
+
Args:
credithistoryfileabspath: Filename with absolute path.
@@ -1777,7 +1775,7 @@ def config_files_to_stats(
Args:
config_dir_abs_path: Absolute path to BOINC data directory.
- Returns:
+ Returns:
Dictionary of statistics in format COMBINED_STATS_EXAMPLE in main.py, or
an empty dictionary if unable to retrieve a list of statistics files.
@@ -1875,13 +1873,13 @@ def add_mag_to_combined_stats(
Args:
combined_stats: COMBINED_STATS from main.py.
- mag_ratios: Magnitude ratios returned from get_project_mag_ratios.
+ mag_ratios: Magnitude ratios returned from get_project_mag_ratios.
A dictionary with project URL as key and magnitude ratio as value
approved_projects:
preferred_projects:
Returns: A tuple consisting of:
- COMBINED_STATS with magnitude ratios added to it,
+ COMBINED_STATS with magnitude ratios added to it,
list of projects which are being crunched but not on approved projects list.
"""
unapproved_list = []
@@ -1941,14 +1939,14 @@ def get_most_mag_efficient_projects(
) -> List[str]:
"""Determines most magnitude efficient project(s).
- Given combinedstats, determines most mag efficient project(s). This is the #1
- most efficient project and any other projects which are within percentdiff of
+ Given combinedstats, determines most mag efficient project(s). This is the #1
+ most efficient project and any other projects which are within percentdiff of
that number.
-
+
Args:
combinedstats: combinedstats dict
percentdiff: Maximum percent diff
-
+
Returns:
List of project URLs, or empty list if none are found.
"""
@@ -2031,7 +2029,7 @@ def sidestake_prompt(
and the entered percentage.
Args:
- check_sidestake_results:
+ check_sidestake_results:
True - sidestaking is currently enabled.
False - sidestaking currently not enabled.
check_type:
@@ -2041,7 +2039,7 @@ def sidestake_prompt(
Raises:
Exception: An error occurred while parsing the user's entered answer.
- Exception: An error occurred attempting to access the Gridcoin wallet
+ Exception: An error occurred attempting to access the Gridcoin wallet
configuration file.
"""
# If user is sidestaking, skip rest of this function
@@ -2122,10 +2120,10 @@ def get_project_mag_ratios(
lookback_period: Number of superblocks to look back to determine average.
response: Used for testing purposes.
grc_projects: Set to None, unless for testing purposes. When testing
- This is the output of the 'listprojects' command run on the Gridcoin client.
-
+ This is the output of the 'listprojects' command run on the Gridcoin client.
+
Returns:
- A dictionary with the key as project URL and value as project magnitude ratio
+ A dictionary with the key as project URL and value as project magnitude ratio
(mag per unit of RAC).
A value of None is returned in the event of an exception and no cached data.
@@ -2137,7 +2135,9 @@ def get_project_mag_ratios(
return_dict = None
try:
if not response:
- command_result = grc_client.run_command("superblocks", [lookback_period, True])
+ command_result = grc_client.run_command(
+ "superblocks", [lookback_period, True]
+ )
response = command_result
if not response:
raise ConnectionError("Issues w superblocks command")
@@ -2178,8 +2178,8 @@ def project_url_to_name_boinc(url: str, project_names: dict = None):
project_names: Dictionary of project names with the key as the project URL,
from the BOINC client database..
- Returns:
- The human-readable project name associated with the specified URL, or
+ Returns:
+ The human-readable project name associated with the specified URL, or
the converted specified URL if the project is not found.
"""
if not project_names:
@@ -2202,8 +2202,8 @@ def project_url_to_name(url: str, project_names: Dict[str, str] = None):
project_names: Dictionary of project names with the key as the project URL,
from the BOINC client database..
- Returns:
- The human-readable project name associated with the specified URL, or
+ Returns:
+ The human-readable project name associated with the specified URL, or
the converted specified URL if the project is not found.
"""
if not project_names:
@@ -2230,14 +2230,14 @@ def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
this: 'examplestring '.
Returns:
- Left-aligned string of total_len with min_pad padding of spaces on the
+ Left-aligned string of total_len with min_pad padding of spaces on the
right of the text.
TODO:
- Confirm that returned string should be shorter than total_len based on
- the value of min_pad, or should the length always be total_len.
- Example ("yourstring",15,1) returns 'yourstring ' where the length
- is actually 14 instead 15.
+ Confirm that returned string should be shorter than total_len based on
+ the value of min_pad, or should the length always be total_len.
+ Example ("yourstring",15,1) returns 'yourstring ' where the length
+ is actually 14 instead 15.
"""
if len(yourstring) >= total_len - min_pad:
yourstring = yourstring[0 : total_len - (min_pad)]
@@ -2248,9 +2248,9 @@ def left_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
"""Center-aligns specified string using given length and padding.
-
+
Constructs a string of length total_len with yourstring center-aligned and
- padded with spaces on the left and right. Padding includes at least min_pad
+ padded with spaces on the left and right. Padding includes at least min_pad
spaces, truncating yourstring if required.
If the padding can not be equal on both sides, then an additional +1 padding is
@@ -2260,14 +2260,14 @@ def center_align(yourstring: str, total_len: int, min_pad: int = 0) -> str:
this: ' examplestring '.
Returns:
- Center-aligned string of total_len with min_pad padding of spaces on the
+ Center-aligned string of total_len with min_pad padding of spaces on the
left and right of the text.
TODO:
- Confirm that returned string should be shorter than total_len based on
- the value of min_pad, or should the length always be total_len.
- Example ("yourstring",15,1) returns ' yourstring ' where the length
- is actually 14 instead 15.
+ Confirm that returned string should be shorter than total_len based on
+ the value of min_pad, or should the length always be total_len.
+ Example ("yourstring",15,1) returns ' yourstring ' where the length
+ is actually 14 instead 15.
"""
total_min_pad = min_pad * 2
room_for_string = total_len - total_min_pad
@@ -2543,11 +2543,11 @@ def generate_stats(
final_project_weights = {}
dev_project_weights = {}
# Canonicalize PREFERRED_PROJECTS list
- to_del=[]
+ to_del = []
for url in preferred_projects.keys():
weight = preferred_projects[url]
canonicalized = resolve_url_database(url)
- if canonicalized!=url:
+ if canonicalized != url:
to_del.append(url)
preferred_projects[canonicalized] = weight
for url in to_del:
@@ -2895,7 +2895,7 @@ def cache_full(project_name: str, messages) -> bool:
else:
if (
not gpu_full
- ): # If GPU is not mentioned in log, this would always
+ ): # If GPU is not mentioned in log, this would always
# happen so using this to stop erroneous messages
log.debug(
"GPU cache appears not full {}".format(message["body"])
@@ -2941,7 +2941,7 @@ def project_backoff(project_name: str, messages) -> bool:
Returns TRUE if project should be backed off. False otherwise or if unable to determine
"""
# Phrases which indicate project SHOULD be backed off
- # - removed 'project requested delay' from positive phrases because
+ # - removed 'project requested delay' from positive phrases because
# projects always provide this, even if work was provided!
positive_phrases = [
"PROJECT HAS NO TASKS AVAILABLE",
@@ -3090,7 +3090,7 @@ async def get_attached_projects(
found_projects.append(project.master_url)
if isinstance(
project.project_name, bool
- ): # This happens if project is "attached" but unable to communicate
+ ): # This happens if project is "attached" but unable to communicate
# with the project due to it being down or some other issue
project_names[project.master_url] = project.master_url
else:
@@ -3338,8 +3338,8 @@ def get_project_mag_ratios_from_url(
loaded_json = json.loads(resp.text)
if not loaded_json:
raise Exception
- if len(loaded_json)==0:
- raise Exception
+ if len(loaded_json) == 0:
+ raise Exception
response = get_project_mag_ratios_from_response(
loaded_json, lookback_period, project_resolver_dict
)
@@ -3491,11 +3491,11 @@ def actual_save_stats(database: Any, path: str = None) -> None:
in which case it saves to stats.json
"""
if path:
- if path=='stats':
- path='stats.json'
+ if path == "stats":
+ path = "stats.json"
try:
if not path:
- with open(path+'.txt', "w") as fp:
+ with open(path + ".txt", "w") as fp:
json.dump(database, fp, default=json_default)
SAVE_STATS_DB["DATABASE"] = DATABASE
else:
@@ -3512,16 +3512,16 @@ def save_stats(database: Any, path: str = None) -> None:
has changed, save it, otherwise don't.
"""
if not path:
- path='stats'
+ path = "stats"
try:
if path in SAVE_STATS_DB:
if SAVE_STATS_DB[path] != database:
- log.debug('Saving DB {}'.format(path))
+ log.debug("Saving DB {}".format(path))
actual_save_stats(database, path)
else:
- log.debug('Skipping save of DB {}'.format(path))
+ log.debug("Skipping save of DB {}".format(path))
else:
- log.debug('Saving DB bc not in SAVE_STATS_DB {}'.format(path))
+ log.debug("Saving DB bc not in SAVE_STATS_DB {}".format(path))
actual_save_stats(database, path)
except Exception as e:
log.error("Error saving db {}{}".format(path, e))
@@ -3736,13 +3736,13 @@ def update_table(
status: str = None,
dev_status: bool = False,
dev_loop: bool = False,
- clear:bool = True,
+ clear: bool = True,
):
"""
Function to update table printed to user.
:param status = Most recent status "waiting for xfers, starting crunching on x, etc"
"""
- # Don't update table in dev loop because all our variables reference
+ # Don't update table in dev loop because all our variables reference
# dev install, not main one
if dev_loop or SKIP_TABLE_UPDATES:
return
@@ -3810,13 +3810,13 @@ def boinc_loop(
:param client_rpc_client client BOINC rpc client, as it must be accessed in dev mode and kept in suspend
:param time How long to crunch for. Only used by dev mode at the moment
"""
- # If we are not passed this variable, it means we are not crunching for dev,
+ # If we are not passed this variable, it means we are not crunching for dev,
# so we fallback to global BOINC rpc
if not client_rpc_client:
client_rpc_client = rpc_client
existing_cpu_mode = None
existing_gpu_mode = None
- # These variables are referenced outside the loop
+ # These variables are referenced outside the loop
# (or in recursive calls of the loop) so should be made global
global COMBINED_STATS
global COMBINED_STATS_DEV
@@ -3845,7 +3845,7 @@ def boinc_loop(
if mode not in DATABASE:
DATABASE[mode] = {}
if DUMP_DATABASE:
- save_stats(DATABASE,'DATABASE_DUMP')
+ save_stats(DATABASE, "DATABASE_DUMP")
# Note yoyo@home does not support weak auth so it can't be added here
# URLs must be in canonicalized database format
@@ -3869,7 +3869,7 @@ def boinc_loop(
while True:
discrepancy = owed_to_dev()
- # If we have done sufficient crunching in dev mode, exit dev loop.
+ # If we have done sufficient crunching in dev mode, exit dev loop.
# Closing dev client is done after exiting loop.
if discrepancy < 1 and not FORCE_DEV_MODE and dev_loop:
return None
@@ -3927,7 +3927,7 @@ def boinc_loop(
DATABASE["STATSLASTCALCULATED"] = datetime.datetime.now()
COMBINED_STATS = config_files_to_stats(BOINC_DATA_DIR)
# Not sure what this line did but commented out, we'll see if anything breaks
- #total_time = combined_stats_to_total_time(COMBINED_STATS)
+ # total_time = combined_stats_to_total_time(COMBINED_STATS)
if dev_loop:
(
COMBINED_STATS_DEV,
@@ -4004,7 +4004,7 @@ def boinc_loop(
DATABASE["GRCPRICE"] = grc_price
else:
grc_price = DATABASE["GRCPRICE"]
- # Check profitability of all projects, if none profitable
+ # Check profitability of all projects, if none profitable
# (and user doesn't want unprofitable crunching), sleep for 1hr
if ONLY_BOINC_IF_PROFITABLE and not dev_loop:
profitability_list = []
@@ -4044,7 +4044,7 @@ def boinc_loop(
sleep(60 * 60)
continue
- # If we have enabled temperature control, verify that crunching is
+ # If we have enabled temperature control, verify that crunching is
# allowed at current temp
if ENABLE_TEMP_CONTROL:
# Get BOINC's starting CPU and GPU modes
@@ -4080,7 +4080,7 @@ def boinc_loop(
if not temp_check():
while True: # Keep sleeping until we pass a temp check
log.debug("Sleeping due to temperature")
- # Put BOINC into sleep mode, automatically reverting if
+ # Put BOINC into sleep mode, automatically reverting if
# script closes unexpectedly
sleep_interval = str(int(((60 * TEMP_SLEEP_TIME) + 60)))
loop.run_until_complete(
@@ -4118,7 +4118,7 @@ def boinc_loop(
if dev_boinc_password == "ERROR":
log.error("Error setting up crunching to developer account")
else:
- # Setup dev RPC connection, it may take a few tries while we
+ # Setup dev RPC connection, it may take a few tries while we
# wait for it to come online
tries = 1
tries_max = 5
@@ -4146,11 +4146,11 @@ def boinc_loop(
if tries > tries_max:
log.error("Giving up on connecting to BOINC dev client")
if dev_rpc_client:
- # Set main BOINC to suspend until we're done crunching in dev mode.
- # It will automatically re-enable itself in 100x the time if nothing
+ # Set main BOINC to suspend until we're done crunching in dev mode.
+ # It will automatically re-enable itself in 100x the time if nothing
# is done.
- # This allows for non-graceful exits of this script to not brick
- # client's BOINC and considerations that dev account may not be
+ # This allows for non-graceful exits of this script to not brick
+ # client's BOINC and considerations that dev account may not be
# crunching full time if client is actively using computer.
existing_mode_info = loop.run_until_complete(
run_rpc_command(rpc_client, "get_cc_status")
@@ -4227,7 +4227,7 @@ def boinc_loop(
else:
log.error("Unable to start dev mode due to unknown last mode")
- # Loop through each project in order of priority and request new tasks if
+ # Loop through each project in order of priority and request new tasks if
# not backed off stopping looping if cache becomes full
dont_nnt = None
if dev_loop:
@@ -4290,7 +4290,7 @@ def boinc_loop(
)
)
continue
- # If user has set to only mine highest mag project if profitable and
+ # If user has set to only mine highest mag project if profitable and
# it's not profitable or in benchmarking mode, skip
if (
ONLY_MINE_IF_PROFITABLE
@@ -4312,7 +4312,7 @@ def boinc_loop(
continue
if database_url not in DATABASE[mode]:
DATABASE[mode][database_url] = {}
- # Skip checking project if we have a backoff counter going and it
+ # Skip checking project if we have a backoff counter going and it
# hasn't been long enough
last_project_check: datetime.datetime = DATABASE[mode][database_url].get(
"LAST_CHECKED", datetime.datetime(1997, 6, 21, 18, 25, 30)
@@ -4352,11 +4352,11 @@ def boinc_loop(
# On first run, there is no project list
if isinstance(get_project_list, list):
- # Convert to simple list of strings so we can check if
+ # Convert to simple list of strings so we can check if
# project URL is in list
converted_project_list = project_list_to_project_list(
get_project_list
- )
+ )
else:
log.warning(
"Dev BOINC shows empty project list, this is normal on first run"
@@ -4433,15 +4433,13 @@ def boinc_loop(
+ str(boincified_url)
)
log.debug("Update response is {}".format(update_response))
- # Give BOINC time to update w project, I don't know a less hacky way to
+ # Give BOINC time to update w project, I don't know a less hacky way to
# do this, suggestions are welcome
- sleep(
- 15
- )
+ sleep(15)
DATABASE[mode][database_url]["LAST_CHECKED"] = datetime.datetime.now()
# Check if project should be backed off. If so, back it off.
# This is an exponentially increasing backoff with a maximum time of 1 day
- # Projects are backed off if they request it, if they are
+ # Projects are backed off if they request it, if they are
# unresponsive/down, or if no work is available
backoff_response = loop.run_until_complete(
check_log_entries_for_backoff(rpc_client, project_name=project_name)
@@ -4478,7 +4476,7 @@ def boinc_loop(
break
# Allow highest priority project to be non-NNTd.
- # This enables BOINC to fetch work if it's needed before our
+ # This enables BOINC to fetch work if it's needed before our
# sleep period elapses
dont_nnt = resolve_url_database(project_loop[0])
allow_this_project = resolve_url_boinc_rpc(dont_nnt, dev_mode=dev_loop)
@@ -4488,9 +4486,7 @@ def boinc_loop(
)
)
# There's no reason to loop through all projects more than once every 30 minutes
- custom_sleep(
- 30, rpc_client, dev_loop=dev_loop
- )
+ custom_sleep(30, rpc_client, dev_loop=dev_loop)
def print_and_log(msg: str, log_level: str) -> None:
@@ -4549,11 +4545,9 @@ def create_default_database() -> Dict[str, Any]:
override_path = os.path.join(BOINC_DATA_DIR, "global_prefs_override.xml")
override_dest_path = os.path.join(os.getcwd(), "global_prefs_override_backup.xml")
- # Shut down dev client is it's running. This is useful if program shuts
+ # Shut down dev client if it's running. This is useful if program shuts
# down unexpectedly
- shutdown_dev_client(
- quiet=True
- )
+ shutdown_dev_client(quiet=True)
# Load long-term stats
if os.path.exists("stats.json"):
@@ -4581,7 +4575,7 @@ def create_default_database() -> Dict[str, Any]:
DATABASE = create_default_database()
save_stats(DATABASE)
else:
- log.warning('No stats file found, making new one...')
+ log.warning("No stats file found, making new one...")
DATABASE = create_default_database()
save_stats(DATABASE)
@@ -4796,11 +4790,11 @@ def create_default_database() -> Dict[str, Any]:
if not rpc_client:
print_and_log("Error: Unable to connect to BOINC client, quitting now", "ERROR")
quit()
- # Get project list from BOINC client directly. This is needed for
+ # Get project list from BOINC client directly. This is needed for
# correct capitalization
temp_project_set, temp_project_names = loop.run_until_complete(
get_attached_projects(rpc_client)
- )
+ )
if not temp_project_set or not temp_project_names:
print_and_log(
"Error connecting to BOINC client, unable to get project list.", "ERROR"
@@ -4822,7 +4816,7 @@ def create_default_database() -> Dict[str, Any]:
rpc_user=rpc_user, rpc_port=rpc_port, rpc_password=gridcoin_rpc_password
)
# Test if the client is connectable
- source_urls = grc_client.get_approved_project_urls()
+ source_urls = grc_client.get_approved_project_urls()
wait_till_synced(grc_client)
source_urls = grc_client.get_approved_project_urls()
log.debug("Got source_urls from wallet: {}".format(source_urls))
@@ -5027,11 +5021,9 @@ def create_default_database() -> Dict[str, Any]:
highest_priority_project = ""
highest_priority_projects = []
# Force calculation of stats at first run since they are not cached in DB
- DATABASE["STATSLASTCALCULATED"] = datetime.datetime(
- 1997, 3, 3
- )
- # While we don't have enough tasks, continue cycling through project list and
- # updating. If we have cycled through all projects, get_highest_priority_project
+ DATABASE["STATSLASTCALCULATED"] = datetime.datetime(1997, 3, 3)
+ # While we don't have enough tasks, continue cycling through project list and
+ # updating. If we have cycled through all projects, get_highest_priority_project
# will stall to prevent requesting too often
boinc_loop(False, rpc_client)
# Restore user prefs
From d65719a5295197a83532748b13bcd24bde822ef4 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Thu, 30 Nov 2023 18:42:11 -0800
Subject: [PATCH 21/23] - Fix bug where failure to get GRC price causes crash -
Change default window to 90 days
---
config.py | 2 +-
main.py | 9 ++++++++-
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/config.py b/config.py
index 0c02ad2..fce43de 100644
--- a/config.py
+++ b/config.py
@@ -102,7 +102,7 @@ def TEMP_FUNCTION():
PRICE_CHECK_INTERVAL = 1440 # how often to check GRC price in minutes, minimum delay of 60 minutes between checks. Default is 1440 (24 hrs)
LOG_LEVEL = "WARNING" # Options are: 'DEBUG','INFO','WARNING','ERROR','NONE', default is 'WARNING'
MAX_LOGFILE_SIZE_IN_MB = 10 # Default: 10
-ROLLING_WEIGHT_WINDOW = 60 # Use stats up to x days old for calculating intended weights vs actual crunch time, Default: 60. Note that "benchmarking" is applied to total time, not windowed time. Benchmarking will take 1% of ALL crunching time across ALL time history. This enables you set smaller "windows" and get faster reaction to weight changes without over-doing benchmarking.
+ROLLING_WEIGHT_WINDOW = 90 # Use stats up to x days old for calculating intended weights vs actual crunch time, Default: 90. Note that "benchmarking" is applied to total time, not windowed time. Benchmarking will take 1% of ALL crunching time across ALL time history. This enables you set smaller "windows" and get faster reaction to weight changes without over-doing benchmarking.
# BENCHMARKING SETTINGS:
# Benchmarking is needed to determine profitability of a project. It is strongly suggested you keep these settings as they are, they are sane defaults.
diff --git a/main.py b/main.py
index bddfd1b..882e054 100644
--- a/main.py
+++ b/main.py
@@ -2858,6 +2858,11 @@ def cache_full(project_name: str, messages) -> bool:
if difference.seconds > 60 * 5: # If message is > 5 min old, skip
continue
uppered_message_body = message["body"].upper()
+ if (
+ """NOT REQUESTING TASKS: "NO NEW TASKS" REQUESTED VIA MANAGER"""
+ in uppered_message_body
+ ):
+ continue
if uppered_project == message["project"].upper():
if (
"CPU: JOB CACHE FULL" in uppered_message_body
@@ -3365,10 +3370,12 @@ def profitability_check(
combined_stats: dict,
) -> bool:
"""
- Returns True if crunching is profitable right now. False otherwise.
+ Returns True if crunching is profitable right now. False otherwise, or if unable to determine.
"""
if not grc_sell_price:
grc_sell_price = 0.00
+ if not grc_price:
+ return False
combined_stats_extract = combined_stats.get(project)
if not combined_stats_extract:
log.error(
From 66df50320345cf743f31e582227a8e18db18b1ab Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Thu, 30 Nov 2023 18:46:07 -0800
Subject: [PATCH 22/23] - Fix bug where failure to get GRC price causes crash -
Change default window to 90 days - 3.1 release
---
main.py | 2 +-
updates.txt | 3 ++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/main.py b/main.py
index 882e054..d130289 100644
--- a/main.py
+++ b/main.py
@@ -3374,7 +3374,7 @@ def profitability_check(
"""
if not grc_sell_price:
grc_sell_price = 0.00
- if not grc_price:
+ if not isinstance(grc_price,float) and not isinstance(grc_price,int):
return False
combined_stats_extract = combined_stats.get(project)
if not combined_stats_extract:
diff --git a/updates.txt b/updates.txt
index 4a67a4a..3b86804 100644
--- a/updates.txt
+++ b/updates.txt
@@ -5,4 +5,5 @@
2.1,0,Update is strongly suggested fixes several major bugs in project handling
2.2,1,FindTheMag critical security update please see Github for more info
2.3,0,Various usability improvements and crash fixes
-3.0,0,Massive improvements in stability and new config file format
\ No newline at end of file
+3.0,0,Massive improvements in stability and new config file format
+3.1,0,Stability improvements to reduce crashes
\ No newline at end of file
From d3dd8eab22b1f8dd41ad881368136fd96de96819 Mon Sep 17 00:00:00 2001
From: makeasnek
Date: Thu, 30 Nov 2023 18:50:22 -0800
Subject: [PATCH 23/23] - Fix issue #35 to prevent crash
---
main.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/main.py b/main.py
index d130289..f5b674f 100644
--- a/main.py
+++ b/main.py
@@ -1883,8 +1883,11 @@ def add_mag_to_combined_stats(
list of projects which are being crunched but not on approved projects list.
"""
unapproved_list = []
+ if not mag_ratios:
+ log.error('In add_mag_to_combined_stats but mag_ratios is empty. Setting all mag ratios to zero.')
+ mag_ratios={}
for project_url, project_stats in combined_stats.items():
- found_mag_ratio = mag_ratios.get(project_url)
+ found_mag_ratio = mag_ratios.get(project_url,0)
if not found_mag_ratio:
if project_url not in approved_projects:
if project_url not in preferred_projects: