Skip to content

Commit

Permalink
Add None type hints (#903)
Browse files Browse the repository at this point in the history
  • Loading branch information
singiamtel authored Feb 10, 2025
1 parent f150599 commit 0750117
Show file tree
Hide file tree
Showing 21 changed files with 88 additions and 88 deletions.
2 changes: 1 addition & 1 deletion aliBuild
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def doMain(args, parser):
error(e.message)
exit(1)

if args.action == "version" or args.action == None:
if args.action == "version" or args.action is None:
print("aliBuild version: {version} ({arch})".format(
version=__version__ or "unknown", arch=args.architecture or "unknown"))
sys.exit(0)
Expand Down
2 changes: 1 addition & 1 deletion alibuild_helpers/analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def report_exception(e):
exd = e.__class__.__name__,
exf = "1")

def enable_analytics():
def enable_analytics() -> None:
if exists(expanduser("~/.config/alibuild/disable-analytics")):
unlink(expanduser("~/.config/alibuild/disable-analytics"))
if not exists(expanduser("~/.config/alibuild/analytics-uuid")):
Expand Down
2 changes: 1 addition & 1 deletion alibuild_helpers/args.py
Original file line number Diff line number Diff line change
Expand Up @@ -474,7 +474,7 @@ def finaliseArgs(args, parser):
args.writeStore = args.remoteStore

if args.action in ["build", "init"]:
if "develPrefix" in args and args.develPrefix == None:
if "develPrefix" in args and args.develPrefix is None:
if "chdir" in args:
args.develPrefix = basename(abspath(args.chdir))
else:
Expand Down
2 changes: 1 addition & 1 deletion alibuild_helpers/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
import time


def writeAll(fn, txt):
def writeAll(fn, txt) -> None:
    """Write the string TXT to the file at path FN, truncating any existing content.

    Uses a context manager so the file handle is closed even if the
    write raises (the original open/write/close sequence leaked the
    handle on error).
    """
    with open(fn, "w") as f:
        f.write(txt)
Expand Down
2 changes: 1 addition & 1 deletion alibuild_helpers/cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ class DockerRunner:
instead.
"""

def __init__(self, docker_image, docker_run_args=()):
def __init__(self, docker_image, docker_run_args=()) -> None:
self._docker_image = docker_image
self._docker_run_args = docker_run_args
self._container = None
Expand Down
4 changes: 2 additions & 2 deletions alibuild_helpers/doctor.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from alibuild_helpers.utilities import getPackageList, parseDefaults, readDefaults, validateDefaults
from alibuild_helpers.cmd import getstatusoutput, DockerRunner

def prunePaths(workDir):
def prunePaths(workDir) -> None:
for x in ["PATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH"]:
if not x in os.environ:
continue
Expand Down Expand Up @@ -52,7 +52,7 @@ def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
spec.get("system_requirement_missing"))
return (err, "")

def systemInfo():
def systemInfo() -> None:
_,out = getstatusoutput("env")
debug("Environment:\n%s", out)
_,out = getstatusoutput("uname -a")
Expand Down
12 changes: 6 additions & 6 deletions alibuild_helpers/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@

debug, error, warning, info, success = (None, None, None, None, None)

def dieOnError(err, msg):
def dieOnError(err, msg) -> None:
    """If ERR is truthy, log MSG at error level and terminate with exit status 1.

    A falsy ERR is a no-op, so callers can pass a command's return code
    directly.
    """
    if not err:
        return
    error("%s", msg)
    sys.exit(1)

class LogFormatter(logging.Formatter):
def __init__(self, fmtstr):
def __init__(self, fmtstr) -> None:
self.fmtstr = fmtstr
self.COLOR_RESET = "\033[m" if sys.stdout.isatty() else ""
self.LEVEL_COLORS = { logging.WARNING: "\033[4;33m",
Expand All @@ -35,7 +35,7 @@ def format(self, record):
} for x in record.msg.split("\n"))


def log_current_package(package, main_package, specs, devel_prefix):
def log_current_package(package, main_package, specs, devel_prefix) -> None:
"""Show PACKAGE as the one currently being processed in future log messages."""
if logger_handler.level > logging.DEBUG:
return
Expand All @@ -55,14 +55,14 @@ def log_current_package(package, main_package, specs, devel_prefix):


class ProgressPrint:
def __init__(self, begin_msg=""):
def __init__(self, begin_msg="") -> None:
self.count = -1
self.lasttime = 0
self.STAGES = ".", "..", "...", "....", ".....", "....", "...", ".."
self.begin_msg = begin_msg
self.percent = -1

def __call__(self, txt, *args):
def __call__(self, txt, *args) -> None:
if logger.level <= logging.DEBUG or not sys.stdout.isatty():
debug(txt, *args)
return
Expand All @@ -88,7 +88,7 @@ def __call__(self, txt, *args):
self.lasttime = time.time()
sys.stderr.flush()

def erase(self):
def erase(self) -> None:
nerase = len(self.STAGES[self.count]) if self.count > -1 else 0
if self.percent > -1:
nerase = nerase + 7
Expand Down
52 changes: 26 additions & 26 deletions alibuild_helpers/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,23 +31,23 @@ def remote_from_url(read_url, write_url, architecture, work_dir, insecure=False)

class NoRemoteSync:
"""Helper class which does not do anything to sync"""
def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
pass
def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
pass
def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
pass

class PartialDownloadError(Exception):
def __init__(self, downloaded, size):
def __init__(self, downloaded, size) -> None:
self.downloaded = downloaded
self.size = size
def __str__(self):
return "only %d out of %d bytes downloaded" % (self.downloaded, self.size)


class HttpRemoteSync:
def __init__(self, remoteStore, architecture, workdir, insecure):
def __init__(self, remoteStore, architecture, workdir, insecure) -> None:
self.remoteStore = remoteStore
self.writeStore = ""
self.architecture = architecture
Expand Down Expand Up @@ -137,7 +137,7 @@ def getRetry(self, url, dest=None, returnResult=False, log=True, session=None, p
pass
return None

def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
# Check for any existing tarballs we can use instead of fetching new ones.
for pkg_hash in spec["remote_hashes"]:
try:
Expand Down Expand Up @@ -184,7 +184,7 @@ def fetch_tarball(self, spec):
destPath, session=session, progress=progress)
progress.end("done")

def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
links_path = resolve_links_path(self.architecture, spec["package"])
os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)

Expand Down Expand Up @@ -232,20 +232,20 @@ def fetch_symlinks(self, spec):
symlink("../../" + target.lstrip("./"),
os.path.join(self.workdir, links_path, linkname))

def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
pass


class RsyncRemoteSync:
"""Helper class to sync package build directory using RSync."""

def __init__(self, remoteStore, writeStore, architecture, workdir):
def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
self.remoteStore = re.sub("^ssh://", "", remoteStore)
self.writeStore = re.sub("^ssh://", "", writeStore)
self.architecture = architecture
self.workdir = workdir

def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
info("Downloading tarball for %s@%s, if available", spec["package"], spec["version"])
debug("Updating remote store for package %s with hashes %s", spec["package"],
", ".join(spec["remote_hashes"]))
Expand Down Expand Up @@ -273,7 +273,7 @@ def fetch_tarball(self, spec):
for pkg_hash in spec["remote_hashes"])))
dieOnError(err, "Unable to fetch tarball from specified store.")

def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
links_path = resolve_links_path(self.architecture, spec["package"])
os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)
err = execute("rsync -rlvW --delete {remote_store}/{links_path}/ {workdir}/{links_path}/".format(
Expand All @@ -283,7 +283,7 @@ def fetch_symlinks(self, spec):
))
dieOnError(err, "Unable to fetch symlinks from specified store.")

def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
if not self.writeStore:
return
dieOnError(execute("""\
Expand Down Expand Up @@ -313,16 +313,16 @@ class CVMFSRemoteSync:
means unpacking the symlink to the wanted package.
"""

def __init__(self, remoteStore, writeStore, architecture, workdir):
def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
self.remoteStore = re.sub("^cvmfs://", "", remoteStore)
# We do not support uploading directly to CVMFS, for obvious
# reasons.
assert(writeStore == None)
assert(writeStore is None)
self.writeStore = None
self.architecture = architecture
self.workdir = workdir

def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
info("Downloading tarball for %s@%s-%s, if available", spec["package"], spec["version"], spec["revision"])
# If we already have a tarball with any equivalent hash, don't check S3.
for pkg_hash in spec["remote_hashes"] + spec["local_hashes"]:
Expand All @@ -334,7 +334,7 @@ def fetch_tarball(self, spec):
info("Could not find prebuilt tarball for %s@%s-%s, will be rebuilt",
spec["package"], spec["version"], spec["revision"])

def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
# When using CVMFS, we create the symlinks by reading the remote store contents (NOTE: original comment was garbled — confirm intent).
info("Fetching available build hashes for %s, from %s", spec["package"], self.remoteStore)
links_path = resolve_links_path(self.architecture, spec["package"])
Expand Down Expand Up @@ -372,7 +372,7 @@ def fetch_symlinks(self, spec):
links_path=links_path,
))

def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
dieOnError(True, "CVMFS backend does not support uploading directly")

class S3RemoteSync:
Expand All @@ -381,13 +381,13 @@ class S3RemoteSync:
s3cmd must be installed separately in order for this to work.
"""

def __init__(self, remoteStore, writeStore, architecture, workdir):
def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
self.remoteStore = re.sub("^s3://", "", remoteStore)
self.writeStore = re.sub("^s3://", "", writeStore)
self.architecture = architecture
self.workdir = workdir

def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
info("Downloading tarball for %s@%s, if available", spec["package"], spec["version"])
debug("Updating remote store for package %s with hashes %s",
spec["package"], ", ".join(spec["remote_hashes"]))
Expand All @@ -410,7 +410,7 @@ def fetch_tarball(self, spec):
))
dieOnError(err, "Unable to fetch tarball from specified store.")

def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
err = execute("""\
mkdir -p "{workDir}/{linksPath}"
find "{workDir}/{linksPath}" -type l -delete
Expand All @@ -432,7 +432,7 @@ def fetch_symlinks(self, spec):
))
dieOnError(err, "Unable to fetch symlinks from specified store.")

def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
if not self.writeStore:
return
dieOnError(execute("""\
Expand Down Expand Up @@ -486,14 +486,14 @@ class Boto3RemoteSync:
time.
"""

def __init__(self, remoteStore, writeStore, architecture, workdir):
def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
self.remoteStore = re.sub("^b3://", "", remoteStore)
self.writeStore = re.sub("^b3://", "", writeStore)
self.architecture = architecture
self.workdir = workdir
self._s3_init()

def _s3_init(self):
def _s3_init(self) -> None:
# This is a separate method so that we can patch it out for unit tests.
# Import boto3 here, so that if we don't use this remote store, we don't
# have to install it in the first place.
Expand Down Expand Up @@ -530,7 +530,7 @@ def _s3_key_exists(self, key):
raise
return True

def fetch_tarball(self, spec):
def fetch_tarball(self, spec) -> None:
debug("Updating remote store for package %s with hashes %s", spec["package"],
", ".join(spec["remote_hashes"]))

Expand Down Expand Up @@ -568,7 +568,7 @@ def fetch_tarball(self, spec):
debug("Remote has no tarballs for %s with hashes %s", spec["package"],
", ".join(spec["remote_hashes"]))

def fetch_symlinks(self, spec):
def fetch_symlinks(self, spec) -> None:
from botocore.exceptions import ClientError
links_path = resolve_links_path(self.architecture, spec["package"])
os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)
Expand Down Expand Up @@ -614,7 +614,7 @@ def fetch_symlinks(self, spec):
target = "../../" + target
symlink(target, link_path)

def upload_symlinks_and_tarball(self, spec):
def upload_symlinks_and_tarball(self, spec) -> None:
if not self.writeStore:
return

Expand Down
2 changes: 1 addition & 1 deletion alibuild_helpers/templating_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from jinja2.sandbox import SandboxedEnvironment


def build_plugin(specs, args, build_order):
def build_plugin(specs, args, build_order) -> None:
"""Read a user-provided template from stdin and render it."""
print(SandboxedEnvironment(autoescape=False)
.from_string(sys.stdin.read())
Expand Down
6 changes: 3 additions & 3 deletions alibuild_helpers/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,14 +316,14 @@ def getRecipeReader(url, dist=None):

# Read a recipe from a file
class FileReader(object):
def __init__(self, url):
def __init__(self, url) -> None:
self.url = url
def __call__(self):
return open(self.url).read()

# Read a recipe from a git repository using git show.
class GitReader(object):
def __init__(self, url, configDir):
def __init__(self, url, configDir) -> None:
self.url, self.configDir = url, configDir
def __call__(self):
m = re.search(r'^dist:(.*)@([^@]+)$', self.url)
Expand Down Expand Up @@ -600,7 +600,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,


class Hasher:
def __init__(self):
def __init__(self) -> None:
self.h = hashlib.sha1()
def __call__(self, txt):
if not type(txt) == bytes:
Expand Down
4 changes: 2 additions & 2 deletions tests/test_analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ def noAnalytics():
def yesAnalytics():
return True

def notInvoked():
def notInvoked() -> None:
    """Test callback that must never be called; fail loudly if it is.

    Raises AssertionError explicitly rather than via the ``assert``
    statement, so the failure still fires when the test suite is run
    under ``python -O`` (which strips asserts).
    """
    raise AssertionError("this callback should never be invoked")

class TestAnalytics(unittest.TestCase):
def test_analytics(self):
def test_analytics(self) -> None:
self.assertEqual(False, decideAnalytics(hasDisableFile=False,
hasUuid=False,
isTty=False,
Expand Down
2 changes: 1 addition & 1 deletion tests/test_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def test_failingParsing(self, mock_print):
f"Expected '{args[0]}' matching '{pattern}' but it's not the case."
)

def test_validArchitectures(self):
def test_validArchitectures(self) -> None:
for arch in VALID_ARCHS:
self.assertTrue(matchValidArch(arch))
for arch in INVALID_ARCHS:
Expand Down
Loading

0 comments on commit 0750117

Please sign in to comment.