Skip to content

Commit

Permalink
Make the backup check interval configurable
Browse files Browse the repository at this point in the history
  • Loading branch information
sabeechen committed Mar 31, 2024
1 parent 19fd8cc commit 93eb7fc
Show file tree
Hide file tree
Showing 7 changed files with 59 additions and 17 deletions.
16 changes: 12 additions & 4 deletions hassio-google-drive-backup/backup/config/durationparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@
from datetime import timedelta
from injector import inject, singleton

MILLISECONDS_IDENTIFIERS = ["ms", "msec", "msecs", "millisecond", "milliseconds"]
SECOND_IDENTIFIERS = ["s", "sec", "secs", "second", "seconds"]
MINUTE_IDENTIFIERS = ["m", "min", "mins", "minute", "minutes"]
HOUR_IDENTIFIERS = ["h", "hr", "hour", "hours"]
DAY_IDENTIFIERS = ["d", "day", "days"]
ALL_IDENTIFIERS = MILLISECONDS_IDENTIFIERS + SECOND_IDENTIFIERS + MINUTE_IDENTIFIERS + HOUR_IDENTIFIERS + DAY_IDENTIFIERS
ALL_IDENTIFIERS.sort(key=lambda x: -len(x))
NUMBER_REGEX = "^([0-9]*[.])?[0-9]+"
VALID_REGEX = "^([ ]*([0-9]*[.])?[0-9]+[ ]*(seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*"

VALID_REGEX = f"^([ ]*([0-9]*[.])?[0-9]+[ ]*({'|'.join(ALL_IDENTIFIERS)})?[ ,]*)*"

@singleton
class DurationParser():
Expand All @@ -35,7 +37,7 @@ def parse(self, source: str):

if i < len(parts):
next_part = parts[i].strip().strip(',')
if next_part in SECOND_IDENTIFIERS or next_part in MINUTE_IDENTIFIERS or next_part in HOUR_IDENTIFIERS or next_part in DAY_IDENTIFIERS:
if next_part in SECOND_IDENTIFIERS or next_part in MINUTE_IDENTIFIERS or next_part in HOUR_IDENTIFIERS or next_part in DAY_IDENTIFIERS or next_part in MILLISECONDS_IDENTIFIERS:
identifier = next_part
i += 1
else:
Expand All @@ -44,7 +46,9 @@ def parse(self, source: str):
identifier = "s"
else:
identifier = part[len(match.group(0)):]
if identifier in SECOND_IDENTIFIERS:
if identifier in MILLISECONDS_IDENTIFIERS:
total += timedelta(milliseconds=length)
elif identifier in SECOND_IDENTIFIERS:
total += timedelta(seconds=length)
elif identifier in MINUTE_IDENTIFIERS:
total += timedelta(minutes=length)
Expand Down Expand Up @@ -74,6 +78,10 @@ def format(self, duration: timedelta):
seconds = int(duration.seconds)
parts.append("{} seconds".format(seconds))
duration = duration - timedelta(seconds=seconds)
if duration >= timedelta(milliseconds=1):
seconds = int(duration.microseconds / 1000)
parts.append("{} milliseconds".format(seconds))
duration = duration - timedelta(milliseconds=seconds)

Check warning on line 84 in hassio-google-drive-backup/backup/config/durationparser.py

View check run for this annotation

Codecov / codecov/patch

hassio-google-drive-backup/backup/config/durationparser.py#L82-L84

Added lines #L82 - L84 were not covered by tests
if len(parts) > 0:
return ", ".join(parts)
else:
Expand Down
4 changes: 4 additions & 0 deletions hassio-google-drive-backup/backup/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ class Setting(Enum):
CACHE_WARMUP_MAX_SECONDS = "cache_warmup_max_seconds"
CACHE_WARMUP_ERROR_TIMEOUT_SECONDS = "cache_warmup_error_timeout"
MAX_BACKOFF_SECONDS = "max_backoff_seconds"
BACKUP_CHECK_INTERVAL_SECONDS = "backup_check_interval_seconds"

# Old, deprecated settings
DEPRECTAED_MAX_BACKUPS_IN_HA = "max_snapshots_in_hassio"
Expand Down Expand Up @@ -300,6 +301,7 @@ def key(self):
Setting.CACHE_WARMUP_MAX_SECONDS: 15 * 60,  # 15 minutes (NOTE(review): comment said "30 minutes" but 15 * 60 is 15 minutes — confirm which was intended)
Setting.CACHE_WARMUP_ERROR_TIMEOUT_SECONDS: 24 * 60 * 60, # 1 day
Setting.MAX_BACKOFF_SECONDS: 60 * 60 * 2, # 2 hours
Setting.BACKUP_CHECK_INTERVAL_SECONDS: 0.5,

Setting.UPLOAD_LIMIT_BYTES_PER_SECOND: 0,
}
Expand Down Expand Up @@ -443,6 +445,7 @@ def key(self):
Setting.CACHE_WARMUP_MAX_SECONDS: "float(0,)",
Setting.CACHE_WARMUP_ERROR_TIMEOUT_SECONDS: "float(0,)",
Setting.MAX_BACKOFF_SECONDS: "int(3600,)?",
Setting.BACKUP_CHECK_INTERVAL_SECONDS: "float(0.5,)?",

Setting.UPLOAD_LIMIT_BYTES_PER_SECOND: "float(0,)?",
}
Expand Down Expand Up @@ -522,6 +525,7 @@ def getValidator(name, schema):
_VALIDATORS[Setting.MAXIMUM_UPLOAD_CHUNK_BYTES] = BytesizeAsStringValidator(Setting.MAXIMUM_UPLOAD_CHUNK_BYTES.value, minimum=256 * 1024)
_VALIDATORS[Setting.PENDING_BACKUP_TIMEOUT_SECONDS] = DurationAsStringValidator(Setting.PENDING_BACKUP_TIMEOUT_SECONDS.value, minimum=1, maximum=None)
_VALIDATORS[Setting.UPLOAD_LIMIT_BYTES_PER_SECOND] = BytesizeAsStringValidator(Setting.UPLOAD_LIMIT_BYTES_PER_SECOND.value, minimum=0)
_VALIDATORS[Setting.BACKUP_CHECK_INTERVAL_SECONDS] = DurationAsStringValidator(Setting.BACKUP_CHECK_INTERVAL_SECONDS.value, minimum=0.5)
VERSION = addon_config["version"]


Expand Down
11 changes: 8 additions & 3 deletions hassio-google-drive-backup/backup/model/syncer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,17 @@
from backup.worker import Worker, Trigger
from backup.logger import getLogger
from backup.exceptions import PleaseWait
from backup.config import Config, Setting

logger = getLogger(__name__)


@singleton
class Scyncer(Worker):
@inject
def __init__(self, time: Time, coord: Coordinator, triggers: List[Trigger]):
super().__init__("Sync Worker", self.checkforSync, time, 0.5)
def __init__(self, time: Time, coord: Coordinator, config: Config, triggers: List[Trigger]):
self._config = config
super().__init__("Sync Worker", self.checkforSync, time, self.getInterval)
self.coord = coord
self.triggers: List[Trigger] = triggers
self._time = time
Expand All @@ -32,5 +34,8 @@ async def checkforSync(self):
await self._time.sleepAsync(3)
await self.coord.sync()
except PleaseWait:
# Ignore this, since it means a sync already started (unavilable race condition)
# Ignore this, since it means a sync already started (unavoidable race condition)
pass

def getInterval(self):
    """Return the backup-check polling interval in seconds, as configured.

    Reads Setting.BACKUP_CHECK_INTERVAL_SECONDS from the addon config so the
    worker's cadence can be tuned by the user instead of being hard-coded.
    """
    interval = self._config.get(Setting.BACKUP_CHECK_INTERVAL_SECONDS)
    return interval

Check warning on line 41 in hassio-google-drive-backup/backup/model/syncer.py

View check run for this annotation

Codecov / codecov/patch

hassio-google-drive-backup/backup/model/syncer.py#L41

Added line #L41 was not covered by tests
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,7 @@
<div class="input-field col s12 m12 s12">
<i class="material-icons prefix">refresh</i>
<input type="text" id="max_sync_interval_seconds" name="max_sync_interval_seconds"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(ms|msec|msecs|millisecond|milliseconds|seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
class="validate" />
<label for="max_sync_interval_seconds">Sync Interval</label>
<span class="helper-text">
Expand All @@ -636,7 +636,7 @@
<div class="input-field col s12 m12 s12">
<i class="material-icons prefix">sync</i>
<input type="text" id="ha_reporting_interval_seconds" name="ha_reporting_interval_seconds"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(ms|msec|msecs|millisecond|milliseconds|seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
class="validate" />
<label for="ha_reporting_interval_seconds">Home Assistant Reporting Interval</label>
<span class="helper-text">
Expand All @@ -657,22 +657,37 @@
class="validate" />
<label for="maximum_upload_chunk_bytes">Maximum Drive Upload Chunk Size</label>
<span class="helper-text">
Sets the maximum "chunk" size allowed for uploads to Google Drive. Larger sizes will upload faster but delay reports
of progress and may cause interruptions on some network hardware. Between 1MB and 20MB is recommended, minimum is 256 kB.
Sets the maximum "chunk" size allowed for uploads to Google Drive. Larger sizes will upload faster but delay reports
of progress and may cause interruptions on some network hardware. Between 1MB and 20MB is recommended, minimum is 256 kB.
The value can be provided in any binary-prefix format, e.g. '256 Kb', '10 Mb', '3000Kb', etc.</span>
</div>
</div>
<div class="col s11 offset-s1 row">
<div class="input-field col s12 m12 s12">
<i class="material-icons prefix">timelapse</i>
<input type="text" id="pending_backup_timeout_seconds" name="pending_backup_timeout_seconds"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(ms|msec|msecs|millisecond|milliseconds|seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
class="validate" />
<label for="pending_backup_timeout_seconds">New Backup Timeout</label>
<span class="helper-text">
How long the addon should wait after asking Home Assistant for a new backup before it considers the backup failed and
attempts a retry. If you have very large backups or your backups just take a long time, you might need to increase
this. It can be specified in terms of hours, minutes and seconds (eg 2 hours, 100 minutes, 9 hours 30 seconds, etc)
How long the addon should wait after asking Home Assistant for a new backup before it considers the backup failed and
attempts a retry. If you have very large backups or your backups just take a long time, you might need to increase
this. It can be specified in terms of hours, minutes and seconds (eg 2 hours, 100 minutes, 9 hours 30 seconds, etc)
</span>
</div>
</div>
<div class="col s11 offset-s1 row">
<div class="input-field col s12 m12 s12">
<i class="material-icons prefix">timelapse</i>
<input type="text" id="backup_check_interval_seconds" name="backup_check_interval_seconds"
pattern="^([ ]*([0-9]*[.])?[0-9]+[ ]*(ms|msec|msecs|millisecond|milliseconds|seconds|second|secs|sec|s|minutes|minute|mins|min|m|hours|hour|hr|h|days|day|d)?[ ,]*)*$"
class="validate" />
<label for="backup_check_interval_seconds">Backup Check Interval</label>
<span class="helper-text">
How frequently the addon should check its internal cache to see if a new backup should be made. You can raise this to check
less frequently if you have a lot of backups and just checking uses a lot of CPU. When doing so, creating new backups may not
happen exactly when they otherwise would and actions taken from the UI can be delayed. Setting this above a few seconds isn't
recommended.
</span>
</div>
</div>
Expand Down
3 changes: 2 additions & 1 deletion hassio-google-drive-backup/backup/worker/worker.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import asyncio

from typing import Callable
from ..time import Time
from ..config import Startable
from ..logger import getLogger
Expand All @@ -12,7 +13,7 @@ class StopWorkException(Exception):


class Worker(Startable):
def __init__(self, name, method, time: Time, interval=1):
def __init__(self, name, method, time: Time, interval: float|Callable=1):
super().__init__()
self._method = method
self._time = time
Expand Down
3 changes: 2 additions & 1 deletion hassio-google-drive-backup/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,8 @@
"maximum_upload_chunk_bytes": "float(262144,)?",
"ha_reporting_interval_seconds": "int(1,)?",

"upload_limit_bytes_per_second": "float(0,)?"
"upload_limit_bytes_per_second": "float(0,)?",
"backup_check_interval_seconds": "float(0.5,)?"
},
"ports": {
"1627/tcp": 1627
Expand Down
8 changes: 8 additions & 0 deletions hassio-google-drive-backup/tests/test_duration_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,14 @@ def test_parse_seconds():
assert parser.parse("5.0 secs") == timedelta(seconds=5)
assert parser.parse("5.5 s") == timedelta(seconds=5, milliseconds=500)

def test_parse_ms():
    # Verify every millisecond identifier spelling parses, with and without
    # a separating space, and combines correctly with other units.
    parser = DurationParser()
    cases = [
        ("1 ms", timedelta(milliseconds=1)),
        ("5 msec", timedelta(milliseconds=5)),
        ("5 milliseconds", timedelta(milliseconds=5)),
        ("5millisecond", timedelta(milliseconds=5)),
        ("1 seconds 500 ms", timedelta(seconds=1, milliseconds=500)),
    ]
    for text, expected in cases:
        assert parser.parse(text) == expected


def test_parse_multiple():
parser = DurationParser()
Expand Down

0 comments on commit 93eb7fc

Please sign in to comment.