Skip to content

Commit

Permalink
Merge pull request #748 from sabeechen/dev
Browse files Browse the repository at this point in the history
Release v0.109.2
  • Loading branch information
sabeechen authored Nov 16, 2022
2 parents 2d85355 + a64de85 commit b32c237
Show file tree
Hide file tree
Showing 15 changed files with 92 additions and 52 deletions.
3 changes: 2 additions & 1 deletion .devcontainer/requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -27,4 +27,5 @@ aiohttp-jinja2
beautifulsoup4
firebase-admin
aiofile
grpcio
grpcio
aioping
3 changes: 2 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,6 @@
"editor.defaultFormatter": "vscode.html-language-features",
"editor.tabSize": 2,
},
"python.analysis.completeFunctionParens": true
"python.analysis.completeFunctionParens": true,
"files.eol": "\n"
}
8 changes: 3 additions & 5 deletions hassio-google-drive-backup/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
## v0.109.2 [2022-11-15]
* Fixed a bug where disabling deletion from Google Drive and enabling deletes after upload could cause backups in Google Drive to be deleted.

## v0.109.1 [2022-11-07]
* If configured from the browser, defaults to a "dark" theme if you haven't already configured custom colors
* Makes the interval at which the addon publishes sensors to Home Assistant configurable (see the "Uncommon Options" settings)
Expand All @@ -15,8 +18,3 @@
* Fixed an error preventing stopped addons from being started if they hit errors while stopping.
* Fixed many, many, many grammatical errors thanks to [@markvader's](https://github.com/markvader) [#665](https://github.com/sabeechen/hassio-google-drive-backup/pull/665).
* Fixed a missing config option in the addon schema, maximum_upload_chunk_bytes.

## v0.108.2 [2022-06-03]
* Switched to ignoring 'upgrade' backups by default for new users.
* Added a warning for existing users if you're not ignoring upgrade backups.
* Added a warning about google's OOB deprecation for private credential users.
5 changes: 4 additions & 1 deletion hassio-google-drive-backup/backup/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ class Setting(Enum):
EXCHANGER_TIMEOUT_SECONDS = "exchanger_timeout_seconds"
HA_REPORTING_INTERVAL_SECONDS = "ha_reporting_interval_seconds"
LONG_TERM_STALE_BACKUP_SECONDS = "long_term_stale_backup_seconds"
PING_TIMEOUT = "ping_timeout"

# Old, deprecated settings
DEPRECTAED_MAX_BACKUPS_IN_HA = "max_snapshots_in_hassio"
Expand Down Expand Up @@ -279,6 +280,7 @@ def key(self):
Setting.EXCHANGER_TIMEOUT_SECONDS: 10,
Setting.HA_REPORTING_INTERVAL_SECONDS: 10,
Setting.LONG_TERM_STALE_BACKUP_SECONDS: 60 * 60 * 24,
Setting.PING_TIMEOUT: 5
}

_STAGING_DEFAULTS = {
Expand Down Expand Up @@ -410,7 +412,8 @@ def key(self):
Setting.BACKUP_STARTUP_DELAY_MINUTES: "float(0,)?",
Setting.EXCHANGER_TIMEOUT_SECONDS: "float(0,)?",
Setting.HA_REPORTING_INTERVAL_SECONDS: "int(1,)?",
Setting.LONG_TERM_STALE_BACKUP_SECONDS: "int(1,)?"
Setting.LONG_TERM_STALE_BACKUP_SECONDS: "int(1,)?",
Setting.PING_TIMEOUT: "float(0,)?"
}

PRIVATE = [
Expand Down
35 changes: 15 additions & 20 deletions hassio-google-drive-backup/backup/debugworker.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import asyncio
import socket
import subprocess
import aioping
from datetime import datetime, timedelta

from aiohttp import ClientSession, ClientTimeout
Expand Down Expand Up @@ -184,26 +184,21 @@ def formatDate(self, date: datetime):
async def getPingInfo(self):
    """
    Ping every resolved IP address of the configured Google Drive host and
    collect per-address latency information for the debug report.

    Returns a nested dict: {hostname: {ip: "<ms> ms" | "<error text>" | "Unknown"}}.
    Note: the diff scrape interleaved the old fping/subprocess implementation
    with this aioping-based one; this is the coherent post-change version.
    """
    who = self.config.get(Setting.DRIVE_HOST_NAME)
    ips = await self.resolve(who)
    results = {who: {}}
    tasks = {who: {}}
    for ip in ips:
        # Placeholder until the ping task for this address completes.
        results[who][ip] = "Unknown"
        # Start all pings concurrently; each honors the configurable timeout.
        tasks[who][ip] = asyncio.create_task(aioping.ping(ip, timeout=self.config.get(Setting.PING_TIMEOUT)))

    # Await each ping; record latency in ms on success, the error text on failure.
    for server in tasks.keys():
        for ip in tasks[server].keys():
            try:
                time = await tasks[server][ip]
                results[server][ip] = f"{round(time * 1000, 0)} ms"
            except Exception as e:
                results[server][ip] = str(e)
    return results

async def resolve(self, who: str):
try:
Expand Down
9 changes: 9 additions & 0 deletions hassio-google-drive-backup/backup/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,9 @@ def postSync(self) -> None:
def detail(self) -> str:
    # No extra detail by default — presumably subclasses override this; TODO confirm.
    return ""

def isDestination(self) -> bool:
    # Base sources are not destinations; the destination class overrides this to return True.
    return False

# Gets called after reading state but before any changes are made
# to check for additional errors.
def checkBeforeChanges(self) -> None:
Expand All @@ -92,6 +95,9 @@ def isWorking(self):
def might_be_oob_creds(self) -> bool:
return False

def isDestination(self) -> bool:
    # This class represents a backup destination, so purge logic that must never
    # delete from a zero-max destination (issue #745) can identify it.
    return True


@singleton
class Model():
Expand Down Expand Up @@ -336,6 +342,9 @@ def _nextPurge(self, source: BackupSource, backups, findNext=False):
"""
if not source.enabled() or len(backups) == 0:
return None, None
if source.maxCount() == 0 and source.isDestination():
# When maxCount is zero for a destination, we should never delete from it.
return None, None
if source.maxCount() == 0 and not self.config.get(Setting.DELETE_AFTER_UPLOAD):
return None, None

Expand Down
6 changes: 5 additions & 1 deletion hassio-google-drive-backup/backup/model/simulatedsource.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@


class SimulatedSource(BackupDestination):
def __init__(self, name):
def __init__(self, name, is_destination=False):
self._name = name
self.current: Dict[str, DummyBackupSource] = {}
self.saved = []
Expand All @@ -25,6 +25,10 @@ def __init__(self, name):
self.backup_type = "Full"
self.working = False
self.needConfig = None
self.is_destination = is_destination

def isDestination(self):
    # Reflects the is_destination flag passed to __init__ (defaults to False).
    return self.is_destination

def setEnabled(self, value):
self._enabled = value
Expand Down
2 changes: 1 addition & 1 deletion hassio-google-drive-backup/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "Home Assistant Google Drive Backup",
"version": "0.109.1",
"version": "0.109.2",
"slug": "hassio_google_drive_backup",
"description": "Automatically manage backups between Home Assistant and Google Drive",
"arch": ["armhf", "armv7", "aarch64", "amd64", "i386"],
Expand Down
24 changes: 18 additions & 6 deletions hassio-google-drive-backup/dev/request_interceptor.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import re
from aiohttp.web import Request, Response
from asyncio import Event, sleep
from asyncio import Event
from aiohttp.web_response import json_response
from injector import singleton, inject
from backup.time import Time


class UrlMatch():
def __init__(self, url, fail_after=None, status=None, response=None, wait=False, sleep=None, fail_for=None):
def __init__(self, time: Time, url, fail_after=None, status=None, response=None, wait=False, sleep=None, fail_for=None):
self.time = time
self.url: str = url
self.fail_after: int = fail_after
self.status: int = status
Expand All @@ -21,10 +23,15 @@ def __init__(self, url, fail_after=None, status=None, response=None, wait=False,
self.fail_for = fail_for
self.responses = []
self._calls = 0
self.time = time

def addResponse(self, response):
    # Queue an additional canned response for this matcher.
    self.responses.append(response)

def stop(self):
    # Release anything blocked on this matcher (waiters and sleepers) so
    # server shutdown can't hang on an intercepted request.
    self.wait_event.set()
    self.trigger_event.set()

def isMatch(self, request):
    # Regex-match the configured url pattern against the request's URL path.
    return re.match(self.url, request.url.path)

Expand All @@ -51,7 +58,7 @@ async def _doAction(self, request: Request):
self.trigger_event.set()
await self.wait_event.wait()
elif self.sleep is not None:
await sleep(self.sleep)
await self.time.sleepAsync(self.sleep, early_exit=self.wait_event)

async def called(self, request: Request):
if self.fail_after is None or self.fail_after <= 0:
Expand Down Expand Up @@ -82,9 +89,14 @@ class RequestInterceptor:
def __init__(self):
self._matchers = []
self._history = []
self.time = Time()

def stop(self):
    # Stop every registered matcher, releasing any requests they are blocking.
    for matcher in self._matchers:
        matcher.stop()

def setError(self, url, status=None, fail_after=None, fail_for=None, response=None) -> UrlMatch:
    """Register a matcher that injects an error for requests matching *url*.

    The diff scrape retained both the old and new constructor calls; this is
    the post-change version that passes the shared Time instance through.
    """
    matcher = UrlMatch(self.time, url, fail_after, status=status, response=response, fail_for=fail_for)
    self._matchers.append(matcher)
    return matcher

Expand All @@ -93,12 +105,12 @@ def clear(self):
self._history.clear()

def setWaiter(self, url, attempts=None):
    """Register a matcher that blocks matching requests until released.

    Reconstructed post-change version (the scrape kept both old and new
    UrlMatch calls); the matcher now receives the shared Time instance.
    """
    matcher = UrlMatch(self.time, url, attempts, wait=True)
    self._matchers.append(matcher)
    return matcher

def setSleep(self, url, attempts=None, sleep=None, wait_for=None):
    """Register a matcher that delays matching requests by *sleep* seconds.

    Reconstructed post-change version (the scrape kept both old and new
    UrlMatch calls); passing Time lets the delay be interrupted via stop().
    """
    matcher = UrlMatch(self.time, url, attempts, sleep=sleep, fail_for=wait_for)
    self._matchers.append(matcher)
    return matcher

Expand Down
1 change: 1 addition & 0 deletions hassio-google-drive-backup/dev/simulationserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ async def start(self, port):
await site.start()

async def stop(self):
self.interceptor.stop()
await self.runner.shutdown()
await self.runner.cleanup()

Expand Down
1 change: 1 addition & 0 deletions hassio-google-drive-backup/requirements-addon.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,4 @@ aiofiles
aiofile
colorlog
aiohttp-jinja2
aioping
1 change: 1 addition & 0 deletions hassio-google-drive-backup/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ async def generate_config(server_url: URL, ports, cleandir):
Setting.PORT: ports.ui,
Setting.INGRESS_PORT: ports.ingress,
Setting.BACKUP_STARTUP_DELAY_MINUTES: 0,
Setting.PING_TIMEOUT: 0.1,
})


Expand Down
4 changes: 2 additions & 2 deletions hassio-google-drive-backup/tests/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,8 +162,8 @@ class IntentionalFailure(Exception):


class HelperTestSource(SimulatedSource):
def __init__(self, name, is_destination=False):
    """Test helper source; is_destination marks it as a backup destination.

    Reconstructed post-change version: the diff scrape retained both the old
    (name-only) and new (is_destination) signatures interleaved.
    """
    super().__init__(name, is_destination=is_destination)
    # Toggles that let individual tests forbid creating or saving backups.
    self.allow_create = True
    self.allow_save = True

Expand Down
15 changes: 3 additions & 12 deletions hassio-google-drive-backup/tests/test_debugworker.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,25 +16,16 @@ async def test_dns_info(debug_worker: DebugWorker, config: Config):
config.override(Setting.SEND_ERROR_REPORTS, True)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
await debug_worker.doWork()
assert debug_worker.dns_info == {
'localhost': {
'127.0.0.1': 'alive',
'localhost': 'alive'
}
}
assert '127.0.0.1' in debug_worker.dns_info['localhost']
assert 'localhost' in debug_worker.dns_info['localhost']


@pytest.mark.asyncio
async def test_bad_host(debug_worker: DebugWorker, config: Config):
    """An unresolvable hostname should surface the resolver error in dns_info.

    Reconstructed post-change version: the scrape kept both the old exact-dict
    assertion and the new substring assertion; only the latter is current.
    """
    skipForWindows()
    config.override(Setting.DRIVE_HOST_NAME, "dasdfdfgvxcvvsoejbr.com")
    await debug_worker.doWork()
    # Check only for the error substring; the full message can vary by platform/libc.
    assert "Name or service not known" in debug_worker.dns_info['dasdfdfgvxcvvsoejbr.com']['dasdfdfgvxcvvsoejbr.com']

@pytest.mark.asyncio
async def test_send_error_report(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server, error_store: ErrorStore):
Expand Down
27 changes: 25 additions & 2 deletions hassio-google-drive-backup/tests/test_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@

@pytest.fixture
def source():
    # Source side of the sync; explicitly not a destination (post-change version —
    # the scrape retained both the old and new return lines).
    return HelperTestSource("Source", is_destination=False)


@pytest.fixture
def dest():
    # Destination side of the sync (post-change version — the scrape retained
    # both the old and new return lines).
    return HelperTestSource("Dest", is_destination=True)


@pytest.fixture
Expand Down Expand Up @@ -855,6 +855,29 @@ async def test_delete_ignored_upgrade_backup_after_some_time(time: FakeTime, mod
dest.assertThat(current=1)


@pytest.mark.asyncio
async def test_zero_config_whiled_deleting_backups(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config):
    """
    Issue #745 identified that setting the destination's max backups to 0 while "delete after upload"=True would cause destination
    backups to get deleted due to an error in the logic for handling purges. This test verifies that no longer happens.
    """
    source.setMax(1)
    dest.setMax(1)
    simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
    simple_config.override(Setting.DELETE_AFTER_UPLOAD, True)
    # Seed one backup and sync: it should be uploaded to dest and deleted from source.
    source.insert("Backup", time.now())
    await model.sync(time.now())
    source.assertThat(current=0, deleted=1)
    dest.assertThat(current=1, saved=1)
    source.reset()
    dest.reset()

    # With the destination's max set to 0, a second sync must leave the
    # previously uploaded backup in the destination untouched.
    dest.setMax(0)
    await model.sync(time.now())
    dest.assertThat(current=1)
    source.assertThat()


@pytest.mark.asyncio
async def test_generational_delete_issue602(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config):
time.setTimeZone("Europe/Rome")
Expand Down

0 comments on commit b32c237

Please sign in to comment.