From 343393021f922e4e395156f2eb836019660beba5 Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 16 Mar 2023 23:13:17 -0600 Subject: [PATCH 1/7] Always release request resources --- .../backup/creds/driverequester.py | 9 +- .../backup/creds/exchanger.py | 13 +- .../backup/drive/driverequests.py | 111 ++++++++---------- .../backup/util/asynchttpgetter.py | 5 +- 4 files changed, 62 insertions(+), 76 deletions(-) diff --git a/hassio-google-drive-backup/backup/creds/driverequester.py b/hassio-google-drive-backup/backup/creds/driverequester.py index e453f228..ed15ed3a 100644 --- a/hassio-google-drive-backup/backup/creds/driverequester.py +++ b/hassio-google-drive-backup/backup/creds/driverequester.py @@ -1,4 +1,4 @@ -from aiohttp import ClientSession, ContentTypeError, ClientConnectorError, ClientTimeout +from aiohttp import ClientSession, ContentTypeError, ClientConnectorError, ClientTimeout, ClientResponse from aiohttp.client_exceptions import ServerTimeoutError, ServerDisconnectedError, ClientOSError from backup.exceptions import GoogleUnexpectedError, GoogleInternalError, GoogleRateLimitError, GoogleCredentialsExpired, CredRefreshGoogleError, DriveQuotaExceeded, GoogleDrivePermissionDenied, GoogleDnsFailure, GoogleCantConnect, GoogleTimeoutError from backup.util import Resolver @@ -24,20 +24,23 @@ def __init__(self, config: Config, session: ClientSession, resolver: Resolver): self.resolver = resolver self.config = config - async def request(self, method, url, headers={}, json=None, data=None): + async def request(self, method, url, headers={}, json=None, data=None) -> ClientResponse: try: - # MAYBE: Exceptions here should clean up the response object response = await self.session.request(method, url, headers=headers, json=json, timeout=self.buildTimeout(), data=data) if response.status < 400: return response await self.raiseForKnownErrors(response) if response.status in PERMISSION_DENIED: + response.release() raise GoogleCredentialsExpired() elif response.status in INTERNAL_ERROR: + response.release() raise GoogleInternalError() elif response.status in RATE_LIMIT_EXCEEDED or response.status in TOO_MANY_REQUESTS: + response.release() raise GoogleRateLimitError() elif response.status in REQUEST_TIMEOUT: + response.release() raise GoogleTimeoutError() response.raise_for_status() return response diff --git a/hassio-google-drive-backup/backup/creds/exchanger.py b/hassio-google-drive-backup/backup/creds/exchanger.py index 2397541f..d7696925 100644 --- a/hassio-google-drive-backup/backup/creds/exchanger.py +++ b/hassio-google-drive-backup/backup/creds/exchanger.py @@ -71,12 +71,8 @@ async def exchange(self, code): KEY_GRANT_TYPE: 'authorization_code' } resp = None - try: - resp = await self.drive.request("post", self.config.get(Setting.DRIVE_TOKEN_URL), data=data) + async with await self.drive.request("post", self.config.get(Setting.DRIVE_TOKEN_URL), data=data) as resp: return Creds.load(self.time, await resp.json(), id=self._client_id, secret=self._client_secret) - finally: - if resp is not None: - resp.release() async def refresh(self, creds: Creds): if creds.secret is not None: @@ -91,9 +87,7 @@ async def _refresh_google(self, creds: Creds): KEY_REFRESH_TOKEN: creds.refresh_token, KEY_GRANT_TYPE: 'refresh_token' } - resp = None - try: - resp = await self.drive.request("post", self.config.get(Setting.DRIVE_REFRESH_URL), data=data) + async with await self.drive.request("post", self.config.get(Setting.DRIVE_REFRESH_URL), data=data) as resp: data = await resp.json() return Creds( self.time, 
@@ -103,9 +97,6 @@ async def _refresh_google(self, creds: Creds): refresh_token=creds.refresh_token, expiration=self._get_expiration(data), original_expiration=creds.original_expiration) - finally: - if resp is not None: - resp.release() async def _refresh_default(self, creds: Creds): data = { diff --git a/hassio-google-drive-backup/backup/drive/driverequests.py b/hassio-google-drive-backup/backup/drive/driverequests.py index d99fb19d..8e7fcc7f 100644 --- a/hassio-google-drive-backup/backup/drive/driverequests.py +++ b/hassio-google-drive-backup/backup/drive/driverequests.py @@ -5,7 +5,7 @@ from urllib.parse import urlencode from datetime import datetime, timedelta -from aiohttp import ClientSession, ClientTimeout +from aiohttp import ClientSession, ClientTimeout, ClientResponse from aiohttp.client_exceptions import ClientResponseError, ServerTimeoutError from injector import inject, singleton @@ -160,7 +160,8 @@ async def get(self, id): "fields": SELECT_FIELDS, "supportsAllDrives": "true" } - return await self.retryRequest("GET", URL_FILES + id + "/?" + urlencode(q), is_json=True) + async with await self.retryRequest("GET", URL_FILES + id + "/?" + urlencode(q)) as response: + return await response.json() async def download(self, id, size): ret = AsyncHttpGetter(self.config.get(Setting.DRIVE_URL) + URL_FILES + id + "/?alt=media&supportsAllDrives=true", @@ -189,27 +190,27 @@ async def query(self, query): } if continuation: q["pageToken"] = continuation - response = await self.retryRequest("GET", URL_FILES + "?" + urlencode(q), is_json=True) - for item in response['files']: - yield item - if "nextPageToken" not in response or len(response['nextPageToken']) <= 0: - break - else: - continuation = response['nextPageToken'] + async with await self.retryRequest("GET", URL_FILES + "?" + urlencode(q)) as response: + data = await response.json() + for item in data['files']: + yield item + if "nextPageToken" not in data or len(data['nextPageToken']) <= 0: + break + else: + continuation = data['nextPageToken'] async def update(self, id, update_metadata): - resp = await self.retryRequest("PATCH", URL_FILES + id + "/?supportsAllDrives=true", json=update_metadata) - async with resp: + async with await self.retryRequest("PATCH", URL_FILES + id + "/?supportsAllDrives=true", json=update_metadata): pass async def delete(self, id): - resp = await self.retryRequest("DELETE", URL_FILES + id + "/?supportsAllDrives=true") - async with resp: + async with await self.retryRequest("DELETE", URL_FILES + id + "/?supportsAllDrives=true"): pass async def getAboutInfo(self): q = {"fields": 'storageQuota,user'} - return await self.retryRequest("GET", URL_ABOUT + "?" + urlencode(q), is_json=True) + async with await self.retryRequest("GET", URL_ABOUT + "?" + urlencode(q)) as resp: + return await resp.json() async def create(self, stream, metadata, mime_type): # Upload logic is complicated. 
See https://developers.google.com/drive/api/v3/manage-uploads#resumable @@ -225,8 +226,7 @@ async def create(self, stream, metadata, mime_type): "Content-Range": "bytes */{0}".format(total_size) } try: - initial = await self.retryRequest("PUT", self.last_attempt_location, headers=headers, patch_url=False) - async with initial: + async with await self.retryRequest("PUT", self.last_attempt_location, headers=headers, patch_url=False) as initial: if initial.status == 308: # We can resume the upload, check where it left off if 'Range' in initial.headers: @@ -244,7 +244,7 @@ async def create(self, stream, metadata, mime_type): except ClientResponseError as e: if e.status == 410: # Drive doesn't recognize the resume token, so we'll just have to start over. - logger.debug("Drive upload session wasn't recognized, restarting upload form the beginning.") + logger.debug("Drive upload session wasn't recognized, restarting upload from the beginning.") location = None else: raise @@ -256,8 +256,7 @@ async def create(self, stream, metadata, mime_type): "X-Upload-Content-Type": mime_type, "X-Upload-Content-Length": str(total_size), } - initial = await self.retryRequest("POST", URL_START_UPLOAD, headers=headers, json=metadata) - async with initial: + async with await self.retryRequest("POST", URL_START_UPLOAD, headers=headers, json=metadata) as initial: # Google returns a url in the header "Location", which is where subsequent requests to upload # the backup's bytes should be sent. Logic below handles uploading the file bytes in chunks. location = ensureKey( @@ -277,7 +276,7 @@ async def create(self, stream, metadata, mime_type): current_chunk_size = BASE_CHUNK_SIZE while True: start = stream.position() - data: io.IOBaseBytesio = await stream.read(current_chunk_size) + data = await stream.read(current_chunk_size) chunk_size = len(data.getbuffer()) if chunk_size == 0: raise LogicError( @@ -286,19 +285,35 @@ async def create(self, stream, metadata, mime_type): "Content-Length": str(chunk_size), "Content-Range": "bytes {0}-{1}/{2}".format(start, start + chunk_size - 1, total_size) } + startTime = self.time.now() + logger.debug("Sending {0} bytes to Google Drive".format(current_chunk_size)) try: - startTime = self.time.now() - logger.debug("Sending {0} bytes to Google Drive".format( - current_chunk_size)) - partial = await self.retryRequest("PUT", location, headers=headers, data=data, patch_url=False) - - # Base the next chunk size on how long it took to send the last chunk. - current_chunk_size = self._getNextChunkSize( - current_chunk_size, (self.time.now() - startTime).total_seconds()) - - # any time a chunk gets uploaded, reset the retry counter. This lets very flaky connections - # complete eventually after enough retrying. - self.last_attempt_count = 1 + async with await self.retryRequest("PUT", location, headers=headers, data=data, patch_url=False) as partial: + # Base the next chunk size on how long it took to send the last chunk. + current_chunk_size = self._getNextChunkSize( + current_chunk_size, (self.time.now() - startTime).total_seconds()) + + # any time a chunk gets uploaded, reset the retry counter. This lets very flaky connections + # complete eventually after enough retrying. 
+ self.last_attempt_count = 1 + yield float(start + chunk_size) / float(total_size) + if partial.status == 200 or partial.status == 201: + # Upload completed, return the object json + self.last_attempt_location = None + self.last_attempt_metadata = None + yield await self.get((await partial.json())['id']) + break + elif partial.status == 308: + # Upload partially complete, seek to the new requested position + range_bytes = ensureKey( + "Range", partial.headers, "Google Drive's upload response headers") + if not RANGE_RE.match(range_bytes): + raise ProtocolError( + "Range", partial.headers, "Google Drive's upload response headers") + position = int(partial.headers["Range"][len("bytes=0-"):]) + stream.position(position + 1) + else: + partial.raise_for_status() except ClientResponseError as e: if math.floor(e.status / 100) == 4: # clear the cached session location URI, since a 4XX error @@ -310,25 +325,6 @@ async def create(self, stream, metadata, mime_type): raise GoogleSessionError() else: raise e - yield float(start + chunk_size) / float(total_size) - if partial.status == 200 or partial.status == 201: - # Upload completed, return the object json - self.last_attempt_location = None - self.last_attempt_metadata = None - yield await self.get((await partial.json())['id']) - break - elif partial.status == 308: - # Upload partially complete, seek to the new requested position - range_bytes = ensureKey( - "Range", partial.headers, "Google Drive's upload response headers") - if not RANGE_RE.match(range_bytes): - raise ProtocolError( - "Range", partial.headers, "Google Drive's upload response headers") - position = int(partial.headers["Range"][len("bytes=0-"):]) - stream.position(position + 1) - else: - partial.raise_for_status() - await partial.release() def _getNextChunkSize(self, last_chunk_size, last_chunk_seconds): max = BASE_CHUNK_SIZE * math.floor(self.config.get(Setting.MAXIMUM_UPLOAD_CHUNK_BYTES) / BASE_CHUNK_SIZE) @@ -344,9 +340,10 @@ def _getNextChunkSize(self, last_chunk_size, last_chunk_seconds): return math.floor(next_chunk / BASE_CHUNK_SIZE) * BASE_CHUNK_SIZE async def createFolder(self, metadata): - return await self.retryRequest("POST", URL_FILES + "?supportsAllDrives=true", is_json=True, json=metadata) + async with await self.retryRequest("POST", URL_FILES + "?supportsAllDrives=true", json=metadata) as resp: + return await resp.json() - async def retryRequest(self, method, url, auth_headers: Optional[Dict[str, str]] = None, headers: Optional[Dict[str, str]] = None, json: Optional[Dict[str, Any]] = None, data: Any = None, is_json: bool = False, cred_retry: bool = True, patch_url: bool = True): + async def retryRequest(self, method, url, auth_headers: Optional[Dict[str, str]] = None, headers: Optional[Dict[str, str]] = None, json: Optional[Dict[str, Any]] = None, data: Any = None, cred_retry: bool = True, patch_url: bool = True) -> ClientResponse: backoff = Backoff(base=DRIVE_RETRY_INITIAL_SECONDS, attempts=DRIVE_MAX_RETRIES) if patch_url: url = self.config.get(Setting.DRIVE_URL) + url @@ -363,13 +360,7 @@ async def retryRequest(self, method, url, auth_headers: Optional[Dict[str, str]] # aiohttp complains if you pass it a large byte object data_to_use = io.BytesIO(data_to_use.getbuffer()) data_to_use.seek(0) - response = await self.drive.request(method, url, headers=headers_to_use, json=json, data=data_to_use) - if is_json: - ret = await response.json() - await response.release() - return ret - else: - return response + return await self.drive.request(method, url, 
headers=headers_to_use, json=json, data=data_to_use) except GoogleCredentialsExpired: # Get fresh credentials, then retry right away. logger.debug("Google Drive credentials have expired. We'll retry with new ones.") diff --git a/hassio-google-drive-backup/backup/util/asynchttpgetter.py b/hassio-google-drive-backup/backup/util/asynchttpgetter.py index b9c226f2..15827b71 100644 --- a/hassio-google-drive-backup/backup/util/asynchttpgetter.py +++ b/hassio-google-drive-backup/backup/util/asynchttpgetter.py @@ -162,7 +162,7 @@ async def _startReadRemoteAt(self, where: int): if where != 0: headers['range'] = "bytes=%s-%s" % (self._position, self._size - 1) if self._response is not None: - await self._response.release() + self._response.release() try: resp = await self._session.get(self._url, headers=headers, timeout=self.timeout) except TimeoutError: @@ -179,6 +179,7 @@ async def _startReadRemoteAt(self, where: int): raise resp.raise_for_status() if where == 0 and self._size is not None and CONTENT_LENGTH_HEADER in resp.headers and int(resp.headers[CONTENT_LENGTH_HEADER]) != self._size: + resp.release() raise LogicError(SERVER_CONTENT_LENGTH_ERROR) self._response = resp self._responseStart = where @@ -228,7 +229,7 @@ async def __aenter__(self): async def __aexit__(self, type, value, traceback): if self._response is not None: - await self._response.release() + self._response.release() def __aiter__(self): return self From 25c5d2b76d691ce1512329d46caa48fb855e7c57 Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 16 Mar 2023 23:13:26 -0600 Subject: [PATCH 2/7] lint --- hassio-google-drive-backup/tests/test_addon_stopper.py | 1 - 1 file changed, 1 deletion(-) diff --git a/hassio-google-drive-backup/tests/test_addon_stopper.py b/hassio-google-drive-backup/tests/test_addon_stopper.py index 42755c37..6d468d74 100644 --- a/hassio-google-drive-backup/tests/test_addon_stopper.py +++ b/hassio-google-drive-backup/tests/test_addon_stopper.py @@ -22,7 +22,6 @@ def save(config: Config, to_start, to_watchdog_enable): with open(config.get(Setting.STOP_ADDON_STATE_PATH), "w") as f: json.dump({"start": list(to_start), "watchdog": list(to_watchdog_enable)}, f) - @pytest.mark.asyncio async def test_no_stop_config(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None: slug = "test_slug_1" From 1cede2fb2b86e93a8551a1f86268d045e9fb53a9 Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 16 Mar 2023 23:13:50 -0600 Subject: [PATCH 3/7] Fix nagging test warnings --- hassio-google-drive-backup/backup/util/estimator.py | 2 ++ hassio-google-drive-backup/tests/conftest.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/hassio-google-drive-backup/backup/util/estimator.py b/hassio-google-drive-backup/backup/util/estimator.py index dbdfe640..0d787677 100644 --- a/hassio-google-drive-backup/backup/util/estimator.py +++ b/hassio-google-drive-backup/backup/util/estimator.py @@ -73,6 +73,8 @@ def _checkSpace(self, backups): int(self.getUsagePercent())), Estimator.asSizeString(self.getBytesFree())) def getUsagePercent(self): + if self.getBlocksTotal() == 0: + return 0 return 100.0 * float(self.getBlocksUsed()) / float(self.getBlocksTotal()) def getBlocksUsed(self): diff --git a/hassio-google-drive-backup/tests/conftest.py b/hassio-google-drive-backup/tests/conftest.py index 98c67dca..05ccd2a1 100644 --- a/hassio-google-drive-backup/tests/conftest.py +++ b/hassio-google-drive-backup/tests/conftest.py @@ -54,7 +54,7 @@ def stop(self): os.statvfs = self.old_method def 
_hijack(self, path): - return os.statvfs_result((0, 1, int(self.bytes_total), int(self.bytes_free), 0, 0, 0, 0, 0, 255)) + return os.statvfs_result((0, 1, int(self.bytes_total), int(self.bytes_free), int(self.bytes_free), 0, 0, 0, 0, 255)) def setFreeBytes(self, bytes_free, bytes_total=1): self.bytes_free = bytes_free From 42280a0dc9d68fdc2921d894b622391ab3e6893e Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Tue, 21 Mar 2023 12:45:52 -0600 Subject: [PATCH 4/7] Fix broken tests --- hassio-google-drive-backup/tests/test_asynchttpgetter.py | 3 +-- hassio-google-drive-backup/tests/test_estimator.py | 5 +++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hassio-google-drive-backup/tests/test_asynchttpgetter.py b/hassio-google-drive-backup/tests/test_asynchttpgetter.py index df56bfc4..dd8cd400 100644 --- a/hassio-google-drive-backup/tests/test_asynchttpgetter.py +++ b/hassio-google-drive-backup/tests/test_asynchttpgetter.py @@ -1,8 +1,7 @@ from datetime import timedelta -from aiohttp_jinja2 import asyncio import pytest from aiohttp import ClientSession -from aiohttp.web import Response, StreamResponse +from aiohttp.web import StreamResponse from .conftest import Uploader from backup.exceptions import LogicError from dev.request_interceptor import RequestInterceptor diff --git a/hassio-google-drive-backup/tests/test_estimator.py b/hassio-google-drive-backup/tests/test_estimator.py index 68de3ddf..d2f11eba 100644 --- a/hassio-google-drive-backup/tests/test_estimator.py +++ b/hassio-google-drive-backup/tests/test_estimator.py @@ -1,7 +1,7 @@ import pytest from backup.util import Estimator from backup.config import Config, Setting - +from backup.exceptions import LowSpaceError @pytest.mark.asyncio async def test_check_space(estimator: Estimator, coord, config: Config): @@ -9,4 +9,5 @@ async def test_check_space(estimator: Estimator, coord, config: Config): estimator.checkSpace(coord.backups()) config.override(Setting.LOW_SPACE_THRESHOLD, estimator.getBytesFree() + 1) - estimator.checkSpace(coord.backups()) + with pytest.raises(LowSpaceError): + estimator.checkSpace(coord.backups()) From 78c6c62f2c818c70715875898b23af17d32e46de Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 23 Mar 2023 12:05:51 -0600 Subject: [PATCH 5/7] Properly handle DST transitions --- .devcontainer/requirements-dev.txt | 4 +- .../backup/model/backupscheme.py | 41 +- .../backup/model/model.py | 10 +- hassio-google-drive-backup/backup/time.py | 25 +- .../requirements-addon.txt | 2 + .../requirements-server.txt | 4 +- hassio-google-drive-backup/tests/faketime.py | 10 +- .../tests/test_model.py | 406 +++++++++++++++++- 8 files changed, 458 insertions(+), 44 deletions(-) diff --git a/.devcontainer/requirements-dev.txt b/.devcontainer/requirements-dev.txt index a88386cf..049fe5a2 100644 --- a/.devcontainer/requirements-dev.txt +++ b/.devcontainer/requirements-dev.txt @@ -28,4 +28,6 @@ beautifulsoup4 firebase-admin aiofile grpcio -aioping \ No newline at end of file +aioping +pytz +tzlocal \ No newline at end of file diff --git a/hassio-google-drive-backup/backup/model/backupscheme.py b/hassio-google-drive-backup/backup/model/backupscheme.py index 80cdb53a..0d0511f8 100644 --- a/hassio-google-drive-backup/backup/model/backupscheme.py +++ b/hassio-google-drive-backup/backup/model/backupscheme.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from calendar import monthrange -from datetime import datetime, timedelta -from typing import List, Optional, Sequence, Set, Tuple +from datetime import 
datetime, timedelta, date +from typing import List, Optional, Sequence, Set, Tuple, Any, Union from .backups import Backup from backup.util import RangeLookup @@ -29,7 +29,7 @@ def __init__(self, source: str, destinations: List[str]): self.source = source self.destinations = destinations - def getOldest(self, backups: Backup): + def getOldest(self, backups: List[Backup]): consider = [] for backup in backups: uploaded = True @@ -51,7 +51,7 @@ class OldestScheme(BackupScheme): def __init__(self, count=0): self.count = count - def getOldest(self, backups: Sequence[Backup]) -> Optional[Backup]: + def getOldest(self, backups: Sequence[Backup]) -> Tuple[Any, Union[Backup, None]]: if len(backups) <= self.count: return None, None return "default", min(backups, default=None, key=lambda s: s.date()) @@ -78,8 +78,10 @@ def select(self, backups: List[Backup]) -> Optional[Backup]: preferred = list(filter(searcher, options)) if len(preferred) > 0: + # If there is a backup on the "preferred" day, then use the latest backup on that day self.selected = max(preferred, default=None, key=Backup.date) else: + # Otherwise, use the earliest backup over the valid period. self.selected = min(options, default=None, key=Backup.date) return self.selected @@ -87,6 +89,7 @@ def delta(self) -> timedelta: return self.end - self.start def day(self, date: datetime): + # TODO: this conversion isn't time-zone safe, but is ok because we only use it to compare local day to local day. local = self.time.toLocal(date) return datetime(day=local.day, month=local.month, year=local.year) @@ -106,8 +109,8 @@ def __init__(self, time: Time, config: GenConfig, count=0): self.time: Time = time self.config = config - def _buildPartitions(self, backups): - backups: List[Backup] = list(backups) + def _buildPartitions(self, backups_input): + backups: List[Backup] = list(backups_input) # build the list of dates we should partition by day_of_week = 3 @@ -128,18 +131,24 @@ def _buildPartitions(self, backups): currentDay = self.day(last) if self.config.days > 0: for x in range(0, self.config.days + 1): - nextDay = currentDay + timedelta(days=1) + nextDay = self.day(currentDay, add_days=1) lookups.append( Partition(currentDay, nextDay, currentDay, self.time, "Day {0} of {1}".format(x + 1, self.config.days), delete_only=(x >= self.config.days))) - currentDay = self.day(currentDay - timedelta(hours=12)) + currentDay = self.day(currentDay, add_days=-1) if self.config.weeks > 0: for x in range(0, self.config.weeks + 1): + # Start at the first monday preceeding the last backup start = self.time.local(last.year, last.month, last.day) - start -= timedelta(days=last.weekday()) - start -= timedelta(weeks=x) - end = start + timedelta(days=7) - start += timedelta(days=day_of_week) + start = self.day(start, add_days=-1 * start.weekday()) + + # Move back x weeks + start = self.day(start, add_days=-7 * x) + end = self.day(start, add_days=7) + + # Only consider backups from that week after the start day + # TODO: should this actually "prefer" the day of week but start on monday? 
+ start = self.day(start, add_days=day_of_week) lookups.append(Partition(start, end, start, self.time, "Week {0} of {1}".format(x + 1, self.config.weeks), delete_only=(x >= self.config.weeks))) if self.config.months > 0: @@ -220,6 +229,8 @@ def handleNaming(self, backups: Sequence[Backup]) -> None: part.selected.setStatusDetail([]) part.selected.getStatusDetail().append(part.details) - def day(self, date: datetime): - local = self.time.toLocal(date) - return datetime(day=local.day, month=local.month, year=local.year, tzinfo=local.tzinfo) + def day(self, utc_datetime: datetime, add_days=0): + local = self.time.toLocal(utc_datetime) + + local_date = date.fromordinal(date(local.year, local.month, local.day).toordinal() + add_days) + return self.time.localize(datetime(local_date.year, local_date.month, local_date.day, 0, 0)) diff --git a/hassio-google-drive-backup/backup/model/model.py b/hassio-google-drive-backup/backup/model/model.py index 106c781e..a81fd7c4 100644 --- a/hassio-google-drive-backup/backup/model/model.py +++ b/hassio-google-drive-backup/backup/model/model.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, date from io import IOBase from typing import Dict, Generic, List, Optional, Tuple, TypeVar @@ -153,15 +153,15 @@ def _nextBackup(self, now: datetime, last_backup: Optional[datetime]) -> Optiona next = last_backup + timedelta(days=self.config.get(Setting.DAYS_BETWEEN_BACKUPS)) else: newest_local: datetime = self.time.toLocal(last_backup) - time_that_day_local = datetime(newest_local.year, newest_local.month, - newest_local.day, timeofDay[0], timeofDay[1], tzinfo=self.time.local_tz) + time_that_day_local = self.time.localize(datetime(newest_local.year, newest_local.month, newest_local.day, timeofDay[0], timeofDay[1])) if newest_local < time_that_day_local: # Latest backup is before the backup time for that day next = self.time.toUtc(time_that_day_local) else: # return the next backup after the delta - next = self.time.toUtc( - time_that_day_local + timedelta(days=self.config.get(Setting.DAYS_BETWEEN_BACKUPS))) + next_date = date.fromordinal(date(newest_local.year, newest_local.month, newest_local.day).toordinal() + 1) + next_datetime_local = self.time.localize(datetime(next_date.year, next_date.month, next_date.day, timeofDay[0], timeofDay[1])) + next = self.time.toUtc(next_datetime_local) if next is None: self.waiting_for_startup = False diff --git a/hassio-google-drive-backup/backup/time.py b/hassio-google-drive-backup/backup/time.py index ae0432ba..b053ca7f 100644 --- a/hassio-google-drive-backup/backup/time.py +++ b/hassio-google-drive-backup/backup/time.py @@ -1,11 +1,15 @@ import asyncio from datetime import datetime, timedelta -from dateutil.tz import tzlocal, tzutc + from injector import inject, singleton from dateutil.relativedelta import relativedelta from dateutil.parser import parse from .logger import getLogger +import pytz +from pytz import timezone, utc +from tzlocal import get_localzone_name + logger = getLogger(__name__) @@ -13,12 +17,12 @@ @singleton class Time(object): @inject - def __init__(self, local_tz=tzlocal()): + def __init__(self, local_tz=timezone(get_localzone_name())): self.local_tz = local_tz self._offset = timedelta(seconds=0) def now(self) -> datetime: - return datetime.now(tzutc()) + self._offset + return datetime.now(pytz.utc) + self._offset def nowLocal(self) -> datetime: return datetime.now(self.local_tz) + self._offset @@ -35,14 +39,17 @@ def offset(self, delta: timedelta): def 
parse(cls, text: str) -> datetime: ret = parse(text) if ret.tzinfo is None: - ret = ret.replace(tzinfo=tzutc()) + ret = ret.replace(tzinfo=utc) return ret def toLocal(self, dt: datetime) -> datetime: return dt.astimezone(self.local_tz) + def localize(self, dt: datetime) -> datetime: + return self.local_tz.localize(dt) + def toUtc(self, dt: datetime) -> datetime: - return dt.astimezone(tzutc()) + return dt.astimezone(utc) async def sleepAsync(self, seconds: float, early_exit: asyncio.Event = None) -> None: if early_exit is None: @@ -54,7 +61,7 @@ async def sleepAsync(self, seconds: float, early_exit: asyncio.Event = None) -> pass def local(self, year, month, day, hour=0, minute=0, second=0, ms=0): - return datetime(year, month, day, hour, minute, second, ms, tzinfo=self.local_tz) + return self.local_tz.localize(datetime(year, month, day, hour, minute, second, ms)) def formatDelta(self, time: datetime, now=None) -> str: if not now: @@ -105,14 +112,14 @@ def asRfc3339String(self, time: datetime) -> str: class AcceleratedTime(Time): def __init__(self, dialation=1.0): super().__init__() - self.start = datetime.now(tzutc()) + self.start = datetime.now(utc) self.dialation = dialation def now(self): - return self.start + relativedelta(seconds=(datetime.now(tzutc()) - self.start).total_seconds() * self.dialation) + return self.start + timedelta(seconds=(datetime.now(utc) - self.start).total_seconds() * self.dialation) def nowLocal(self) -> datetime: - return self.local(self.now()) + return self.localize(self.now()) async def sleepAsync(self, seconds: float) -> None: await asyncio.sleep(seconds / self.dialation) diff --git a/hassio-google-drive-backup/requirements-addon.txt b/hassio-google-drive-backup/requirements-addon.txt index 78ddeeae..ba9643ac 100644 --- a/hassio-google-drive-backup/requirements-addon.txt +++ b/hassio-google-drive-backup/requirements-addon.txt @@ -15,3 +15,5 @@ aiofile colorlog aiohttp-jinja2 aioping +pytz +tzlocal diff --git a/hassio-google-drive-backup/requirements-server.txt b/hassio-google-drive-backup/requirements-server.txt index 8ddbb012..3788d91a 100644 --- a/hassio-google-drive-backup/requirements-server.txt +++ b/hassio-google-drive-backup/requirements-server.txt @@ -14,4 +14,6 @@ python-dateutil pyyaml watchdog aiohttp-jinja2 -firebase-admin \ No newline at end of file +firebase-admin +pytz +tzlocal \ No newline at end of file diff --git a/hassio-google-drive-backup/tests/faketime.py b/hassio-google-drive-backup/tests/faketime.py index 60c123c2..a32408a4 100644 --- a/hassio-google-drive-backup/tests/faketime.py +++ b/hassio-google-drive-backup/tests/faketime.py @@ -1,23 +1,21 @@ import asyncio from datetime import datetime, timedelta - -from dateutil.tz import gettz - from backup.time import Time +from pytz import timezone class FakeTime(Time): def __init__(self, now: datetime = None): - super().__init__(local_tz=gettz('EST')) + super().__init__(local_tz=timezone('EST')) if now: self._now = now else: self._now = self.toUtc( - datetime(1985, 12, 6, 0, 0, 0, tzinfo=gettz('EST'))) + datetime(1985, 12, 6, 0, 0, 0, tzinfo=timezone('EST'))) self.sleeps = [] def setTimeZone(self, name): - self.local_tz = gettz(name) + self.local_tz = timezone(name) def setNow(self, now: datetime): self._now = now diff --git a/hassio-google-drive-backup/tests/test_model.py b/hassio-google-drive-backup/tests/test_model.py index fd962c75..88ebd24f 100644 --- a/hassio-google-drive-backup/tests/test_model.py +++ b/hassio-google-drive-backup/tests/test_model.py @@ -878,6 +878,17 @@ async def 
test_zero_config_whiled_deleting_backups(time: FakeTime, model: Model, source.assertThat() +async def simulate_backups_timeline(time: FakeTime, model: Model, start: datetime, end: datetime): + time.setNow(time.toUtc(start)) + await model.sync(time.now()) + while time.now() < end: + next = model.nextBackup(time.now()) + assert next is not None + assert next > time.now() + time.setNow(next) + await model.sync(time.now()) + + @pytest.mark.asyncio async def test_generational_delete_issue602(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): time.setTimeZone("Europe/Rome") @@ -899,13 +910,8 @@ async def test_generational_delete_issue602(time: FakeTime, model: Model, dest: source.setMax(1) dest.setMax(30) model.reinitialize() - time.setNow(time.toUtc(start)) - await model.sync(time.now()) - while time.now() < end: - next = model.nextBackup(time.now()) - assert next > time.now() - time.setNow(next) - await model.sync(time.now()) + + await simulate_backups_timeline(time, model, start, end) dates = list([x.date() for x in dest.current.values()]) dates.sort() @@ -938,3 +944,389 @@ async def test_generational_delete_issue602(time: FakeTime, model: Model, dest: time.parse('2023-04-29T22:00:00+00:00'), time.parse('2023-04-30T22:00:00+00:00'), time.parse('2023-05-01T22:00:00+00:00')] + + +@pytest.mark.asyncio +async def test_generational_delete_issue809(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("America/Los_Angeles") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 10) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 32) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 7) + simple_config.override(Setting.GENERATIONAL_WEEKS, 4) + simple_config.override(Setting.GENERATIONAL_MONTHS, 6) + simple_config.override(Setting.GENERATIONAL_YEARS, 10) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2021, 1, 1) + end = time.local(2024, 5, 1) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.parse('2021-01-01 10:00:00+00:00'), + time.parse('2022-01-01 10:00:00+00:00'), + time.parse('2023-01-01 10:00:00+00:00'), + time.parse('2023-12-01 10:00:00+00:00'), + time.parse('2024-01-01 10:00:00+00:00'), + time.parse('2024-02-01 10:00:00+00:00'), + time.parse('2024-03-01 10:00:00+00:00'), + time.parse('2024-04-01 09:00:00+00:00'), + time.parse('2024-04-08 09:00:00+00:00'), + time.parse('2024-04-09 09:00:00+00:00'), + time.parse('2024-04-10 09:00:00+00:00'), + time.parse('2024-04-11 09:00:00+00:00'), + time.parse('2024-04-12 09:00:00+00:00'), + time.parse('2024-04-13 09:00:00+00:00'), + time.parse('2024-04-14 09:00:00+00:00'), + time.parse('2024-04-15 09:00:00+00:00'), + time.parse('2024-04-16 09:00:00+00:00'), + time.parse('2024-04-17 09:00:00+00:00'), + time.parse('2024-04-18 09:00:00+00:00'), + time.parse('2024-04-19 09:00:00+00:00'), + time.parse('2024-04-20 09:00:00+00:00'), + time.parse('2024-04-21 09:00:00+00:00'), + time.parse('2024-04-22 09:00:00+00:00'), + time.parse('2024-04-23 09:00:00+00:00'), + time.parse('2024-04-24 09:00:00+00:00'), + time.parse('2024-04-25 09:00:00+00:00'), + time.parse('2024-04-26 
09:00:00+00:00'), + time.parse('2024-04-27 09:00:00+00:00'), + time.parse('2024-04-28 09:00:00+00:00'), + time.parse('2024-04-29 09:00:00+00:00'), + time.parse('2024-04-30 09:00:00+00:00'), + time.parse('2024-05-01 09:00:00+00:00')] + + +async def test_generational_delete_dst_start_rome(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 1, 1) + end = time.local(2023, 3, 25) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 2, 1, 2), + time.local(2023, 3, 1, 2), + time.local(2023, 3, 6, 2), + time.local(2023, 3, 13, 2), + time.local(2023, 3, 20, 2), + time.local(2023, 3, 23, 2), + time.local(2023, 3, 24, 2), + time.local(2023, 3, 25, 2)] + + assert time.now() == time.local(2023, 3, 25, 2) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 2) + + time.setNow(time.toUtc(time.local(2023, 3, 26, 2))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 3, 26, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 27, 2) + + +async def test_generational_delete_dst_start_los_angeles(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("America/Los_Angeles") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 1, 1) + end = time.local(2023, 3, 11) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 2, 1, 2), + time.local(2023, 2, 20, 2), + time.local(2023, 2, 27, 2), + time.local(2023, 3, 1, 2), + time.local(2023, 3, 6, 2), + time.local(2023, 3, 9, 2), + time.local(2023, 3, 10, 2), + time.local(2023, 3, 11, 2)] + + assert time.now() == time.local(2023, 3, 11, 2) + assert model.nextBackup(time.now()) == time.local(2023, 3, 12, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert 
model.nextBackup(time.now()) == time.local(2023, 3, 12, 2) + + time.setNow(time.toUtc(time.local(2023, 3, 12, 2))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 3, 12, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 13, 2) + + +async def test_generational_delete_dst_start_rome_2_30(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:30") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 1, 1) + end = time.local(2023, 3, 25) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 2, 1, 2, 30), + time.local(2023, 3, 1, 2, 30), + time.local(2023, 3, 6, 2, 30), + time.local(2023, 3, 13, 2, 30), + time.local(2023, 3, 20, 2, 30), + time.local(2023, 3, 23, 2, 30), + time.local(2023, 3, 24, 2, 30), + time.local(2023, 3, 25, 2, 30)] + + assert time.now() == time.local(2023, 3, 25, 2, 30) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 2, 30) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 2, 30) + + time.setNow(time.toUtc(time.local(2023, 3, 26, 2, 30))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 3, 26, 2, 30) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 27, 2, 30) + + +async def test_generational_delete_dst_start_rome_3_00(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "03:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 1, 1) + end = time.local(2023, 3, 25) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 2, 1, 3), + time.local(2023, 3, 1, 3), + time.local(2023, 3, 6, 3), + time.local(2023, 3, 13, 3), + time.local(2023, 3, 20, 3), + time.local(2023, 3, 23, 3), + time.local(2023, 3, 24, 3), + time.local(2023, 3, 25, 3)] + + assert 
time.now() == time.local(2023, 3, 25, 3) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 3) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 26, 3) + + time.setNow(time.toUtc(time.local(2023, 3, 26, 3))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 3, 26, 3) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 3, 27, 3) + + +async def test_generational_delete_dst_end_rome(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 6, 1) + end = time.local(2023, 10, 28) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 9, 1, 2), + time.local(2023, 10, 1, 2), + time.local(2023, 10, 9, 2), + time.local(2023, 10, 16, 2), + time.local(2023, 10, 23, 2), + time.local(2023, 10, 26, 2), + time.local(2023, 10, 27, 2), + time.local(2023, 10, 28, 2)] + + assert time.now() == time.local(2023, 10, 28, 2) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 2) + + time.setNow(time.toUtc(time.local(2023, 10, 29, 2))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 10, 29, 2) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 30, 2) + + +async def test_generational_delete_dst_end_rome_2_30(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "02:30") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 6, 1) + end = time.local(2023, 10, 28) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 9, 1, 2, 30), + time.local(2023, 10, 1, 2, 30), + time.local(2023, 10, 9, 2, 30), + time.local(2023, 10, 16, 2, 
30), + time.local(2023, 10, 23, 2, 30), + time.local(2023, 10, 26, 2, 30), + time.local(2023, 10, 27, 2, 30), + time.local(2023, 10, 28, 2, 30)] + + assert time.now() == time.local(2023, 10, 28, 2, 30) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 2, 30) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 2, 30) + + time.setNow(time.toUtc(time.local(2023, 10, 29, 2, 30))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 10, 29, 2, 30) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 30, 2, 30) + + +async def test_generational_delete_dst_end_rome_3_00(time: FakeTime, model: Model, dest: HelperTestSource, source: HelperTestSource, simple_config: Config): + time.setTimeZone("Europe/Rome") + + simple_config.override(Setting.MAX_BACKUPS_IN_HA, 1) + simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 8) + simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1) + simple_config.override(Setting.BACKUP_TIME_OF_DAY, "03:00") + simple_config.override(Setting.GENERATIONAL_DAYS, 3) + simple_config.override(Setting.GENERATIONAL_WEEKS, 3) + simple_config.override(Setting.GENERATIONAL_MONTHS, 2) + simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True) + + source.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_HA)) + dest.setMax(simple_config.get(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE)) + model.reinitialize() + + start = time.local(2023, 6, 1) + end = time.local(2023, 10, 28) + await simulate_backups_timeline(time, model, start, end) + + dates = list([x.date() for x in dest.current.values()]) + dates.sort() + assert dates == [time.local(2023, 9, 1, 3), + time.local(2023, 10, 1, 3), + time.local(2023, 10, 9, 3), + time.local(2023, 10, 16, 3), + time.local(2023, 10, 23, 3), + time.local(2023, 10, 26, 3), + time.local(2023, 10, 27, 3), + time.local(2023, 10, 28, 3)] + + assert time.now() == time.local(2023, 10, 28, 3) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 3) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 29, 3) + + time.setNow(time.toUtc(time.local(2023, 10, 29, 3))) + await model.sync(time.now()) + assert max([x.date() for x in dest.current.values()]) == time.local(2023, 10, 29, 3) + + for x in range(0, 24 * 15): + time.advance(minutes=15) + assert model.nextBackup(time.now()) == time.local(2023, 10, 30, 3) From 127190c1920a43ae51a15ea5e9f6cb29149b41b2 Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 23 Mar 2023 12:20:59 -0600 Subject: [PATCH 6/7] Version bump --- hassio-google-drive-backup/CHANGELOG.md | 4 ++++ hassio-google-drive-backup/config.json | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/hassio-google-drive-backup/CHANGELOG.md b/hassio-google-drive-backup/CHANGELOG.md index a22ee61d..c4c16e60 100644 --- a/hassio-google-drive-backup/CHANGELOG.md +++ b/hassio-google-drive-backup/CHANGELOG.md @@ -1,3 +1,7 @@ +## v0.110.2 [2023-03-24] +- Fix a potential cause of SSL errors when communicating with Google Drive +- Fix a bug causing backups to be requested indefinitely if scheduled during DST transitions. + ## v0.110.1 [2023-01-09] - Adds some additional options for donating - Mitgigates SD card corruption by redundantly storing config files needed for addon startup. 
diff --git a/hassio-google-drive-backup/config.json b/hassio-google-drive-backup/config.json index 71d85ce0..b699d225 100644 --- a/hassio-google-drive-backup/config.json +++ b/hassio-google-drive-backup/config.json @@ -1,6 +1,6 @@ { "name": "Home Assistant Google Drive Backup", - "version": "0.110.1", + "version": "0.110.2", "slug": "hassio_google_drive_backup", "description": "Automatically manage backups between Home Assistant and Google Drive", "arch": ["armhf", "armv7", "aarch64", "amd64", "i386"], From 414f11e288b6ab979da3d6928fbf73c079f33de9 Mon Sep 17 00:00:00 2001 From: Stephen Beechen Date: Thu, 23 Mar 2023 20:16:08 -0600 Subject: [PATCH 7/7] Squashed commit of the following: commit 3b5936e59769000823118edc2d4ebcc8f9e90200 Merge: 1711d2d 823f607 Author: Stephen Beechen Date: Thu Jan 19 13:31:24 2023 -0700 Merge branch 'master' of https://github.com/sabeechen/hassio-google-drive-backup commit 1711d2de86cd0621131fe3208c25d1cd329f0d7f Author: Stephen Beechen Date: Thu Jan 19 13:31:19 2023 -0700 Stop using Google's deprecated auth APIs commit 823f607e5648b59b9eca210bc6e200bbbe7a3221 Merge: 20fe0f8 9c3c63c Author: Stephen Beechen Date: Thu Jan 19 08:28:22 2023 -0700 Merge pull request #787 from sabeechen/dependabot/github_actions/frenck/action-addon-linter-2.11 Bump frenck/action-addon-linter from 2.10 to 2.11 commit 9c3c63c93f8d786853feb5d88f044f77a806025f Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu Jan 19 06:03:35 2023 +0000 Bump frenck/action-addon-linter from 2.10 to 2.11 Bumps [frenck/action-addon-linter](https://github.com/frenck/action-addon-linter) from 2.10 to 2.11. - [Release notes](https://github.com/frenck/action-addon-linter/releases) - [Commits](https://github.com/frenck/action-addon-linter/compare/v2.10...v2.11) --- updated-dependencies: - dependency-name: frenck/action-addon-linter dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] commit 20fe0f83b1b0b0bacb0620d3361e228249672551 Merge: df74524 a04ae21 Author: Stephen Beechen Date: Wed Jan 18 07:07:28 2023 -0700 Merge pull request #785 from LucaDiba/fix-error-message-link Fix link to FAQ commit a04ae211b1606c86933f3fc55fc0311b4560dd35 Author: Luca Dibattista <34377738+LucaDiba@users.noreply.github.com> Date: Wed Jan 18 14:26:02 2023 +0100 Fix link to FAQ commit df745248ac7e1a106cd308678933bc0b062c3a3c Author: Stephen Beechen Date: Mon Jan 9 17:04:43 2023 -0700 Release v0.110.1 commit a9451f50a7d17ad7f67e68a7799ce2887d4c1b93 Author: Stephen Beechen Date: Mon Jan 9 16:19:24 2023 -0700 Emergency revert commit afc596a87f1d46a4fefe6d13ff24c60f1090a075 Merge: 4e8466b ed70638 Author: Stephen Beechen Date: Mon Jan 9 16:16:24 2023 -0700 Merge branch 'dev' commit 4e8466b3bc80a7aa4d51809bf1d85945e8cf900f Merge: 2d12541 4352266 Author: Stephen Beechen Date: Sat Dec 3 12:14:54 2022 -0700 Merge pull request #761 from sabeechen/dev Dev --- .devcontainer/Dockerfile | 3 + .github/workflows/lint.yaml | 2 +- .../backup/static/layouts/base-server.jinja2 | 1 - .../layouts/partials/error-messages.jinja2 | 2 +- .../backup/static/picker.jinja2 | 72 +++++++++++-------- .../requirements-server.txt | 3 +- 6 files changed, 50 insertions(+), 33 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index f5c20f36..35c44e4e 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -3,5 +3,8 @@ FROM python:3.9-buster WORKDIR /usr/src/install RUN apt-get update RUN apt-get install fping +# install gcloud api +RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-cli -y + COPY requirements-dev.txt ./ RUN pip install --no-cache-dir -r requirements-dev.txt \ No newline at end of file diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index e5b5b39e..b1f54200 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -8,6 +8,6 @@ jobs: - name: ⤵️ Check out code from GitHub uses: actions/checkout@v3 - name: 🚀 Run Home Assistant Add-on Linter - uses: frenck/action-addon-linter@v2.10 + uses: frenck/action-addon-linter@v2.11 with: path: "./hassio-google-drive-backup" diff --git a/hassio-google-drive-backup/backup/static/layouts/base-server.jinja2 b/hassio-google-drive-backup/backup/static/layouts/base-server.jinja2 index 744ac800..411050cb 100644 --- a/hassio-google-drive-backup/backup/static/layouts/base-server.jinja2 +++ b/hassio-google-drive-backup/backup/static/layouts/base-server.jinja2 @@ -14,7 +14,6 @@ - diff --git a/hassio-google-drive-backup/backup/static/layouts/partials/error-messages.jinja2 b/hassio-google-drive-backup/backup/static/layouts/partials/error-messages.jinja2 index a2856f7b..8980f7b8 100644 --- a/hassio-google-drive-backup/backup/static/layouts/partials/error-messages.jinja2 +++ b/hassio-google-drive-backup/backup/static/layouts/partials/error-messages.jinja2 @@ -124,7 +124,7 @@ Buy more storage from Google
  • - Make your backups smaller (see this tip) + Make your backups smaller (see this tip)
  • {% endcall %} diff --git a/hassio-google-drive-backup/backup/static/picker.jinja2 b/hassio-google-drive-backup/backup/static/picker.jinja2 index 43a911f2..c8cc174e 100644 --- a/hassio-google-drive-backup/backup/static/picker.jinja2 +++ b/hassio-google-drive-backup/backup/static/picker.jinja2 @@ -3,32 +3,53 @@ {% block head %} {{ super() }} + + {% endblock %} {% block content %} @@ -124,14 +139,13 @@
    - Opening folder picker... -

    A window should be opening now asking you to log into your Google account and then to select a folder from your Google Drive. + Backup Folder Picker +

    Use the button below to log into your Google account and select a folder from your Google Drive.

      -
    • If not, your popup blocker probably hates this page. Click the button below to open the dialog.
    • -
    • After you select a folder, you'll be redirected back to the addon.
    • -
    • If the folder you want to use doesn't exist yet, navigate to Google Drive and create it there first. The folder picker doesn't let you create folders.
    • -
    • The folder you select is where the addon will store backups from now on. This should be what you want.
    • -
    • If you'd like to know why this page has to be hosted on an external domain, click here.
    • +
    • The button opens a pop-up that creates a session with Google's services and then redirects back here. If your browser has extensions that prevent that kind of thing (e.g. aggressive pop-up blockers, cross-site cookie restrictions, Firefox containers, etc.), then you might need to disable them to make the authorization work. This is just how Google does authorization.
    • +
    • If the folder you want to use doesn't exist yet, navigate to Google Drive and create it there first. The folder picker doesn't let you create new folders.
    • +
    • The folder you select is where the addon will store backups from now on. This should be what you want if you're on this page.
    • +
    • If you'd like to know why this page has to be hosted on an external domain, click here to learn more.

    diff --git a/hassio-google-drive-backup/requirements-server.txt b/hassio-google-drive-backup/requirements-server.txt index 3788d91a..2104e540 100644 --- a/hassio-google-drive-backup/requirements-server.txt +++ b/hassio-google-drive-backup/requirements-server.txt @@ -16,4 +16,5 @@ watchdog aiohttp-jinja2 firebase-admin pytz -tzlocal \ No newline at end of file +tzlocal +aioping