Update HACS

root
2022-05-23 17:48:47 -07:00
parent 3bdb8638a8
commit 1d83dd0c31
163 changed files with 862 additions and 11844 deletions

View File

@@ -1,33 +1,52 @@
"""Data handler for HACS."""
import asyncio
from datetime import datetime
import os
from homeassistant.core import callback
from homeassistant.util import json as json_util
from ..base import HacsBase
from ..enums import HacsGitHubRepo
from ..repositories.base import HacsManifest, HacsRepository
from ..enums import HacsDispatchEvent, HacsGitHubRepo
from ..repositories.base import TOPIC_FILTER, HacsManifest, HacsRepository
from .logger import get_hacs_logger
from .path import is_safe
from .store import (
async_load_from_store,
async_save_to_store,
async_save_to_store_default_encoder,
get_store_for_key,
)
from .store import async_load_from_store, async_save_to_store
DEFAULT_BASE_REPOSITORY_DATA = (
("authors", []),
("category", ""),
("description", ""),
("domain", None),
("downloads", 0),
("etag_repository", None),
("full_name", ""),
("last_updated", 0),
("hide", False),
("new", False),
("stargazers_count", 0),
("topics", []),
)
def update_repository_from_storage(repository, storage_data):
"""Merge in data from storage into the repo data."""
repository.data.memorize_storage(storage_data)
repository.data.update_data(storage_data)
if repository.data.installed:
return
repository.logger.debug("%s Should be installed but is not... Fixing that!", repository.string)
repository.data.installed = True
DEFAULT_EXTENDED_REPOSITORY_DATA = (
("archived", False),
("config_flow", False),
("default_branch", None),
("description", ""),
("first_install", False),
("installed_commit", None),
("installed", False),
("last_commit", None),
("last_version", None),
("manifest_name", None),
("open_issues", 0),
("published_tags", []),
("pushed_at", ""),
("releases", False),
("selected_tag", None),
("show_beta", False),
("stargazers_count", 0),
("topics", []),
)
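
The two default tables above drive a smaller on-disk format: when a repository is written out, any attribute that still holds its default value is simply skipped. Below is a minimal, self-contained sketch of that compaction pattern; the RepositoryData stand-in and its attribute list are simplified placeholders, not the real HACS classes.

# Minimal sketch of the "skip defaults" compaction used when storing repositories.
# RepositoryData and DEFAULTS are simplified stand-ins, not the real HACS classes.
DEFAULTS = (("description", ""), ("downloads", 0), ("hide", False))

class RepositoryData:
    """Tiny stand-in holding repository attributes."""

    def __init__(self, **kwargs):
        for key, default in DEFAULTS:
            setattr(self, key, kwargs.get(key, default))

def compact(data: RepositoryData) -> dict:
    """Return only the attributes that differ from their defaults."""
    out = {}
    for key, default in DEFAULTS:
        if (value := getattr(data, key)) != default:
            out[key] = value
    return out

print(compact(RepositoryData(downloads=42)))  # {'downloads': 42}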
class HacsData:
@@ -39,6 +58,10 @@ class HacsData:
self.hacs = hacs
self.content = {}
async def async_force_write(self, _=None):
"""Force write."""
await self.async_write(force=True)
async def async_write(self, force: bool = False) -> None:
"""Write content to the store files."""
if not force and self.hacs.system.disabled:
@@ -51,76 +74,45 @@ class HacsData:
self.hacs.hass,
"hacs",
{
"view": self.hacs.configuration.frontend_mode,
"compact": self.hacs.configuration.frontend_compact,
"onboarding_done": self.hacs.configuration.onboarding_done,
"archived_repositories": self.hacs.common.archived_repositories,
"renamed_repositories": self.hacs.common.renamed_repositories,
"ignored_repositories": self.hacs.common.ignored_repositories,
},
)
await self._async_store_content_and_repos()
for event in ("hacs/repository", "hacs/config"):
self.hacs.hass.bus.async_fire(event, {})
async def _async_store_content_and_repos(self): # bb: ignore
async def _async_store_content_and_repos(self, _=None): # bb: ignore
"""Store the main repos file and each repo that is out of date."""
# Repositories
self.content = {}
# Not run concurrently since this is bound by disk I/O
for repository in self.hacs.repositories.list_all:
await self.async_store_repository_data(repository)
if repository.data.category in self.hacs.common.categories:
self.async_store_repository_data(repository)
await async_save_to_store(self.hacs.hass, "repositories", self.content)
for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
self.hacs.async_dispatch(event, {})
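
The two bus events fired by the old code are replaced by internal dispatcher signals sent through self.hacs.async_dispatch with HacsDispatchEvent members. The sketch below shows one plausible shape of that indirection using Home Assistant's dispatcher helper; the enum values and the wrapper are assumptions for illustration, not copied from the HACS source.

# Hedged sketch of the dispatch indirection; the enum values and the wrapper
# are illustrative assumptions, not the actual HACS implementation.
from __future__ import annotations

from enum import Enum

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send

class HacsDispatchEvent(str, Enum):
    """Signals HACS sends to its own listeners instead of bus events."""

    CONFIG = "hacs_dispatch_config"
    REPOSITORY = "hacs_dispatch_repository"

@callback
def async_dispatch(hass: HomeAssistant, signal: HacsDispatchEvent, data: dict | None = None) -> None:
    """Send an internal dispatcher signal rather than firing on the event bus."""
    async_dispatcher_send(hass, signal.value, data or {})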
@callback
def async_store_repository_data(self, repository: HacsRepository) -> dict:
"""Store the repository data."""
data = {"repository_manifest": repository.repository_manifest.manifest}
for key, default_value in DEFAULT_BASE_REPOSITORY_DATA:
if (value := repository.data.__getattribute__(key)) != default_value:
data[key] = value
if repository.data.installed:
for key, default_value in DEFAULT_EXTENDED_REPOSITORY_DATA:
if (value := repository.data.__getattribute__(key)) != default_value:
data[key] = value
data["version_installed"] = repository.data.installed_version
async def async_store_repository_data(self, repository: HacsRepository):
repository_manifest = repository.repository_manifest.manifest
data = {
"authors": repository.data.authors,
"category": repository.data.category,
"description": repository.data.description,
"domain": repository.data.domain,
"downloads": repository.data.downloads,
"etag_repository": repository.data.etag_repository,
"full_name": repository.data.full_name,
"first_install": repository.status.first_install,
"installed_commit": repository.data.installed_commit,
"installed": repository.data.installed,
"last_commit": repository.data.last_commit,
"last_release_tag": repository.data.last_version,
"last_updated": repository.data.last_updated,
"name": repository.data.name,
"new": repository.data.new,
"repository_manifest": repository_manifest,
"releases": repository.data.releases,
"selected_tag": repository.data.selected_tag,
"show_beta": repository.data.show_beta,
"stars": repository.data.stargazers_count,
"topics": repository.data.topics,
"version_installed": repository.data.installed_version,
}
if repository.data.last_fetched:
data["last_fetched"] = repository.data.last_fetched.timestamp()
self.content[str(repository.data.id)] = data
if (
repository.data.installed
and (repository.data.installed_commit or repository.data.installed_version)
and (export := repository.data.export_data())
):
# export_data will return `None` if the memorized
# data is already up to date which allows us to avoid
# writing data that is already up to date or generating
# executor jobs to check the data on disk to see
# if a write is needed.
await async_save_to_store_default_encoder(
self.hacs.hass,
f"hacs/{repository.data.id}.hacs",
export,
)
repository.data.memorize_storage(export)
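
The block above only touches disk when export_data reports a change since the last write, which is remembered via memorize_storage. A minimal sketch of that memorize/export pairing follows; the StoredData class is a stand-in, not the real HACS data class.

# Minimal sketch of the memorize/export pattern: skip the write when storage
# is already up to date. StoredData is a stand-in, not the HACS data class.
from __future__ import annotations

class StoredData:
    def __init__(self) -> None:
        self._memorized: dict | None = None
        self.values: dict = {}

    def memorize_storage(self, data: dict) -> None:
        """Remember what was last written to (or restored from) storage."""
        self._memorized = data

    def export_data(self) -> dict | None:
        """Return data to write, or None if nothing changed since memorize_storage."""
        current = dict(self.values)
        return None if current == self._memorized else current

data = StoredData()
data.values["installed"] = True
if (export := data.export_data()) is not None:
    # ... write `export` to the per-repository store here ...
    data.memorize_storage(export)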
async def restore(self):
"""Restore saved data."""
self.hacs.status.new = False
@@ -139,9 +131,6 @@ class HacsData:
self.logger.info("<HacsData restore> Restore started")
# Hacs
self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
self.hacs.configuration.frontend_compact = hacs.get("compact", False)
self.hacs.configuration.onboarding_done = hacs.get("onboarding_done", False)
self.hacs.common.archived_repositories = []
self.hacs.common.ignored_repositories = []
self.hacs.common.renamed_repositories = {}
@@ -163,9 +152,6 @@ class HacsData:
if entry not in self.hacs.common.ignored_repositories:
self.hacs.common.ignored_repositories.append(entry)
hass = self.hacs.hass
stores = {}
try:
await self.register_unknown_repositories(repositories)
@@ -176,25 +162,12 @@ class HacsData:
"<HacsData restore> Found repository with ID %s - %s", entry, repo_data
)
continue
if self.async_restore_repository(entry, repo_data):
stores[entry] = get_store_for_key(hass, f"hacs/{entry}.hacs")
self.async_restore_repository(entry, repo_data)
def _load_from_storage():
for entry, store in stores.items():
if os.path.exists(store.path) and (data := store.load()):
if (full_name := data.get("full_name")) and (
renamed := self.hacs.common.renamed_repositories.get(full_name)
) is not None:
data["full_name"] = renamed
update_repository_from_storage(
self.hacs.repositories.get_by_id(entry), data
)
await hass.async_add_executor_job(_load_from_storage)
self.logger.info("<HacsData restore> Restore done")
except BaseException as exception: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.logger.critical(
f"<HacsData restore> [{exception}] Restore Failed!", exc_info=exception
"<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
)
return False
return True
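
The restore path collects a Store object per installed repository and defers the actual file reads to a single executor job, since the synchronous load() call blocks on disk I/O. The helper below sketches that offloading step; store.path and store.load() mirror the HACSStore helpers referenced in this diff, everything else is illustrative.

# Hedged sketch of the restore offloading: do all blocking reads in one executor job.
# `store.path` and `store.load()` mirror the HACSStore helpers used in this diff.
import os

def _load_from_storage(stores: dict, apply) -> None:
    """Blocking helper: read every collected store file and hand its data to apply()."""
    for entry, store in stores.items():
        if os.path.exists(store.path) and (data := store.load()):
            apply(entry, data)

# Called from async code so the event loop never blocks on disk I/O:
#     await hass.async_add_executor_job(_load_from_storage, stores, apply)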
@@ -216,31 +189,37 @@ class HacsData:
@callback
def async_restore_repository(self, entry, repository_data):
"""Restore repository."""
full_name = repository_data["full_name"]
if not (repository := self.hacs.repositories.get_by_full_name(full_name)):
self.logger.error(f"<HacsData restore> Did not find {full_name} ({entry})")
return False
self.logger.error("<HacsData restore> Did not find %s (%s)", full_name, entry)
return
# Restore repository attributes
self.hacs.repositories.set_repository_id(repository, entry)
repository.data.authors = repository_data.get("authors", [])
repository.data.description = repository_data.get("description")
repository.releases.last_release_object_downloads = repository_data.get("downloads")
repository.data.last_updated = repository_data.get("last_updated")
repository.data.description = repository_data.get("description", "")
repository.data.downloads = repository_data.get("downloads", 0)
repository.data.last_updated = repository_data.get("last_updated", 0)
repository.data.etag_repository = repository_data.get("etag_repository")
repository.data.topics = repository_data.get("topics", [])
repository.data.domain = repository_data.get("domain", None)
repository.data.stargazers_count = repository_data.get("stars", 0)
repository.data.topics = [
topic for topic in repository_data.get("topics", []) if topic not in TOPIC_FILTER
]
repository.data.domain = repository_data.get("domain")
repository.data.stargazers_count = repository_data.get(
"stargazers_count"
) or repository_data.get("stars", 0)
repository.releases.last_release = repository_data.get("last_release_tag")
repository.data.releases = repository_data.get("releases")
repository.data.releases = repository_data.get("releases", False)
repository.data.hide = repository_data.get("hide", False)
repository.data.installed = repository_data.get("installed", False)
repository.data.new = repository_data.get("new", True)
repository.data.new = repository_data.get("new", False)
repository.data.selected_tag = repository_data.get("selected_tag")
repository.data.show_beta = repository_data.get("show_beta", False)
repository.data.last_version = repository_data.get("last_release_tag")
repository.data.last_commit = repository_data.get("last_commit")
repository.data.installed_version = repository_data.get("version_installed")
repository.data.installed_commit = repository_data.get("installed_commit")
repository.data.manifest_name = repository_data.get("manifest_name")
if last_fetched := repository_data.get("last_fetched"):
repository.data.last_fetched = datetime.fromtimestamp(last_fetched)
@@ -254,10 +233,8 @@ class HacsData:
repository.content.path.local = repository.localpath
if repository.data.installed:
repository.status.first_install = False
repository.data.first_install = False
if full_name == HacsGitHubRepo.INTEGRATION:
repository.data.installed_version = self.hacs.version
repository.data.installed = True
return True
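
Restoring now filters out boilerplate GitHub topics with TOPIC_FILTER and accepts either the newer stargazers_count key or the legacy stars key. A small illustration of both fallbacks on a plain dict; the TOPIC_FILTER contents below are assumed for the example, not the real HACS constant.

# Illustration of the topic filtering and key fallback used during restore.
# TOPIC_FILTER contents here are assumed, not the real HACS constant.
TOPIC_FILTER = ("home-assistant", "hacs", "custom-component")

repository_data = {"topics": ["mqtt", "hacs", "sensor"], "stars": 12}

topics = [topic for topic in repository_data.get("topics", []) if topic not in TOPIC_FILTER]
stargazers_count = repository_data.get("stargazers_count") or repository_data.get("stars", 0)

print(topics)            # ['mqtt', 'sensor']
print(stargazers_count)  # 12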

View File

@@ -13,7 +13,7 @@ if TYPE_CHECKING:
def concurrent(
concurrenttasks: int = DEFAULT_CONCURRENT_TASKS,
backoff_time=DEFAULT_CONCURRENT_BACKOFF_TIME,
backoff_time: int = DEFAULT_CONCURRENT_BACKOFF_TIME,
) -> Coroutine[Any, Any, None]:
"""Return a modified function."""

File diff suppressed because one or more lines are too long

View File

@@ -42,7 +42,7 @@ class QueueManager:
async def execute(self, number_of_tasks: int | None = None) -> None:
"""Execute the tasks in the queue."""
if self.running:
_LOGGER.debug("<QueueManager> Execution is allreay running")
_LOGGER.debug("<QueueManager> Execution is already running")
raise HacsExecutionStillInProgress
if len(self.queue) == 0:
_LOGGER.debug("<QueueManager> The queue is empty")
@@ -65,7 +65,10 @@ class QueueManager:
_LOGGER.debug("<QueueManager> Starting queue execution for %s tasks", len(local_queue))
start = time.time()
await asyncio.gather(*local_queue)
result = await asyncio.gather(*local_queue, return_exceptions=True)
for entry in result:
if isinstance(entry, Exception):
_LOGGER.error("<QueueManager> %s", entry)
end = time.time() - start
_LOGGER.debug(
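
With return_exceptions=True the queue keeps going when a single task fails, and the failures are logged after the gather instead of propagating and cancelling the rest. A small runnable illustration of that change:

# Runnable illustration of gathering with return_exceptions=True and logging
# failures afterwards, mirroring the QueueManager change above.
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

async def task(number: int) -> int:
    if number == 2:
        raise ValueError("task 2 failed")
    return number

async def main() -> None:
    results = await asyncio.gather(*(task(n) for n in range(4)), return_exceptions=True)
    for entry in results:
        if isinstance(entry, Exception):
            _LOGGER.error("<QueueManager> %s", entry)

asyncio.run(main())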

View File

@@ -36,7 +36,7 @@ def get_store_key(key):
def _get_store_for_key(hass, key, encoder):
"""Create a Store object for the key."""
return HACSStore(hass, VERSION_STORAGE, get_store_key(key), encoder=encoder)
return HACSStore(hass, VERSION_STORAGE, get_store_key(key), encoder=encoder, atomic_writes=True)
def get_store_for_key(hass, key):
@@ -49,16 +49,6 @@ async def async_load_from_store(hass, key):
return await get_store_for_key(hass, key).async_load() or {}
async def async_save_to_store_default_encoder(hass, key, data):
"""Generate store json safe data to the filesystem.
The data is expected to be encodable with the default
python json encoder. It should have already been passed through
JSONEncoder if needed.
"""
await _get_store_for_key(hass, key, None).async_save(data)
async def async_save_to_store(hass, key, data):
"""Generate dynamic data to store and save it to the filesystem.

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from functools import lru_cache
from typing import TYPE_CHECKING
from awesomeversion import (
AwesomeVersion,
@@ -10,12 +9,9 @@ from awesomeversion import (
AwesomeVersionStrategy,
)
if TYPE_CHECKING:
from ..repositories.base import HacsRepository
@lru_cache(maxsize=1024)
def version_left_higher_then_right(left: str, right: str) -> bool:
def version_left_higher_then_right(left: str, right: str) -> bool | None:
"""Return a bool if source is newer than target, will also be true if identical."""
try:
left_version = AwesomeVersion(left)
@@ -37,22 +33,3 @@ def version_left_higher_or_equal_then_right(left: str, right: str) -> bool:
return True
return version_left_higher_then_right(left, right)
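
version_left_higher_then_right leans on the awesomeversion package for the comparison and caches results with lru_cache, since the same version pairs are compared repeatedly. A quick usage illustration of the underlying comparisons (the expected output is what AwesomeVersion comparisons should give):

# Quick illustration of the awesomeversion comparisons the helpers above rely on.
from awesomeversion import AwesomeVersion

print(AwesomeVersion("1.10.0") > AwesomeVersion("1.9.2"))        # True
print(AwesomeVersion("2022.5.0") >= AwesomeVersion("2022.5.0"))  # True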
def version_to_download(repository: HacsRepository) -> str:
"""Determine which version to download."""
if repository.data.last_version is not None:
if repository.data.selected_tag is not None:
if repository.data.selected_tag == repository.data.last_version:
repository.data.selected_tag = None
return repository.data.last_version
return repository.data.selected_tag
return repository.data.last_version
if repository.data.selected_tag is not None:
if repository.data.selected_tag == repository.data.default_branch:
return repository.data.default_branch
if repository.data.selected_tag in repository.data.published_tags:
return repository.data.selected_tag
return repository.data.default_branch or "main"