maint: Bump HACS to 2.0.0
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
"""Backup."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
@@ -27,7 +28,7 @@ class Backup:
|
||||
backup_path: str = DEFAULT_BACKUP_PATH,
|
||||
repository: HacsRepository | None = None,
|
||||
) -> None:
|
||||
"""initialize."""
|
||||
"""Initialize."""
|
||||
self.hacs = hacs
|
||||
self.repository = repository
|
||||
self.local_path = local_path or repository.content.path.local
|
||||
@@ -107,33 +108,3 @@ class Backup:
|
||||
while os.path.exists(self.backup_path):
|
||||
sleep(0.1)
|
||||
self.hacs.log.debug("Backup dir %s cleared", self.backup_path)
|
||||
|
||||
|
||||
class BackupNetDaemon(Backup):
    """Backup handler for NetDaemon repositories.

    Only the repository's YAML configuration files are copied to and from
    the backup directory; other files are left untouched.
    """

    def create(self) -> None:
        """Create a backup in /tmp"""
        if not self._init_backup_dir():
            return

        for filename in os.listdir(self.repository.content.path.local):
            # Only YAML configuration files are backed up.
            if not filename.endswith(".yaml"):
                continue

            source_file_name = f"{self.repository.content.path.local}/{filename}"
            target_file_name = f"{self.backup_path}/{filename}"
            shutil.copyfile(source_file_name, target_file_name)

    def restore(self) -> None:
        """Restore YAML files from the backup directory."""
        if not os.path.exists(self.backup_path):
            # Nothing was backed up, nothing to restore.
            return

        for filename in os.listdir(self.backup_path):
            if not filename.endswith(".yaml"):
                continue

            source_file_name = f"{self.backup_path}/{filename}"
            target_file_name = f"{self.repository.content.path.local}/{filename}"
            shutil.copyfile(source_file_name, target_file_name)
|
||||
|
||||
@@ -1,74 +1,9 @@
|
||||
"""HACS Configuration Schemas."""
|
||||
# pylint: disable=dangerous-default-value
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import LOCALE
|
||||
|
||||
# Configuration:
|
||||
TOKEN = "token"
|
||||
SIDEPANEL_TITLE = "sidepanel_title"
|
||||
SIDEPANEL_ICON = "sidepanel_icon"
|
||||
FRONTEND_REPO = "frontend_repo"
|
||||
FRONTEND_REPO_URL = "frontend_repo_url"
|
||||
APPDAEMON = "appdaemon"
|
||||
NETDAEMON = "netdaemon"
|
||||
|
||||
# Options:
|
||||
COUNTRY = "country"
|
||||
DEBUG = "debug"
|
||||
RELEASE_LIMIT = "release_limit"
|
||||
EXPERIMENTAL = "experimental"
|
||||
|
||||
# Config group
|
||||
PATH_OR_URL = "frontend_repo_path_or_url"
|
||||
|
||||
|
||||
def hacs_base_config_schema(config: dict | None = None) -> dict:
    """Return a schema configuration dict for HACS.

    If no configuration is given, a placeholder token is used so the
    schema still has a default for the required token field.
    """
    # Avoid a mutable default argument; treat a missing/empty dict the same.
    if not config:
        config = {
            TOKEN: "xxxxxxxxxxxxxxxxxxxxxxxxxxx",
        }
    return {
        vol.Required(TOKEN, default=config.get(TOKEN)): str,
    }
|
||||
|
||||
|
||||
def hacs_config_option_schema(options: dict | None = None) -> dict:
    """Return a schema for HACS configuration options.

    If no options are given, sensible defaults are used for every option.
    The two frontend repo options are mutually exclusive (path OR url).
    """
    # Avoid a mutable default argument; treat a missing/empty dict the same.
    if not options:
        options = {
            APPDAEMON: False,
            COUNTRY: "ALL",
            DEBUG: False,
            EXPERIMENTAL: False,
            NETDAEMON: False,
            RELEASE_LIMIT: 5,
            SIDEPANEL_ICON: "hacs:hacs",
            SIDEPANEL_TITLE: "HACS",
            FRONTEND_REPO: "",
            FRONTEND_REPO_URL: "",
        }
    return {
        vol.Optional(SIDEPANEL_TITLE, default=options.get(SIDEPANEL_TITLE)): str,
        vol.Optional(SIDEPANEL_ICON, default=options.get(SIDEPANEL_ICON)): str,
        vol.Optional(RELEASE_LIMIT, default=options.get(RELEASE_LIMIT)): int,
        vol.Optional(COUNTRY, default=options.get(COUNTRY)): vol.In(LOCALE),
        vol.Optional(APPDAEMON, default=options.get(APPDAEMON)): bool,
        vol.Optional(NETDAEMON, default=options.get(NETDAEMON)): bool,
        vol.Optional(DEBUG, default=options.get(DEBUG)): bool,
        vol.Optional(EXPERIMENTAL, default=options.get(EXPERIMENTAL)): bool,
        # vol.Exclusive: at most one of these two may be present.
        vol.Exclusive(FRONTEND_REPO, PATH_OR_URL): str,
        vol.Exclusive(FRONTEND_REPO_URL, PATH_OR_URL): str,
    }
|
||||
|
||||
|
||||
def hacs_config_combined() -> dict:
    """Combine the base configuration schema with the option schema."""
    combined = hacs_base_config_schema()
    combined.update(hacs_config_option_schema())
    return combined
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
"""Data handler for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.util import json as json_util
|
||||
|
||||
from ..base import HacsBase
|
||||
from ..const import HACS_REPOSITORY_ID
|
||||
@@ -47,6 +47,7 @@ EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
|
||||
("last_version", None),
|
||||
("manifest_name", None),
|
||||
("open_issues", 0),
|
||||
("prerelease", None),
|
||||
("published_tags", []),
|
||||
("releases", False),
|
||||
("selected_tag", None),
|
||||
@@ -84,8 +85,7 @@ class HacsData:
|
||||
"ignored_repositories": self.hacs.common.ignored_repositories,
|
||||
},
|
||||
)
|
||||
if self.hacs.configuration.experimental:
|
||||
await self._async_store_experimental_content_and_repos()
|
||||
await self._async_store_experimental_content_and_repos()
|
||||
await self._async_store_content_and_repos()
|
||||
|
||||
async def _async_store_content_and_repos(self, _=None): # bb: ignore
|
||||
@@ -100,7 +100,7 @@ class HacsData:
|
||||
for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
|
||||
self.hacs.async_dispatch(event, {})
|
||||
|
||||
async def _async_store_experimental_content_and_repos(self, _=None): # bb: ignore
|
||||
async def _async_store_experimental_content_and_repos(self, _=None):
|
||||
"""Store the main repos file and each repo that is out of date."""
|
||||
# Repositories
|
||||
self.content = {}
|
||||
@@ -165,29 +165,16 @@ class HacsData:
|
||||
pass
|
||||
|
||||
try:
|
||||
data = (
|
||||
await async_load_from_store(
|
||||
self.hacs.hass,
|
||||
"data" if self.hacs.configuration.experimental else "repositories",
|
||||
)
|
||||
or {}
|
||||
)
|
||||
if data and self.hacs.configuration.experimental:
|
||||
repositories = await async_load_from_store(self.hacs.hass, "repositories")
|
||||
if not repositories and (data := await async_load_from_store(self.hacs.hass, "data")):
|
||||
for category, entries in data.get("repositories", {}).items():
|
||||
for repository in entries:
|
||||
repositories[repository["id"]] = {"category": category, **repository}
|
||||
else:
|
||||
repositories = (
|
||||
data or await async_load_from_store(self.hacs.hass, "repositories") or {}
|
||||
)
|
||||
|
||||
except HomeAssistantError as exception:
|
||||
self.hacs.log.error(
|
||||
"Could not read %s, restore the file from a backup - %s",
|
||||
self.hacs.hass.config.path(
|
||||
".storage/hacs.data"
|
||||
if self.hacs.configuration.experimental
|
||||
else ".storage/hacs.repositories"
|
||||
),
|
||||
self.hacs.hass.config.path(".storage/hacs.data"),
|
||||
exception,
|
||||
)
|
||||
self.hacs.disable_hacs(HacsDisabledReason.RESTORE)
|
||||
@@ -196,13 +183,7 @@ class HacsData:
|
||||
if not hacs and not repositories:
|
||||
# Assume new install
|
||||
self.hacs.status.new = True
|
||||
if self.hacs.configuration.experimental:
|
||||
return True
|
||||
self.logger.info("<HacsData restore> Loading base repository information")
|
||||
repositories = await self.hacs.hass.async_add_executor_job(
|
||||
json_util.load_json,
|
||||
f"{self.hacs.core.config_path}/custom_components/hacs/utils/default.repositories",
|
||||
)
|
||||
return True
|
||||
|
||||
self.logger.info("<HacsData restore> Restore started")
|
||||
|
||||
@@ -242,7 +223,8 @@ class HacsData:
|
||||
|
||||
self.logger.info("<HacsData restore> Restore done")
|
||||
except (
|
||||
BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||
BaseException
|
||||
) as exception:
|
||||
self.logger.critical(
|
||||
"<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
|
||||
@@ -250,22 +232,28 @@ class HacsData:
|
||||
return False
|
||||
return True
|
||||
|
||||
async def register_unknown_repositories(self, repositories, category: str | None = None):
|
||||
async def register_unknown_repositories(
|
||||
self, repositories: dict[str, dict[str, Any]], category: str | None = None
|
||||
):
|
||||
"""Registry any unknown repositories."""
|
||||
register_tasks = [
|
||||
self.hacs.async_register_repository(
|
||||
for repo_idx, (entry, repo_data) in enumerate(repositories.items()):
|
||||
# async_register_repository is awaited in a loop
|
||||
# since its unlikely to ever suspend at startup
|
||||
if (
|
||||
entry == "0"
|
||||
or repo_data.get("category", category) is None
|
||||
or self.hacs.repositories.is_registered(repository_id=entry)
|
||||
):
|
||||
continue
|
||||
await self.hacs.async_register_repository(
|
||||
repository_full_name=repo_data["full_name"],
|
||||
category=repo_data.get("category", category),
|
||||
check=False,
|
||||
repository_id=entry,
|
||||
)
|
||||
for entry, repo_data in repositories.items()
|
||||
if entry != "0"
|
||||
and not self.hacs.repositories.is_registered(repository_id=entry)
|
||||
and repo_data.get("category", category) is not None
|
||||
]
|
||||
if register_tasks:
|
||||
await asyncio.gather(*register_tasks)
|
||||
if repo_idx % 100 == 0:
|
||||
# yield to avoid blocking the event loop
|
||||
await asyncio.sleep(0)
|
||||
|
||||
@callback
|
||||
def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
|
||||
@@ -302,18 +290,22 @@ class HacsData:
|
||||
repository.data.selected_tag = repository_data.get("selected_tag")
|
||||
repository.data.show_beta = repository_data.get("show_beta", False)
|
||||
repository.data.last_version = repository_data.get("last_version")
|
||||
repository.data.prerelease = repository_data.get("prerelease")
|
||||
repository.data.last_commit = repository_data.get("last_commit")
|
||||
repository.data.installed_version = repository_data.get("version_installed")
|
||||
repository.data.installed_commit = repository_data.get("installed_commit")
|
||||
repository.data.manifest_name = repository_data.get("manifest_name")
|
||||
|
||||
if last_fetched := repository_data.get("last_fetched"):
|
||||
repository.data.last_fetched = datetime.fromtimestamp(last_fetched)
|
||||
repository.data.last_fetched = datetime.fromtimestamp(last_fetched, UTC)
|
||||
|
||||
repository.repository_manifest = HacsManifest.from_dict(
|
||||
repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
|
||||
)
|
||||
|
||||
if repository.data.prerelease == repository.data.last_version:
|
||||
repository.data.prerelease = None
|
||||
|
||||
if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
|
||||
# Set local path
|
||||
repository.content.path.local = repository.localpath
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Util to decode content from the github API."""
|
||||
|
||||
from base64 import b64decode
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
"""HACS Decorators."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Coroutine
|
||||
from functools import wraps
|
||||
from typing import TYPE_CHECKING, Any, Coroutine
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from ..const import DEFAULT_CONCURRENT_BACKOFF_TIME, DEFAULT_CONCURRENT_TASKS
|
||||
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,4 +1,5 @@
|
||||
"""Filter functions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Custom logger for HACS."""
|
||||
|
||||
import logging
|
||||
|
||||
from ..const import PACKAGE_NAME
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
"""Path utils"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
@@ -8,14 +10,32 @@ if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _get_safe_paths(
|
||||
config_path: str,
|
||||
appdaemon_path: str,
|
||||
plugin_path: str,
|
||||
python_script_path: str,
|
||||
theme_path: str,
|
||||
) -> set[str]:
|
||||
"""Get safe paths."""
|
||||
return {
|
||||
Path(f"{config_path}/{appdaemon_path}").as_posix(),
|
||||
Path(f"{config_path}/{plugin_path}").as_posix(),
|
||||
Path(f"{config_path}/{python_script_path}").as_posix(),
|
||||
Path(f"{config_path}/{theme_path}").as_posix(),
|
||||
Path(f"{config_path}/custom_components/").as_posix(),
|
||||
Path(f"{config_path}/custom_templates/").as_posix(),
|
||||
}
|
||||
|
||||
|
||||
def is_safe(hacs: HacsBase, path: str | Path) -> bool:
    """Helper to check if path is safe to remove.

    Returns True when *path* is NOT one of the protected HACS content
    directories (appdaemon/plugin/python_script/theme paths and the
    custom_components / custom_templates directories).
    """
    configuration = hacs.configuration
    # The protected set depends only on these values, so it is memoized
    # in _get_safe_paths.
    return Path(path).as_posix() not in _get_safe_paths(
        hacs.core.config_path,
        configuration.appdaemon_path,
        configuration.plugin_path,
        configuration.python_script_path,
        configuration.theme_path,
    )
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
"""The QueueManager class."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Coroutine
|
||||
import time
|
||||
from typing import Coroutine
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -60,9 +61,6 @@ class QueueManager:
|
||||
for task in self.queue:
|
||||
local_queue.append(task)
|
||||
|
||||
for task in local_queue:
|
||||
self.queue.remove(task)
|
||||
|
||||
_LOGGER.debug("<QueueManager> Starting queue execution for %s tasks", len(local_queue))
|
||||
start = time.time()
|
||||
result = await asyncio.gather(*local_queue, return_exceptions=True)
|
||||
@@ -71,6 +69,9 @@ class QueueManager:
|
||||
_LOGGER.error("<QueueManager> %s", entry)
|
||||
end = time.time() - start
|
||||
|
||||
for task in local_queue:
|
||||
self.queue.remove(task)
|
||||
|
||||
_LOGGER.debug(
|
||||
"<QueueManager> Queue execution finished for %s tasks finished in %.2f seconds",
|
||||
len(local_queue),
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Regex utils"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Storage handers."""
|
||||
|
||||
from homeassistant.helpers.json import JSONEncoder
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util import json as json_util
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
"""Custom template support."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from jinja2 import Template
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
from ..repositories.base import HacsRepository
|
||||
|
||||
|
||||
def render_template(hacs: HacsBase, content: str, context: HacsRepository) -> str:
    """Render templates in content."""
    if hacs.configuration.experimental:
        # Do not render for experimental
        return content

    # Guard against a missing release object so the template always gets
    # a usable prerelease flag.
    last_release = context.releases.last_release_object
    prerelease = last_release.prerelease if last_release is not None else False

    try:
        rendered = Template(content).render(
            installed=context.data.installed,
            pending_update=context.pending_update,
            prerelease=prerelease,
            selected_tag=context.data.selected_tag,
            version_available=context.releases.last_release,
            version_installed=context.display_installed_version,
        )
        return rendered
    except (
        BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
    ) as exception:
        # Rendering is best effort: fall back to the raw content on any failure.
        context.logger.debug(exception)
        return content
||||
@@ -1,7 +1,10 @@
|
||||
"""Validation utilities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.helpers.config_validation import url as url_validator
|
||||
@@ -67,3 +70,146 @@ INTEGRATION_MANIFEST_JSON_SCHEMA = vol.Schema(
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def validate_repo_data(schema: dict[str, Any], extra: int) -> Callable[[Any], Any]:
    """Return a validator for repo data.

    Used instead of vol.All so that both the repo schema and the
    validate_version validator are always tried, and their errors are
    reported together.
    """
    compiled_schema = vol.Schema(schema, extra=extra)

    def _validate(data: Any) -> Any:
        """Validate repo data against the schema and the version rule."""
        collected: vol.MultipleInvalid | None = None
        try:
            compiled_schema(data)
        except vol.MultipleInvalid as err:
            collected = err
        try:
            validate_version(data)
        except vol.Invalid as err:
            # Fold the version error into the schema errors when both failed.
            if collected:
                collected.add(err)
            else:
                raise
        if collected:
            raise collected
        return data

    return _validate
||||
|
||||
|
||||
def validate_version(data: Any) -> Any:
    """Ensure at least one of last_commit or last_version is present."""
    has_version_info = "last_commit" in data or "last_version" in data
    if not has_version_info:
        raise vol.Invalid("Expected at least one of [`last_commit`, `last_version`], got none")
    return data
||||
|
||||
|
||||
V2_COMMON_DATA_JSON_SCHEMA = {
|
||||
vol.Required("description"): vol.Any(str, None),
|
||||
vol.Optional("downloads"): int,
|
||||
vol.Optional("etag_releases"): str,
|
||||
vol.Required("etag_repository"): str,
|
||||
vol.Required("full_name"): str,
|
||||
vol.Optional("last_commit"): str,
|
||||
vol.Required("last_fetched"): vol.Any(int, float),
|
||||
vol.Required("last_updated"): str,
|
||||
vol.Optional("last_version"): str,
|
||||
vol.Optional("prerelease"): str,
|
||||
vol.Required("manifest"): {
|
||||
vol.Optional("country"): vol.Any([str], False),
|
||||
vol.Optional("name"): str,
|
||||
},
|
||||
vol.Optional("open_issues"): int,
|
||||
vol.Optional("stargazers_count"): int,
|
||||
vol.Optional("topics"): [str],
|
||||
}
|
||||
|
||||
V2_INTEGRATION_DATA_JSON_SCHEMA = {
|
||||
**V2_COMMON_DATA_JSON_SCHEMA,
|
||||
vol.Required("domain"): str,
|
||||
vol.Required("manifest_name"): str,
|
||||
}
|
||||
|
||||
_V2_REPO_SCHEMAS = {
|
||||
"appdaemon": V2_COMMON_DATA_JSON_SCHEMA,
|
||||
"integration": V2_INTEGRATION_DATA_JSON_SCHEMA,
|
||||
"plugin": V2_COMMON_DATA_JSON_SCHEMA,
|
||||
"python_script": V2_COMMON_DATA_JSON_SCHEMA,
|
||||
"template": V2_COMMON_DATA_JSON_SCHEMA,
|
||||
"theme": V2_COMMON_DATA_JSON_SCHEMA,
|
||||
}
|
||||
|
||||
# Used when validating repos in the hacs integration, discards extra keys
|
||||
VALIDATE_FETCHED_V2_REPO_DATA = {
|
||||
category: validate_repo_data(schema, vol.REMOVE_EXTRA)
|
||||
for category, schema in _V2_REPO_SCHEMAS.items()
|
||||
}
|
||||
|
||||
# Used when validating repos when generating data, fails on extra keys
|
||||
VALIDATE_GENERATED_V2_REPO_DATA = {
|
||||
category: vol.Schema({str: validate_repo_data(schema, vol.PREVENT_EXTRA)})
|
||||
for category, schema in _V2_REPO_SCHEMAS.items()
|
||||
}
|
||||
|
||||
V2_CRITICAL_REPO_DATA_SCHEMA = {
|
||||
vol.Required("link"): str,
|
||||
vol.Required("reason"): str,
|
||||
vol.Required("repository"): str,
|
||||
}
|
||||
|
||||
# Used when validating critical repos in the hacs integration, discards extra keys
|
||||
VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
|
||||
V2_CRITICAL_REPO_DATA_SCHEMA,
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
# Used when validating critical repos when generating data, fails on extra keys
|
||||
VALIDATE_GENERATED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
|
||||
[
|
||||
vol.Schema(
|
||||
V2_CRITICAL_REPO_DATA_SCHEMA,
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
V2_REMOVED_REPO_DATA_SCHEMA = {
|
||||
vol.Optional("link"): str,
|
||||
vol.Optional("reason"): str,
|
||||
vol.Required("removal_type"): vol.In(
|
||||
[
|
||||
"Integration is missing a version, and is abandoned.",
|
||||
"Remove",
|
||||
"archived",
|
||||
"blacklist",
|
||||
"critical",
|
||||
"deprecated",
|
||||
"removal",
|
||||
"remove",
|
||||
"removed",
|
||||
"replaced",
|
||||
"repository",
|
||||
]
|
||||
),
|
||||
vol.Required("repository"): str,
|
||||
}
|
||||
|
||||
# Used when validating removed repos in the hacs integration, discards extra keys
|
||||
VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
|
||||
V2_REMOVED_REPO_DATA_SCHEMA,
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
# Used when validating removed repos when generating data, fails on extra keys
|
||||
VALIDATE_GENERATED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
|
||||
[
|
||||
vol.Schema(
|
||||
V2_REMOVED_REPO_DATA_SCHEMA,
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Version utils."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
|
||||
@@ -1,7 +1,37 @@
|
||||
"""Workarounds for issues that should not be fixed."""
|
||||
"""Workarounds."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
DOMAIN_OVERRIDES = {
|
||||
# https://github.com/hacs/integration/issues/2465
|
||||
"custom-components/sensor.custom_aftership": "custom_aftership"
|
||||
}
|
||||
|
||||
|
||||
# Prefer the modern API when available: StaticPathConfig only exists in
# newer Home Assistant releases, so the import doubles as a feature check.
try:
    from homeassistant.components.http import StaticPathConfig

    async def async_register_static_path(
        hass: HomeAssistant,
        url_path: str,
        path: str,
        cache_headers: bool = True,
    ) -> None:
        """Register a static path with the HTTP component."""
        await hass.http.async_register_static_paths(
            [StaticPathConfig(url_path, path, cache_headers)]
        )
except ImportError:

    async def async_register_static_path(
        hass: HomeAssistant,
        url_path: str,
        path: str,
        cache_headers: bool = True,
    ) -> None:
        """Register a static path with the HTTP component.

        Legacy: Can be removed when min version is 2024.7
        https://developers.home-assistant.io/blog/2024/06/18/async_register_static_paths/
        """
        # Older cores only offer the synchronous registration call.
        hass.http.register_static_path(url_path, path, cache_headers)
|
||||
|
||||
Reference in New Issue
Block a user