maint: Bump HACS to 2.0.0
@@ -1,80 +1,58 @@
-"""
-HACS gives you a powerful UI to handle downloads of all your custom needs.
+"""HACS gives you a powerful UI to handle downloads of all your custom needs.
 
 For more details about this integration, please refer to the documentation at
 https://hacs.xyz/
 """
-from __future__ import annotations
-
-import os
-from typing import Any
+
+from __future__ import annotations
+
+from typing import Any
 
 from aiogithubapi import AIOGitHubAPIException, GitHub, GitHubAPI
 from aiogithubapi.const import ACCEPT_HEADERS
 from awesomeversion import AwesomeVersion
-
+from homeassistant.components.frontend import async_remove_panel
 from homeassistant.components.lovelace.system_health import system_health_info
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
 from homeassistant.const import Platform, __version__ as HAVERSION
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.discovery import async_load_platform
+from homeassistant.helpers.entity_registry import async_get as async_get_entity_registry
 from homeassistant.helpers.event import async_call_later
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.start import async_at_start
 from homeassistant.loader import async_get_integration
-import voluptuous as vol
 
 from .base import HacsBase
-from .const import DOMAIN, MINIMUM_HA_VERSION, STARTUP
+from .const import DOMAIN, HACS_SYSTEM_ID, MINIMUM_HA_VERSION, STARTUP
 from .data_client import HacsDataClient
-from .enums import ConfigurationType, HacsDisabledReason, HacsStage, LovelaceMode
+from .enums import HacsDisabledReason, HacsStage, LovelaceMode
 from .frontend import async_register_frontend
-from .utils.configuration_schema import hacs_config_combined
 from .utils.data import HacsData
-from .utils.logger import LOGGER
 from .utils.queue_manager import QueueManager
 from .utils.version import version_left_higher_or_equal_then_right
 from .websocket import async_register_websocket_commands
 
-CONFIG_SCHEMA = vol.Schema({DOMAIN: hacs_config_combined()}, extra=vol.ALLOW_EXTRA)
+PLATFORMS = [Platform.SWITCH, Platform.UPDATE]
 
 
-async def async_initialize_integration(
+async def _async_initialize_integration(
     hass: HomeAssistant,
-    *,
-    config_entry: ConfigEntry | None = None,
-    config: dict[str, Any] | None = None,
+    config_entry: ConfigEntry,
 ) -> bool:
     """Initialize the integration"""
     hass.data[DOMAIN] = hacs = HacsBase()
     hacs.enable_hacs()
 
-    if config is not None:
-        if DOMAIN not in config:
-            return True
-        if hacs.configuration.config_type == ConfigurationType.CONFIG_ENTRY:
-            return True
-        hacs.configuration.update_from_dict(
-            {
-                "config_type": ConfigurationType.YAML,
-                **config[DOMAIN],
-                "config": config[DOMAIN],
-            }
-        )
-
-    if config_entry is not None:
-        if config_entry.source == SOURCE_IMPORT:
-            hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
-            return False
+    if config_entry.source == SOURCE_IMPORT:
+        # Import is not supported
+        hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
+        return False
 
-        hacs.configuration.update_from_dict(
-            {
-                "config_entry": config_entry,
-                "config_type": ConfigurationType.CONFIG_ENTRY,
-                **config_entry.data,
-                **config_entry.options,
-            }
-        )
+    hacs.configuration.update_from_dict(
+        {
+            "config_entry": config_entry,
+            **config_entry.data,
+            **config_entry.options,
+        },
+    )
 
     integration = await async_get_integration(hass, DOMAIN)
@@ -104,7 +82,6 @@ async def async_initialize_integration(
    except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
        # If this happens, the users YAML is not valid, we assume YAML mode
        pass
-   hacs.log.debug("Configuration type: %s", hacs.configuration.config_type)
    hacs.core.config_path = hacs.hass.config.path()
 
    if hacs.core.ha_version is None:
@@ -131,19 +108,18 @@ async def async_initialize_integration(
         """HACS startup tasks."""
         hacs.enable_hacs()
 
-        for location in (
-            hass.config.path("custom_components/custom_updater.py"),
-            hass.config.path("custom_components/custom_updater/__init__.py"),
-        ):
-            if os.path.exists(location):
-                hacs.log.critical(
-                    "This cannot be used with custom_updater. "
-                    "To use this you need to remove custom_updater form %s",
-                    location,
-                )
+        try:
+            import custom_components.custom_updater
+        except ImportError:
+            pass
+        else:
+            hacs.log.critical(
+                "HACS cannot be used with custom_updater. "
+                "To use HACS you need to remove custom_updater from `custom_components`",
+            )
 
-                hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
-                return False
+            hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
+            return False
 
         if not version_left_higher_or_equal_then_right(
             hacs.core.ha_version.string,
@@ -160,39 +136,23 @@ async def async_initialize_integration(
             hacs.disable_hacs(HacsDisabledReason.RESTORE)
             return False
 
-        if not hacs.configuration.experimental:
-            can_update = await hacs.async_can_update()
-            hacs.log.debug("Can update %s repositories", can_update)
-
         hacs.set_active_categories()
 
         async_register_websocket_commands(hass)
-        async_register_frontend(hass, hacs)
+        await async_register_frontend(hass, hacs)
 
-        if hacs.configuration.config_type == ConfigurationType.YAML:
-            hass.async_create_task(
-                async_load_platform(hass, Platform.SENSOR, DOMAIN, {}, hacs.configuration.config)
-            )
-            hacs.log.info("Update entities are only supported when using UI configuration")
-
-        else:
-            await hass.config_entries.async_forward_entry_setups(
-                config_entry,
-                [Platform.SENSOR, Platform.UPDATE]
-                if hacs.configuration.experimental
-                else [Platform.SENSOR],
-            )
+        await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
 
         hacs.set_stage(HacsStage.SETUP)
         if hacs.system.disabled:
             return False
 
-        # Schedule startup tasks
-        async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
-
         hacs.set_stage(HacsStage.WAITING)
         hacs.log.info("Setup complete, waiting for Home Assistant before startup tasks starts")
 
+        # Schedule startup tasks
+        async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
+
         return not hacs.system.disabled
 
    async def async_try_startup(_=None):
@@ -202,10 +162,7 @@ async def async_initialize_integration(
        except AIOGitHubAPIException:
            startup_result = False
        if not startup_result:
-           if (
-               hacs.configuration.config_type == ConfigurationType.YAML
-               or hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN
-           ):
+           if hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN:
                hacs.log.info("Could not setup HACS, trying again in 15 min")
                async_call_later(hass, 900, async_try_startup)
            return
@@ -213,37 +170,19 @@ async def async_initialize_integration(
 
    await async_try_startup()
 
+   # Remove old (v0-v1) sensor if it exists, can be removed in v3
+   er = async_get_entity_registry(hass)
+   if old_sensor := er.async_get_entity_id("sensor", DOMAIN, HACS_SYSTEM_ID):
+       er.async_remove(old_sensor)
+
    # Mischief managed!
    return True
 
 
-async def async_setup(hass: HomeAssistant, config: dict[str, Any]) -> bool:
-    """Set up this integration using yaml."""
-    if DOMAIN in config:
-        async_create_issue(
-            hass,
-            DOMAIN,
-            "deprecated_yaml_configuration",
-            is_fixable=False,
-            issue_domain=DOMAIN,
-            severity=IssueSeverity.WARNING,
-            translation_key="deprecated_yaml_configuration",
-            learn_more_url="https://hacs.xyz/docs/configuration/options",
-        )
-        LOGGER.warning(
-            "YAML configuration of HACS is deprecated and will be "
-            "removed in version 2.0.0, there will be no automatic "
-            "import of this. "
-            "Please remove it from your configuration, "
-            "restart Home Assistant and use the UI to configure it instead."
-        )
-    return await async_initialize_integration(hass=hass, config=config)
-
-
 async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up this integration using UI."""
    config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
-   setup_result = await async_initialize_integration(hass=hass, config_entry=config_entry)
+   setup_result = await _async_initialize_integration(hass=hass, config_entry=config_entry)
    hacs: HacsBase = hass.data[DOMAIN]
    return setup_result and not hacs.system.disabled
 
@@ -259,7 +198,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
    # Clear out pending queue
    hacs.queue.clear()
 
-   for task in hacs.recuring_tasks:
+   for task in hacs.recurring_tasks:
        # Cancel all pending tasks
        task()
 
@@ -269,15 +208,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
    try:
        if hass.data.get("frontend_panels", {}).get("hacs"):
            hacs.log.info("Removing sidepanel")
-           hass.components.frontend.async_remove_panel("hacs")
+           async_remove_panel(hass, "hacs")
    except AttributeError:
        pass
 
-   platforms = ["sensor"]
-   if hacs.configuration.experimental:
-       platforms.append("update")
-
-   unload_ok = await hass.config_entries.async_unload_platforms(config_entry, platforms)
+   unload_ok = await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
 
    hacs.set_stage(None)
    hacs.disable_hacs(HacsDisabledReason.REMOVED)
@@ -1,16 +1,17 @@
 """Base HACS class."""
+
 from __future__ import annotations
 
 import asyncio
+from collections.abc import Awaitable, Callable
 from dataclasses import asdict, dataclass, field
 from datetime import timedelta
 import gzip
-import logging
 import math
 import os
 import pathlib
 import shutil
-from typing import TYPE_CHECKING, Any, Awaitable, Callable
+from typing import TYPE_CHECKING, Any
 
 from aiogithubapi import (
     AIOGitHubAPIException,
@@ -24,23 +25,22 @@ from aiogithubapi import (
 from aiogithubapi.objects.repository import AIOGitHubAPIRepository
 from aiohttp.client import ClientSession, ClientTimeout
 from awesomeversion import AwesomeVersion
-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.components.persistent_notification import (
+    async_create as async_create_persistent_notification,
+)
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, Platform
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_send
+from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.loader import Integration
 from homeassistant.util import dt
 
-from custom_components.hacs.repositories.base import (
-    HACS_MANIFEST_KEYS_TO_EXPORT,
-    REPOSITORY_KEYS_TO_EXPORT,
-)
-
 from .const import DOMAIN, TV, URL_BASE
+from .coordinator import HacsUpdateCoordinator
 from .data_client import HacsDataClient
 from .enums import (
-    ConfigurationType,
     HacsCategory,
     HacsDisabledReason,
     HacsDispatchEvent,
@@ -58,12 +58,14 @@ from .exceptions import (
     HacsRepositoryExistException,
     HomeAssistantCoreRepositoryException,
 )
-from .repositories import RERPOSITORY_CLASSES
-from .utils.decode import decode_content
+from .repositories import REPOSITORY_CLASSES
+from .repositories.base import HACS_MANIFEST_KEYS_TO_EXPORT, REPOSITORY_KEYS_TO_EXPORT
+from .utils.file_system import async_exists
 from .utils.json import json_loads
 from .utils.logger import LOGGER
 from .utils.queue_manager import QueueManager
 from .utils.store import async_load_from_store, async_save_to_store
+from .utils.workarounds import async_register_static_path
 
 if TYPE_CHECKING:
     from .repositories.base import HacsRepository
@@ -113,15 +115,11 @@ class HacsConfiguration:
     appdaemon: bool = False
     config: dict[str, Any] = field(default_factory=dict)
     config_entry: ConfigEntry | None = None
-    config_type: ConfigurationType | None = None
     country: str = "ALL"
     debug: bool = False
     dev: bool = False
-    experimental: bool = False
     frontend_repo_url: str = ""
     frontend_repo: str = ""
-    netdaemon_path: str = "netdaemon/apps/"
-    netdaemon: bool = False
     plugin_path: str = "www/community/"
     python_script_path: str = "python_scripts/"
     python_script: bool = False
@@ -142,6 +140,8 @@ class HacsConfiguration:
            raise HacsException("Configuration is not valid.")
 
        for key in data:
+           if key in {"experimental", "netdaemon", "release_limit", "debug"}:
+               continue
            self.__setattr__(key, data[key])
 
 
@@ -355,9 +355,6 @@ class HacsRepositories:
 class HacsBase:
    """Base HACS class."""
 
-   common = HacsCommon()
-   configuration = HacsConfiguration()
-   core = HacsCore()
    data: HacsData | None = None
    data_client: HacsDataClient | None = None
    frontend_version: str | None = None
@@ -365,18 +362,25 @@ class HacsBase:
    githubapi: GitHubAPI | None = None
    hass: HomeAssistant | None = None
    integration: Integration | None = None
-   log: logging.Logger = LOGGER
    queue: QueueManager | None = None
-   recuring_tasks = []
-   repositories: HacsRepositories = HacsRepositories()
    repository: AIOGitHubAPIRepository | None = None
    session: ClientSession | None = None
    stage: HacsStage | None = None
-   status = HacsStatus()
-   system = HacsSystem()
    validation: ValidationManager | None = None
    version: AwesomeVersion | None = None
 
+   def __init__(self) -> None:
+       """Initialize."""
+       self.common = HacsCommon()
+       self.configuration = HacsConfiguration()
+       self.coordinators: dict[HacsCategory, HacsUpdateCoordinator] = {}
+       self.core = HacsCore()
+       self.log = LOGGER
+       self.recurring_tasks: list[Callable[[], None]] = []
+       self.repositories = HacsRepositories()
+       self.status = HacsStatus()
+       self.system = HacsSystem()
+
    @property
    def integration_dir(self) -> pathlib.Path:
        """Return the HACS integration dir."""
@@ -401,12 +405,7 @@ class HacsBase:
        if reason != HacsDisabledReason.REMOVED:
            self.log.error("HACS is disabled - %s", reason)
 
-       if (
-           reason == HacsDisabledReason.INVALID_TOKEN
-           and self.configuration.config_type == ConfigurationType.CONFIG_ENTRY
-       ):
-           self.configuration.config_entry.state = ConfigEntryState.SETUP_ERROR
-           self.configuration.config_entry.reason = "Authentication failed"
+       if reason == HacsDisabledReason.INVALID_TOKEN:
            self.hass.add_job(self.configuration.config_entry.async_start_reauth, self.hass)
 
    def enable_hacs(self) -> None:
@@ -420,12 +419,14 @@ class HacsBase:
        if category not in self.common.categories:
            self.log.info("Enable category: %s", category)
            self.common.categories.add(category)
+           self.coordinators[category] = HacsUpdateCoordinator()
 
    def disable_hacs_category(self, category: HacsCategory) -> None:
        """Disable HACS category."""
        if category in self.common.categories:
            self.log.info("Disabling category: %s", category)
            self.common.categories.pop(category)
+           self.coordinators.pop(category)
 
    async def async_save_file(self, file_path: str, content: Any) -> bool:
        """Save a file."""
@@ -458,12 +459,13 @@ class HacsBase:
        try:
            await self.hass.async_add_executor_job(_write_file)
        except (
-           BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           BaseException
        ) as error:
            self.log.error("Could not write data to %s - %s", file_path, error)
            return False
 
-       return os.path.exists(file_path)
+       return await async_exists(self.hass, file_path)
 
    async def async_can_update(self) -> int:
        """Helper to calculate the number of repositories we can fetch data for."""
@@ -479,24 +481,13 @@ class HacsBase:
            )
            self.disable_hacs(HacsDisabledReason.RATE_LIMIT)
        except (
-           BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           BaseException
        ) as exception:
            self.log.exception(exception)
 
        return 0
 
-   async def async_github_get_hacs_default_file(self, filename: str) -> list:
-       """Get the content of a default file."""
-       response = await self.async_github_api_method(
-           method=self.githubapi.repos.contents.get,
-           repository=HacsGitHubRepo.DEFAULT,
-           path=filename,
-       )
-       if response is None:
-           return []
-
-       return json_loads(decode_content(response.data.content))
-
    async def async_github_api_method(
        self,
        method: Callable[[], Awaitable[TV]],
@@ -520,7 +511,8 @@ class HacsBase:
        except GitHubException as exception:
            _exception = exception
        except (
-           BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           BaseException
        ) as exception:
            self.log.exception(exception)
            _exception = exception
@@ -552,7 +544,7 @@ class HacsBase:
        ):
            raise AddonRepositoryException()
 
-       if category not in RERPOSITORY_CLASSES:
+       if category not in REPOSITORY_CLASSES:
            self.log.warning(
                "%s is not a valid repository category, %s will not be registered.",
                category,
@@ -563,7 +555,7 @@ class HacsBase:
        if (renamed := self.common.renamed_repositories.get(repository_full_name)) is not None:
            repository_full_name = renamed
 
-       repository: HacsRepository = RERPOSITORY_CLASSES[category](self, repository_full_name)
+       repository: HacsRepository = REPOSITORY_CLASSES[category](self, repository_full_name)
        if check:
            try:
                await repository.async_registration(ref)
@@ -573,7 +565,8 @@ class HacsBase:
                        self.log.error("Validation for %s failed.", repository_full_name)
                    if self.system.action:
                        raise HacsException(
-                           f"::error:: Validation for {repository_full_name} failed."
+                           f"::error:: Validation for {
+                               repository_full_name} failed."
                        )
                    return repository.validate.errors
                if self.system.action:
@@ -589,7 +582,8 @@ class HacsBase:
            except AIOGitHubAPIException as exception:
                self.common.skip.add(repository.data.full_name)
                raise HacsException(
-                   f"Validation for {repository_full_name} failed with {exception}."
+                   f"Validation for {
+                       repository_full_name} failed with {exception}."
                ) from exception
 
        if self.status.new:
@@ -620,79 +614,64 @@ class HacsBase:
        for repo in critical:
            if not repo["acknowledged"]:
                self.log.critical("URGENT!: Check the HACS panel!")
-               self.hass.components.persistent_notification.create(
-                   title="URGENT!", message="**Check the HACS panel!**"
+               async_create_persistent_notification(
+                   self.hass, title="URGENT!", message="**Check the HACS panel!**"
                )
                break
 
-       if not self.configuration.experimental:
-           self.recuring_tasks.append(
-               self.hass.helpers.event.async_track_time_interval(
-                   self.async_update_downloaded_repositories, timedelta(hours=48)
-               )
-           )
-           self.recuring_tasks.append(
-               self.hass.helpers.event.async_track_time_interval(
-                   self.async_update_all_repositories,
-                   timedelta(hours=96),
-               )
-           )
-       else:
-           self.recuring_tasks.append(
-               self.hass.helpers.event.async_track_time_interval(
-                   self.async_load_hacs_from_github,
-                   timedelta(hours=48),
-               )
-           )
-
-           self.recuring_tasks.append(
-               self.hass.helpers.event.async_track_time_interval(
-                   self.async_update_downloaded_custom_repositories, timedelta(hours=48)
-               )
-           )
+       self.recurring_tasks.append(
+           async_track_time_interval(
+               self.hass,
+               self.async_load_hacs_from_github,
+               timedelta(hours=48),
+           )
+       )
 
-       self.recuring_tasks.append(
-           self.hass.helpers.event.async_track_time_interval(
-               self.async_get_all_category_repositories, timedelta(hours=6)
+       self.recurring_tasks.append(
+           async_track_time_interval(
+               self.hass, self.async_update_downloaded_custom_repositories, timedelta(hours=48)
            )
        )
 
-       self.recuring_tasks.append(
-           self.hass.helpers.event.async_track_time_interval(
-               self.async_check_rate_limit, timedelta(minutes=5)
-           )
-       )
-       self.recuring_tasks.append(
-           self.hass.helpers.event.async_track_time_interval(
-               self.async_prosess_queue, timedelta(minutes=10)
+       self.recurring_tasks.append(
+           async_track_time_interval(
+               self.hass, self.async_get_all_category_repositories, timedelta(hours=6)
            )
        )
 
-       self.recuring_tasks.append(
-           self.hass.helpers.event.async_track_time_interval(
-               self.async_handle_critical_repositories, timedelta(hours=6)
+       self.recurring_tasks.append(
+           async_track_time_interval(self.hass, self.async_check_rate_limit, timedelta(minutes=5))
+       )
+       self.recurring_tasks.append(
+           async_track_time_interval(self.hass, self.async_process_queue, timedelta(minutes=10))
+       )
+
+       self.recurring_tasks.append(
+           async_track_time_interval(
+               self.hass, self.async_handle_critical_repositories, timedelta(hours=6)
            )
        )
 
-       self.hass.bus.async_listen_once(
+       unsub = self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_FINAL_WRITE, self.data.async_force_write
        )
+       if config_entry := self.configuration.config_entry:
+           config_entry.async_on_unload(unsub)
 
-       self.log.debug("There are %s scheduled recurring tasks", len(self.recuring_tasks))
+       self.log.debug("There are %s scheduled recurring tasks", len(self.recurring_tasks))
 
        self.status.startup = False
        self.async_dispatch(HacsDispatchEvent.STATUS, {})
 
        await self.async_handle_removed_repositories()
        await self.async_get_all_category_repositories()
-       await self.async_update_downloaded_repositories()
 
        self.set_stage(HacsStage.RUNNING)
 
        self.async_dispatch(HacsDispatchEvent.RELOAD, {"force": True})
 
        await self.async_handle_critical_repositories()
-       await self.async_prosess_queue()
+       await self.async_process_queue()
 
        self.async_dispatch(HacsDispatchEvent.STATUS, {})
@@ -728,9 +707,10 @@ class HacsBase:
                return await request.read()
 
            raise HacsException(
-               f"Got status code {request.status} when trying to download {url}"
+               f"Got status code {
+                   request.status} when trying to download {url}"
            )
-       except asyncio.TimeoutError:
+       except TimeoutError:
            self.log.warning(
                "A timeout of 60! seconds was encountered while downloading %s, "
                "using over 60 seconds to download a single file is not normal. "
@@ -746,7 +726,8 @@ class HacsBase:
                continue
 
        except (
-           BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           # lgtm [py/catch-base-exception] pylint: disable=broad-except
+           BaseException
        ) as exception:
            if not nolog:
                self.log.exception("Download failed - %s", exception)
@@ -755,15 +736,24 @@ class HacsBase:
 
    async def async_recreate_entities(self) -> None:
        """Recreate entities."""
-       if self.configuration == ConfigurationType.YAML or not self.configuration.experimental:
-           return
-
-       platforms = [Platform.SENSOR, Platform.UPDATE]
+       platforms = [Platform.UPDATE]
 
-       await self.hass.config_entries.async_unload_platforms(
-           entry=self.configuration.config_entry,
-           platforms=platforms,
-       )
+       # Workaround for core versions without https://github.com/home-assistant/core/pull/117084
+       if self.core.ha_version < AwesomeVersion("2024.6.0"):
+           unload_platforms_lock = asyncio.Lock()
+           async with unload_platforms_lock:
+               on_unload = self.configuration.config_entry._on_unload
+               self.configuration.config_entry._on_unload = []
+               await self.hass.config_entries.async_unload_platforms(
+                   entry=self.configuration.config_entry,
+                   platforms=platforms,
+               )
+               self.configuration.config_entry._on_unload = on_unload
+       else:
+           await self.hass.config_entries.async_unload_platforms(
+               entry=self.configuration.config_entry,
+               platforms=platforms,
+           )
 
        await self.hass.config_entries.async_forward_entry_setups(
            self.configuration.config_entry, platforms
        )
@@ -776,12 +766,9 @@ class HacsBase:
    def set_active_categories(self) -> None:
        """Set the active categories."""
        self.common.categories = set()
-       for category in (HacsCategory.INTEGRATION, HacsCategory.PLUGIN):
+       for category in (HacsCategory.INTEGRATION, HacsCategory.PLUGIN, HacsCategory.TEMPLATE):
            self.enable_hacs_category(HacsCategory(category))
 
-       if self.configuration.experimental:
-           self.enable_hacs_category(HacsCategory.TEMPLATE)
-
        if (
            HacsCategory.PYTHON_SCRIPT in self.hass.config.components
            or self.repositories.category_downloaded(HacsCategory.PYTHON_SCRIPT)
@@ -795,30 +782,24 @@ class HacsBase:
 
        if self.configuration.appdaemon:
            self.enable_hacs_category(HacsCategory.APPDAEMON)
-       if self.configuration.netdaemon:
-           if self.repositories.category_downloaded(HacsCategory.NETDAEMON):
-               self.log.warning(
-                   "NetDaemon in HACS is deprectaded. It will stop working in the future. "
-                   "Please remove all your current NetDaemon repositories from HACS "
-                   "and download them manually if you want to continue using them."
-               )
-               self.enable_hacs_category(HacsCategory.NETDAEMON)
 
    async def async_load_hacs_from_github(self, _=None) -> None:
        """Load HACS from GitHub."""
-       if self.configuration.experimental and self.status.inital_fetch_done:
+       if self.status.inital_fetch_done:
            return
 
        try:
            repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
+           should_recreate_entities = False
            if repository is None:
+               should_recreate_entities = True
                await self.async_register_repository(
                    repository_full_name=HacsGitHubRepo.INTEGRATION,
                    category=HacsCategory.INTEGRATION,
                    default=True,
                )
                repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
-           elif self.configuration.experimental and not self.status.startup:
+           elif not self.status.startup:
                self.log.error("Scheduling update of hacs/integration")
                self.queue.add(repository.common_update())
            if repository is None:
@@ -829,6 +810,9 @@ class HacsBase:
            repository.data.new = False
            repository.data.releases = True
 
+           if should_recreate_entities:
+               await self.async_recreate_entities()
+
            self.repository = repository.repository_object
            self.repositories.mark_default(repository)
        except HacsException as exception:
@@ -848,8 +832,6 @@ class HacsBase:
        await asyncio.gather(
            *[
                self.async_get_category_repositories_experimental(category)
-               if self.configuration.experimental
-               else self.async_get_category_repositories(HacsCategory(category))
                for category in self.common.categories or []
            ]
        )
@@ -858,7 +840,7 @@ class HacsBase:
        """Update all category repositories."""
        self.log.debug("Fetching updated content for %s", category)
        try:
-           category_data = await self.data_client.get_data(category)
+           category_data = await self.data_client.get_data(category, validate=True)
        except HacsNotModifiedException:
            self.log.debug("No updates for %s", category)
            return
@@ -869,14 +851,14 @@ class HacsBase:
        await self.data.register_unknown_repositories(category_data, category)
 
        for repo_id, repo_data in category_data.items():
-           repo = repo_data["full_name"]
-           if self.common.renamed_repositories.get(repo):
-               repo = self.common.renamed_repositories[repo]
-           if self.repositories.is_removed(repo):
+           repo_name = repo_data["full_name"]
+           if self.common.renamed_repositories.get(repo_name):
+               repo_name = self.common.renamed_repositories[repo_name]
+           if self.repositories.is_removed(repo_name):
                continue
-           if repo in self.common.archived_repositories:
+           if repo_name in self.common.archived_repositories:
                continue
-           if repository := self.repositories.get_by_full_name(repo):
+           if repository := self.repositories.get_by_full_name(repo_name):
                self.repositories.set_repository_id(repository, repo_id)
                self.repositories.mark_default(repository)
                if repository.data.last_fetched is None or (
@@ -904,51 +886,7 @@ class HacsBase:
                    self.repositories.unregister(repository)
 
        self.async_dispatch(HacsDispatchEvent.REPOSITORY, {})
+       self.coordinators[category].async_update_listeners()
 
-   async def async_get_category_repositories(self, category: HacsCategory) -> None:
-       """Get repositories from category."""
-       if self.system.disabled:
-           return
-       try:
-           repositories = await self.async_github_get_hacs_default_file(category)
-       except HacsException:
-           return
-
-       for repo in repositories:
-           if self.common.renamed_repositories.get(repo):
-               repo = self.common.renamed_repositories[repo]
-           if self.repositories.is_removed(repo):
-               continue
-           if repo in self.common.archived_repositories:
-               continue
-           repository = self.repositories.get_by_full_name(repo)
-           if repository is not None:
-               self.repositories.mark_default(repository)
-               if self.status.new and self.configuration.dev:
-                   # Force update for new installations
-                   self.queue.add(repository.common_update())
-               continue
-
-           self.queue.add(
-               self.async_register_repository(
-                   repository_full_name=repo,
-                   category=category,
-                   default=True,
-               )
-           )
-
-   async def async_update_all_repositories(self, _=None) -> None:
-       """Update all repositories."""
-       if self.system.disabled:
-           return
-       self.log.debug("Starting recurring background task for all repositories")
-
-       for repository in self.repositories.list_all:
-           if repository.data.category in self.common.categories:
-               self.queue.add(repository.common_update())
-
-       self.async_dispatch(HacsDispatchEvent.REPOSITORY, {"action": "reload"})
-       self.log.debug("Recurring background task for all repositories done")
-
    async def async_check_rate_limit(self, _=None) -> None:
        """Check rate limit."""
@@ -960,9 +898,9 @@ class HacsBase:
        self.log.debug("Ratelimit indicate we can update %s", can_update)
        if can_update > 0:
            self.enable_hacs()
-           await self.async_prosess_queue()
+           await self.async_process_queue()
 
-   async def async_prosess_queue(self, _=None) -> None:
+   async def async_process_queue(self, _=None) -> None:
        """Process the queue."""
        if self.system.disabled:
            self.log.debug("HACS is disabled")
@@ -1002,12 +940,7 @@ class HacsBase:
        self.log.info("Loading removed repositories")
 
        try:
-           if self.configuration.experimental:
-               removed_repositories = await self.data_client.get_data("removed")
-           else:
-               removed_repositories = await self.async_github_get_hacs_default_file(
-                   HacsCategory.REMOVED
-               )
+           removed_repositories = await self.data_client.get_data("removed", validate=True)
        except HacsException:
            return
 
@@ -1022,21 +955,20 @@ class HacsBase:
                continue
            if repository.data.installed:
                if removed.removal_type != "critical":
-                   if self.configuration.experimental:
-                       async_create_issue(
-                           hass=self.hass,
-                           domain=DOMAIN,
-                           issue_id=f"removed_{repository.data.id}",
-                           is_fixable=False,
-                           issue_domain=DOMAIN,
-                           severity=IssueSeverity.WARNING,
-                           translation_key="removed",
-                           translation_placeholders={
-                               "name": repository.data.full_name,
-                               "reason": removed.reason,
-                               "repositry_id": repository.data.id,
-                           },
-                       )
+                   async_create_issue(
+                       hass=self.hass,
+                       domain=DOMAIN,
+                       issue_id=f"removed_{repository.data.id}",
+                       is_fixable=False,
+                       issue_domain=DOMAIN,
+                       severity=IssueSeverity.WARNING,
+                       translation_key="removed",
+                       translation_placeholders={
+                           "name": repository.data.full_name,
+                           "reason": removed.reason,
+                           "repositry_id": repository.data.id,
+                       },
+                   )
                    self.log.warning(
                        "You have '%s' installed with HACS "
                        "this repository has been removed from HACS, please consider removing it. "
@@ -1051,30 +983,43 @@ class HacsBase:
        if need_to_save:
            await self.data.async_write()
 
-   async def async_update_downloaded_repositories(self, _=None) -> None:
-       """Execute the task."""
-       if self.system.disabled or self.configuration.experimental:
-           return
-       self.log.info("Starting recurring background task for downloaded repositories")
-
-       for repository in self.repositories.list_downloaded:
-           if repository.data.category in self.common.categories:
-               self.queue.add(repository.update_repository(ignore_issues=True))
-
-       self.log.debug("Recurring background task for downloaded repositories done")
-
    async def async_update_downloaded_custom_repositories(self, _=None) -> None:
        """Execute the task."""
-       if self.system.disabled or not self.configuration.experimental:
+       if self.system.disabled:
            return
        self.log.info("Starting recurring background task for downloaded custom repositories")
 
+       repositories_to_update = 0
+       repositories_updated = asyncio.Event()
+
+       async def update_repository(repository: HacsRepository) -> None:
+           """Update a repository"""
+           nonlocal repositories_to_update
+           await repository.update_repository(ignore_issues=True)
+           repositories_to_update -= 1
+           if not repositories_to_update:
+               repositories_updated.set()
+
        for repository in self.repositories.list_downloaded:
            if (
                repository.data.category in self.common.categories
                and not self.repositories.is_default(repository.data.id)
            ):
-               self.queue.add(repository.update_repository(ignore_issues=True))
+               repositories_to_update += 1
+               self.queue.add(update_repository(repository))
+
+       async def update_coordinators() -> None:
+           """Update all coordinators."""
+           await repositories_updated.wait()
+           for coordinator in self.coordinators.values():
+               coordinator.async_update_listeners()
+
+       if config_entry := self.configuration.config_entry:
+           config_entry.async_create_background_task(
+               self.hass, update_coordinators(), "update_coordinators"
+           )
+       else:
+           self.hass.async_create_background_task(update_coordinators(), "update_coordinators")
 
        self.log.debug("Recurring background task for downloaded custom repositories done")
 
@@ -1086,10 +1031,7 @@ class HacsBase:
        was_installed = False
 
        try:
-           if self.configuration.experimental:
-               critical = await self.data_client.get_data("critical")
-           else:
-               critical = await self.async_github_get_hacs_default_file("critical")
+           critical = await self.data_client.get_data("critical", validate=True)
        except (GitHubNotModifiedException, HacsNotModifiedException):
            return
        except HacsException:
@@ -1143,11 +1085,10 @@ class HacsBase:
            self.log.critical("Restarting Home Assistant")
            self.hass.async_create_task(self.hass.async_stop(100))
 
-   @callback
-   def async_setup_frontend_endpoint_plugin(self) -> None:
+   async def async_setup_frontend_endpoint_plugin(self) -> None:
        """Setup the http endpoints for plugins if its not already handled."""
-       if self.status.active_frontend_endpoint_plugin or not os.path.exists(
-           self.hass.config.path("www/community")
+       if self.status.active_frontend_endpoint_plugin or not await async_exists(
+           self.hass, self.hass.config.path("www/community")
        ):
            return
 
@@ -1159,26 +1100,11 @@ class HacsBase:
            use_cache,
        )
 
-       self.hass.http.register_static_path(
+       await async_register_static_path(
+           self.hass,
            URL_BASE,
            self.hass.config.path("www/community"),
            cache_headers=use_cache,
        )
 
        self.status.active_frontend_endpoint_plugin = True
-
-   @callback
-   def async_setup_frontend_endpoint_themes(self) -> None:
-       """Setup the http endpoints for themes if its not already handled."""
-       if (
-           self.configuration.experimental
-           or self.status.active_frontend_endpoint_theme
-           or not os.path.exists(self.hass.config.path("themes"))
-       ):
-           return
-
-       self.log.info("Setting up themes endpoint")
-       # Register themes
-       self.hass.http.register_static_path(f"{URL_BASE}/themes", self.hass.config.path("themes"))
-
-       self.status.active_frontend_endpoint_theme = True
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Adds config flow for HACS."""
|
"""Adds config flow for HACS."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
@@ -23,14 +24,9 @@ import voluptuous as vol
 
 from .base import HacsBase
 from .const import CLIENT_ID, DOMAIN, LOCALE, MINIMUM_HA_VERSION
-from .enums import ConfigurationType
 from .utils.configuration_schema import (
     APPDAEMON,
     COUNTRY,
-    DEBUG,
-    EXPERIMENTAL,
-    NETDAEMON,
-    RELEASE_LIMIT,
     SIDEPANEL_ICON,
     SIDEPANEL_TITLE,
 )
@@ -75,15 +71,9 @@ class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
 
            return await self.async_step_device(user_input)
 
-       ## Initial form
+       # Initial form
        return await self._show_config_form(user_input)
 
-   @callback
-   def async_remove(self):
-       """Cleanup."""
-       if self.activation_task and not self.activation_task.done():
-           self.activation_task.cancel()
-
    async def async_step_device(self, _user_input):
        """Handle device steps."""
 
@@ -97,8 +87,6 @@ class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
            with suppress(UnknownFlow):
                await self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
 
-       self.hass.async_create_task(_progress())
-
        if not self.device:
            integration = await async_get_integration(self.hass, DOMAIN)
            self.device = GitHubDeviceAPI(
@@ -122,14 +110,16 @@ class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
                return self.async_show_progress_done(next_step_id="could_not_register")
            return self.async_show_progress_done(next_step_id="device_done")
 
-       return self.async_show_progress(
-           step_id="device",
-           progress_action="wait_for_device",
-           description_placeholders={
+       show_progress_kwargs = {
+           "step_id": "device",
+           "progress_action": "wait_for_device",
+           "description_placeholders": {
                "url": OAUTH_USER_LOGIN,
                "code": self._registration.user_code,
            },
-       )
+           "progress_task": self.activation_task,
+       }
+       return self.async_show_progress(**show_progress_kwargs)
 
    async def _show_config_form(self, user_input):
        """Show the configuration form to edit location data."""
async def _show_config_form(self, user_input):
|
async def _show_config_form(self, user_input):
|
||||||
"""Show the configuration form to edit location data."""
|
"""Show the configuration form to edit location data."""
|
||||||
@@ -152,9 +142,6 @@ class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
"acc_untested", default=user_input.get("acc_untested", False)
|
"acc_untested", default=user_input.get("acc_untested", False)
|
||||||
): bool,
|
): bool,
|
||||||
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
|
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
|
||||||
vol.Optional(
|
|
||||||
"experimental", default=user_input.get("experimental", False)
|
|
||||||
): bool,
|
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
errors=self._errors,
|
errors=self._errors,
|
||||||
@@ -176,7 +163,7 @@ class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
"token": self._activation.access_token,
|
"token": self._activation.access_token,
|
||||||
},
|
},
|
||||||
options={
|
options={
|
||||||
"experimental": self._user_input.get("experimental", False),
|
"experimental": True,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -219,10 +206,7 @@ class HacsOptionsFlowHandler(OptionsFlow):
|
|||||||
"""Handle a flow initialized by the user."""
|
"""Handle a flow initialized by the user."""
|
||||||
hacs: HacsBase = self.hass.data.get(DOMAIN)
|
hacs: HacsBase = self.hass.data.get(DOMAIN)
|
||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
limit = int(user_input.get(RELEASE_LIMIT, 5))
|
return self.async_create_entry(title="", data={**user_input, "experimental": True})
|
||||||
if limit <= 0 or limit > 100:
|
|
||||||
return self.async_abort(reason="release_limit_value")
|
|
||||||
return self.async_create_entry(title="", data=user_input)
|
|
||||||
|
|
||||||
if hacs is None or hacs.configuration is None:
|
if hacs is None or hacs.configuration is None:
|
||||||
return self.async_abort(reason="not_setup")
|
return self.async_abort(reason="not_setup")
|
||||||
@@ -230,18 +214,11 @@ class HacsOptionsFlowHandler(OptionsFlow):
|
|||||||
if hacs.queue.has_pending_tasks:
|
if hacs.queue.has_pending_tasks:
|
||||||
return self.async_abort(reason="pending_tasks")
|
return self.async_abort(reason="pending_tasks")
|
||||||
|
|
||||||
if hacs.configuration.config_type == ConfigurationType.YAML:
|
schema = {
|
||||||
schema = {vol.Optional("not_in_use", default=""): str}
|
vol.Optional(SIDEPANEL_TITLE, default=hacs.configuration.sidepanel_title): str,
|
||||||
else:
|
vol.Optional(SIDEPANEL_ICON, default=hacs.configuration.sidepanel_icon): str,
|
||||||
schema = {
|
vol.Optional(COUNTRY, default=hacs.configuration.country): vol.In(LOCALE),
|
||||||
vol.Optional(SIDEPANEL_TITLE, default=hacs.configuration.sidepanel_title): str,
|
vol.Optional(APPDAEMON, default=hacs.configuration.appdaemon): bool,
|
||||||
vol.Optional(SIDEPANEL_ICON, default=hacs.configuration.sidepanel_icon): str,
|
}
|
||||||
vol.Optional(RELEASE_LIMIT, default=hacs.configuration.release_limit): int,
|
|
||||||
vol.Optional(COUNTRY, default=hacs.configuration.country): vol.In(LOCALE),
|
|
||||||
vol.Optional(APPDAEMON, default=hacs.configuration.appdaemon): bool,
|
|
||||||
vol.Optional(NETDAEMON, default=hacs.configuration.netdaemon): bool,
|
|
||||||
vol.Optional(DEBUG, default=hacs.configuration.debug): bool,
|
|
||||||
vol.Optional(EXPERIMENTAL, default=hacs.configuration.experimental): bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
||||||
|
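The config-flow hunks above replace the bare `async_show_progress(...)` call with a kwargs dict that also carries `progress_task`, which newer Home Assistant cores use to await the background activation task and advance the flow when it finishes. A hedged sketch of that pattern, based on the kwargs shown in the diff; `_wait_for_activation` is a hypothetical helper standing in for the real device-activation coroutine:

    async def async_step_device(self, _user_input):
        """Handle the device registration step (sketch, not the full HACS flow)."""
        if self.activation_task is None:
            # The real flow starts device activation here and keeps the task around.
            self.activation_task = self.hass.async_create_task(self._wait_for_activation())

        if self.activation_task.done():
            return self.async_show_progress_done(next_step_id="device_done")

        return self.async_show_progress(
            step_id="device",
            progress_action="wait_for_device",
            description_placeholders={
                "url": OAUTH_USER_LOGIN,             # constants from the module above
                "code": self._registration.user_code,
            },
            progress_task=self.activation_task,      # expected by recent HA cores
        )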
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Constants for HACS"""
|
"""Constants for HACS"""
|
||||||
|
|
||||||
from typing import TypeVar
|
from typing import TypeVar
|
||||||
|
|
||||||
from aiogithubapi.common.const import ACCEPT_HEADERS
|
from aiogithubapi.common.const import ACCEPT_HEADERS
|
||||||
@@ -6,7 +7,7 @@ from aiogithubapi.common.const import ACCEPT_HEADERS
|
|||||||
NAME_SHORT = "HACS"
|
NAME_SHORT = "HACS"
|
||||||
DOMAIN = "hacs"
|
DOMAIN = "hacs"
|
||||||
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
||||||
MINIMUM_HA_VERSION = "2023.6.0"
|
MINIMUM_HA_VERSION = "2024.4.1"
|
||||||
|
|
||||||
URL_BASE = "/hacsfiles"
|
URL_BASE = "/hacsfiles"
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Diagnostics support for HACS."""
|
"""Diagnostics support for HACS."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
@@ -10,7 +11,6 @@ from homeassistant.core import HomeAssistant
|
|||||||
|
|
||||||
from .base import HacsBase
|
from .base import HacsBase
|
||||||
from .const import DOMAIN
|
from .const import DOMAIN
|
||||||
from .utils.configuration_schema import TOKEN
|
|
||||||
|
|
||||||
|
|
||||||
async def async_get_config_entry_diagnostics(
|
async def async_get_config_entry_diagnostics(
|
||||||
@@ -48,8 +48,6 @@ async def async_get_config_entry_diagnostics(
|
|||||||
"country",
|
"country",
|
||||||
"debug",
|
"debug",
|
||||||
"dev",
|
"dev",
|
||||||
"experimental",
|
|
||||||
"netdaemon",
|
|
||||||
"python_script",
|
"python_script",
|
||||||
"release_limit",
|
"release_limit",
|
||||||
"theme",
|
"theme",
|
||||||
@@ -79,4 +77,4 @@ async def async_get_config_entry_diagnostics(
|
|||||||
except GitHubException as exception:
|
except GitHubException as exception:
|
||||||
data["rate_limit"] = str(exception)
|
data["rate_limit"] = str(exception)
|
||||||
|
|
||||||
return async_redact_data(data, (TOKEN,))
|
return async_redact_data(data, ("token",))
|
||||||
|
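The diagnostics hunk drops the `TOKEN` import and passes the plain string `"token"` to `async_redact_data`. The helper walks the payload and masks any key in the given set; a standalone sketch of the idea (not Home Assistant's implementation) for reference:

    from typing import Any

    REDACTED = "**REDACTED**"

    def redact(data: Any, to_redact: tuple[str, ...]) -> Any:
        """Return a copy of data with the named keys masked, recursing into containers."""
        if isinstance(data, dict):
            return {
                key: REDACTED if key in to_redact else redact(value, to_redact)
                for key, value in data.items()
            }
        if isinstance(data, (list, tuple)):
            return [redact(item, to_redact) for item in data]
        return data

    print(redact({"configuration": {"token": "ghp_secret", "country": "NO"}}, ("token",)))
    # -> {'configuration': {'token': '**REDACTED**', 'country': 'NO'}}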
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""HACS Base entities."""
|
"""HACS Base entities."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Any
|
from typing import TYPE_CHECKING, Any
|
||||||
@@ -7,8 +8,10 @@ from homeassistant.core import callback
|
|||||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||||
from homeassistant.helpers.entity import Entity
|
from homeassistant.helpers.entity import Entity
|
||||||
|
from homeassistant.helpers.update_coordinator import BaseCoordinatorEntity
|
||||||
|
|
||||||
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
|
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
|
||||||
|
from .coordinator import HacsUpdateCoordinator
|
||||||
from .enums import HacsDispatchEvent, HacsGitHubRepo
|
from .enums import HacsDispatchEvent, HacsGitHubRepo
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -39,6 +42,10 @@ class HacsBaseEntity(Entity):
|
|||||||
"""Initialize."""
|
"""Initialize."""
|
||||||
self.hacs = hacs
|
self.hacs = hacs
|
||||||
|
|
||||||
|
|
||||||
|
class HacsDispatcherEntity(HacsBaseEntity):
|
||||||
|
"""Base HACS entity listening to dispatcher signals."""
|
||||||
|
|
||||||
async def async_added_to_hass(self) -> None:
|
async def async_added_to_hass(self) -> None:
|
||||||
"""Register for status events."""
|
"""Register for status events."""
|
||||||
self.async_on_remove(
|
self.async_on_remove(
|
||||||
@@ -64,7 +71,7 @@ class HacsBaseEntity(Entity):
|
|||||||
self.async_write_ha_state()
|
self.async_write_ha_state()
|
||||||
|
|
||||||
|
|
||||||
class HacsSystemEntity(HacsBaseEntity):
|
class HacsSystemEntity(HacsDispatcherEntity):
|
||||||
"""Base system entity."""
|
"""Base system entity."""
|
||||||
|
|
||||||
_attr_icon = "hacs:hacs"
|
_attr_icon = "hacs:hacs"
|
||||||
@@ -76,7 +83,7 @@ class HacsSystemEntity(HacsBaseEntity):
|
|||||||
return system_info(self.hacs)
|
return system_info(self.hacs)
|
||||||
|
|
||||||
|
|
||||||
class HacsRepositoryEntity(HacsBaseEntity):
|
class HacsRepositoryEntity(BaseCoordinatorEntity[HacsUpdateCoordinator], HacsBaseEntity):
|
||||||
"""Base repository entity."""
|
"""Base repository entity."""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -85,9 +92,11 @@ class HacsRepositoryEntity(HacsBaseEntity):
|
|||||||
repository: HacsRepository,
|
repository: HacsRepository,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize."""
|
"""Initialize."""
|
||||||
super().__init__(hacs=hacs)
|
BaseCoordinatorEntity.__init__(self, hacs.coordinators[repository.data.category])
|
||||||
|
HacsBaseEntity.__init__(self, hacs=hacs)
|
||||||
self.repository = repository
|
self.repository = repository
|
||||||
self._attr_unique_id = str(repository.data.id)
|
self._attr_unique_id = str(repository.data.id)
|
||||||
|
self._repo_last_fetched = repository.data.last_fetched
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def available(self) -> bool:
|
def available(self) -> bool:
|
||||||
@@ -100,20 +109,35 @@ class HacsRepositoryEntity(HacsBaseEntity):
|
|||||||
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
|
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
|
||||||
return system_info(self.hacs)
|
return system_info(self.hacs)
|
||||||
|
|
||||||
|
def _manufacturer():
|
||||||
|
if authors := self.repository.data.authors:
|
||||||
|
return ", ".join(author.replace("@", "") for author in authors)
|
||||||
|
return self.repository.data.full_name.split("/")[0]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"identifiers": {(DOMAIN, str(self.repository.data.id))},
|
"identifiers": {(DOMAIN, str(self.repository.data.id))},
|
||||||
"name": self.repository.display_name,
|
"name": self.repository.display_name,
|
||||||
"model": self.repository.data.category,
|
"model": self.repository.data.category,
|
||||||
"manufacturer": ", ".join(
|
"manufacturer": _manufacturer(),
|
||||||
author.replace("@", "") for author in self.repository.data.authors
|
"configuration_url": f"homeassistant://hacs/repository/{self.repository.data.id}",
|
||||||
),
|
|
||||||
"configuration_url": "homeassistant://hacs",
|
|
||||||
"entry_type": DeviceEntryType.SERVICE,
|
"entry_type": DeviceEntryType.SERVICE,
|
||||||
}
|
}
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _update_and_write_state(self, data: dict) -> None:
|
def _handle_coordinator_update(self) -> None:
|
||||||
"""Update the entity and write state."""
|
"""Handle updated data from the coordinator."""
|
||||||
if data.get("repository_id") == self.repository.data.id:
|
if (
|
||||||
self._update()
|
self._repo_last_fetched is not None
|
||||||
self.async_write_ha_state()
|
and self.repository.data.last_fetched is not None
|
||||||
|
and self._repo_last_fetched >= self.repository.data.last_fetched
|
||||||
|
):
|
||||||
|
return
|
||||||
|
|
||||||
|
self._repo_last_fetched = self.repository.data.last_fetched
|
||||||
|
self.async_write_ha_state()
|
||||||
|
|
||||||
|
async def async_update(self) -> None:
|
||||||
|
"""Update the entity.
|
||||||
|
|
||||||
|
Only used by the generic entity update service.
|
||||||
|
"""
|
||||||
|
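The entity hunks above move `HacsRepositoryEntity` onto a coordinator and only write state when the repository's `last_fetched` timestamp has actually advanced. A standalone sketch of that skip-if-unchanged guard, using plain datetimes in place of the repository object:

    from datetime import UTC, datetime

    class RepositoryStateTracker:
        """Write state only when the repository was re-fetched since the last write."""

        def __init__(self, last_fetched: datetime | None = None) -> None:
            self._seen = last_fetched

        def should_write(self, last_fetched: datetime | None) -> bool:
            if self._seen is not None and last_fetched is not None and self._seen >= last_fetched:
                return False        # nothing new, skip the state write
            self._seen = last_fetched
            return True

    tracker = RepositoryStateTracker()
    first = datetime.now(UTC)
    print(tracker.should_write(first))   # True  -> new data, write state
    print(tracker.should_write(first))   # False -> unchanged, skip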
|||||||
@@ -1,20 +1,7 @@
|
|||||||
"""Helper constants."""
|
"""Helper constants."""
|
||||||
|
|
||||||
# pylint: disable=missing-class-docstring
|
# pylint: disable=missing-class-docstring
|
||||||
import sys
|
from enum import StrEnum
|
||||||
|
|
||||||
if sys.version_info.minor >= 11:
|
|
||||||
# Needs Python 3.11
|
|
||||||
from enum import StrEnum # # pylint: disable=no-name-in-module
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
# https://github.com/home-assistant/core/blob/dev/homeassistant/backports/enum.py
|
|
||||||
# Considered internal to Home Assistant, can be removed whenever.
|
|
||||||
from homeassistant.backports.enum import StrEnum
|
|
||||||
except ImportError:
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class StrEnum(str, Enum):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class HacsGitHubRepo(StrEnum):
|
class HacsGitHubRepo(StrEnum):
|
||||||
@@ -29,7 +16,6 @@ class HacsCategory(StrEnum):
|
|||||||
INTEGRATION = "integration"
|
INTEGRATION = "integration"
|
||||||
LOVELACE = "lovelace"
|
LOVELACE = "lovelace"
|
||||||
PLUGIN = "plugin" # Kept for legacy purposes
|
PLUGIN = "plugin" # Kept for legacy purposes
|
||||||
NETDAEMON = "netdaemon"
|
|
||||||
PYTHON_SCRIPT = "python_script"
|
PYTHON_SCRIPT = "python_script"
|
||||||
TEMPLATE = "template"
|
TEMPLATE = "template"
|
||||||
THEME = "theme"
|
THEME = "theme"
|
||||||
@@ -59,11 +45,6 @@ class RepositoryFile(StrEnum):
|
|||||||
MAINIFEST_JSON = "manifest.json"
|
MAINIFEST_JSON = "manifest.json"
|
||||||
|
|
||||||
|
|
||||||
class ConfigurationType(StrEnum):
|
|
||||||
YAML = "yaml"
|
|
||||||
CONFIG_ENTRY = "config_entry"
|
|
||||||
|
|
||||||
|
|
||||||
class LovelaceMode(StrEnum):
|
class LovelaceMode(StrEnum):
|
||||||
"""Lovelace Modes."""
|
"""Lovelace Modes."""
|
||||||
|
|
||||||
|
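With the minimum Home Assistant version raised well past the Python 3.11 floor, the enums module can import `StrEnum` directly instead of carrying the backport ladder removed above. A short standalone reminder of why `StrEnum` fits here (members compare and serialize as plain strings), trimmed to two members for the example:

    from enum import StrEnum

    class HacsCategory(StrEnum):
        INTEGRATION = "integration"
        THEME = "theme"

    print(HacsCategory.THEME == "theme")        # True: members behave like str
    print(f"categories/{HacsCategory.THEME}")   # categories/theme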
|||||||
@@ -1,71 +1,53 @@
|
|||||||
""""Starting setup task: Frontend"."""
|
"""Starting setup task: Frontend."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from homeassistant.core import HomeAssistant, callback
|
from homeassistant.components.frontend import (
|
||||||
|
add_extra_js_url,
|
||||||
|
async_register_built_in_panel,
|
||||||
|
)
|
||||||
|
|
||||||
from .const import DOMAIN, URL_BASE
|
from .const import DOMAIN, URL_BASE
|
||||||
from .hacs_frontend import VERSION as FE_VERSION, locate_dir
|
from .hacs_frontend import VERSION as FE_VERSION, locate_dir
|
||||||
from .hacs_frontend_experimental import (
|
from .utils.workarounds import async_register_static_path
|
||||||
VERSION as EXPERIMENTAL_FE_VERSION,
|
|
||||||
locate_dir as experimental_locate_dir,
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
from homeassistant.components.frontend import add_extra_js_url
|
|
||||||
except ImportError:
|
|
||||||
|
|
||||||
def add_extra_js_url(hass: HomeAssistant, url: str, es5: bool = False) -> None:
|
|
||||||
hacs: HacsBase = hass.data.get(DOMAIN)
|
|
||||||
hacs.log.error("Could not import add_extra_js_url from frontend.")
|
|
||||||
if "frontend_extra_module_url" not in hass.data:
|
|
||||||
hass.data["frontend_extra_module_url"] = set()
|
|
||||||
hass.data["frontend_extra_module_url"].add(url)
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
|
||||||
from .base import HacsBase
|
from .base import HacsBase
|
||||||
|
|
||||||
|
|
||||||
@callback
|
async def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
||||||
def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
|
||||||
"""Register the frontend."""
|
"""Register the frontend."""
|
||||||
|
|
||||||
# Setup themes endpoint if needed
|
|
||||||
hacs.async_setup_frontend_endpoint_themes()
|
|
||||||
|
|
||||||
# Register frontend
|
# Register frontend
|
||||||
if hacs.configuration.dev and (frontend_path := os.getenv("HACS_FRONTEND_DIR")):
|
if hacs.configuration.dev and (frontend_path := os.getenv("HACS_FRONTEND_DIR")):
|
||||||
hacs.log.warning(
|
hacs.log.warning(
|
||||||
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
|
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
|
||||||
)
|
)
|
||||||
hass.http.register_static_path(
|
await async_register_static_path(
|
||||||
f"{URL_BASE}/frontend", f"{frontend_path}/hacs_frontend", cache_headers=False
|
hass, f"{URL_BASE}/frontend", f"{frontend_path}/hacs_frontend", cache_headers=False
|
||||||
)
|
|
||||||
elif hacs.configuration.experimental:
|
|
||||||
hacs.log.info("<HacsFrontend> Using experimental frontend")
|
|
||||||
hass.http.register_static_path(
|
|
||||||
f"{URL_BASE}/frontend", experimental_locate_dir(), cache_headers=False
|
|
||||||
)
|
)
|
||||||
|
hacs.frontend_version = "dev"
|
||||||
else:
|
else:
|
||||||
#
|
await async_register_static_path(
|
||||||
hass.http.register_static_path(f"{URL_BASE}/frontend", locate_dir(), cache_headers=False)
|
hass, f"{URL_BASE}/frontend", locate_dir(), cache_headers=False
|
||||||
|
)
|
||||||
|
hacs.frontend_version = FE_VERSION
|
||||||
|
|
||||||
# Custom iconset
|
# Custom iconset
|
||||||
hass.http.register_static_path(
|
await async_register_static_path(
|
||||||
f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
|
hass, f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
|
||||||
)
|
)
|
||||||
add_extra_js_url(hass, f"{URL_BASE}/iconset.js")
|
add_extra_js_url(hass, f"{URL_BASE}/iconset.js")
|
||||||
|
|
||||||
hacs.frontend_version = (
|
|
||||||
FE_VERSION if not hacs.configuration.experimental else EXPERIMENTAL_FE_VERSION
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add to sidepanel if needed
|
# Add to sidepanel if needed
|
||||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||||
hass.components.frontend.async_register_built_in_panel(
|
async_register_built_in_panel(
|
||||||
|
hass,
|
||||||
component_name="custom",
|
component_name="custom",
|
||||||
sidebar_title=hacs.configuration.sidepanel_title,
|
sidebar_title=hacs.configuration.sidepanel_title,
|
||||||
sidebar_icon=hacs.configuration.sidepanel_icon,
|
sidebar_icon=hacs.configuration.sidepanel_icon,
|
||||||
@@ -82,4 +64,4 @@ def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Setup plugin endpoint if needed
|
# Setup plugin endpoint if needed
|
||||||
hacs.async_setup_frontend_endpoint_plugin()
|
await hacs.async_setup_frontend_endpoint_plugin()
|
||||||
|
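The frontend hunks make `async_register_frontend` a coroutine and route every static-path registration through `utils.workarounds.async_register_static_path`, since `hass.http.register_static_path` does blocking I/O in the event loop and is deprecated on newer cores in favour of `async_register_static_paths`. A hedged sketch of what such a compatibility helper can look like; the actual HACS implementation is not shown in this diff, and `StaticPathConfig` only exists on recent Home Assistant releases (the bumped minimum of 2024.4.1 predates it, hence the fallback):

    async def async_register_static_path(hass, url_path, path, cache_headers=True) -> None:
        """Register a static path without blocking the event loop (sketch, not the HACS code)."""
        try:
            # Newer Home Assistant cores expose an async API.
            from homeassistant.components.http import StaticPathConfig

            await hass.http.async_register_static_paths(
                [StaticPathConfig(url_path, path, cache_headers)]
            )
        except ImportError:
            # Older cores only provide the synchronous API.
            hass.http.register_static_path(url_path, path, cache_headers)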
|||||||
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
function t(t){const a=t.language||"en";return t.translationMetadata.translations[a]&&t.translationMetadata.translations[a].isRTL||!1}function a(a){return t(a)?"rtl":"ltr"}export{a,t as c};
|
|
||||||
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
var e="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function o(e){return e&&e.__esModule&&Object.prototype.hasOwnProperty.call(e,"default")?e.default:e}function t(e,o){return e(o={exports:{}},o.exports),o.exports}function n(e){return e&&e.default||e}export{e as a,t as c,n as g,o as u};
|
|
||||||
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
const r=r=>(s,o)=>{if(s.constructor._observers){if(!s.constructor.hasOwnProperty("_observers")){const r=s.constructor._observers;s.constructor._observers=new Map,r.forEach(((r,o)=>s.constructor._observers.set(o,r)))}}else{s.constructor._observers=new Map;const r=s.updated;s.updated=function(s){r.call(this,s),s.forEach(((r,s)=>{const o=this.constructor._observers.get(s);void 0!==o&&o.call(this,this[s],r)}))}}s.constructor._observers.set(o,r)};export{r as o};
|
|
||||||
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,l){var a=String(e).split("."),t=!a[1],o=Number(a[0])==e,n=o&&a[0].slice(-1),r=o&&a[0].slice(-2);return l?1==n&&11!=r?"one":2==n&&12!=r?"two":3==n&&13!=r?"few":"other":1==e&&t?"one":"other"}},locale:"en"});
|
|
||||||
Binary file not shown.
@@ -1,10 +1 @@
|
|||||||
|
!function(){function n(n){var e=document.createElement("script");e.src=n,document.body.appendChild(e)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/hacsfiles/frontend/frontend_es5/entrypoint.4G_vEpsjfjQ.js");else try{new Function("import('/hacsfiles/frontend/frontend_latest/entrypoint.xkDQGhK7H8M.js')")()}catch(e){n("/hacsfiles/frontend/frontend_es5/entrypoint.4G_vEpsjfjQ.js")}}()
|
||||||
try {
|
|
||||||
new Function("import('/hacsfiles/frontend/main-ad130be7.js')")();
|
|
||||||
} catch (err) {
|
|
||||||
var el = document.createElement('script');
|
|
||||||
el.src = '/hacsfiles/frontend/main-ad130be7.js';
|
|
||||||
el.type = 'module';
|
|
||||||
document.body.appendChild(el);
|
|
||||||
}
|
|
||||||
|
|
||||||
Binary file not shown.
@@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"./src/main.ts": "main-ad130be7.js"
|
|
||||||
}
|
|
||||||
@@ -1 +1 @@
|
|||||||
VERSION="20220906112053"
|
VERSION="20240814080035"
|
||||||
@@ -1,6 +1,9 @@
|
|||||||
{
|
{
|
||||||
"domain": "hacs",
|
"domain": "hacs",
|
||||||
"name": "HACS",
|
"name": "HACS",
|
||||||
|
"after_dependencies": [
|
||||||
|
"python_script"
|
||||||
|
],
|
||||||
"codeowners": [
|
"codeowners": [
|
||||||
"@ludeeus"
|
"@ludeeus"
|
||||||
],
|
],
|
||||||
@@ -19,5 +22,5 @@
|
|||||||
"requirements": [
|
"requirements": [
|
||||||
"aiogithubapi>=22.10.1"
|
"aiogithubapi>=22.10.1"
|
||||||
],
|
],
|
||||||
"version": "1.34.0"
|
"version": "2.0.0"
|
||||||
}
|
}
|
||||||
@@ -1,22 +1,21 @@
|
|||||||
"""Initialize repositories."""
|
"""Initialize repositories."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from ..enums import HacsCategory
|
from ..enums import HacsCategory
|
||||||
from .appdaemon import HacsAppdaemonRepository
|
from .appdaemon import HacsAppdaemonRepository
|
||||||
from .base import HacsRepository
|
from .base import HacsRepository
|
||||||
from .integration import HacsIntegrationRepository
|
from .integration import HacsIntegrationRepository
|
||||||
from .netdaemon import HacsNetdaemonRepository
|
|
||||||
from .plugin import HacsPluginRepository
|
from .plugin import HacsPluginRepository
|
||||||
from .python_script import HacsPythonScriptRepository
|
from .python_script import HacsPythonScriptRepository
|
||||||
from .template import HacsTemplateRepository
|
from .template import HacsTemplateRepository
|
||||||
from .theme import HacsThemeRepository
|
from .theme import HacsThemeRepository
|
||||||
|
|
||||||
RERPOSITORY_CLASSES: dict[HacsCategory, HacsRepository] = {
|
REPOSITORY_CLASSES: dict[HacsCategory, HacsRepository] = {
|
||||||
HacsCategory.THEME: HacsThemeRepository,
|
HacsCategory.THEME: HacsThemeRepository,
|
||||||
HacsCategory.INTEGRATION: HacsIntegrationRepository,
|
HacsCategory.INTEGRATION: HacsIntegrationRepository,
|
||||||
HacsCategory.PYTHON_SCRIPT: HacsPythonScriptRepository,
|
HacsCategory.PYTHON_SCRIPT: HacsPythonScriptRepository,
|
||||||
HacsCategory.APPDAEMON: HacsAppdaemonRepository,
|
HacsCategory.APPDAEMON: HacsAppdaemonRepository,
|
||||||
HacsCategory.NETDAEMON: HacsNetdaemonRepository,
|
|
||||||
HacsCategory.PLUGIN: HacsPluginRepository,
|
HacsCategory.PLUGIN: HacsPluginRepository,
|
||||||
HacsCategory.TEMPLATE: HacsTemplateRepository,
|
HacsCategory.TEMPLATE: HacsTemplateRepository,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Class for appdaemon apps in HACS."""
|
"""Class for appdaemon apps in HACS."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
@@ -79,7 +80,7 @@ class HacsAppdaemonRepository(HacsRepository):
|
|||||||
# Set local path
|
# Set local path
|
||||||
self.content.path.local = self.localpath
|
self.content.path.local = self.localpath
|
||||||
|
|
||||||
# Signal entities to refresh
|
# Signal frontend to refresh
|
||||||
if self.data.installed:
|
if self.data.installed:
|
||||||
self.hacs.async_dispatch(
|
self.hacs.async_dispatch(
|
||||||
HacsDispatchEvent.REPOSITORY,
|
HacsDispatchEvent.REPOSITORY,
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
"""Repository."""
|
"""Repository."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from asyncio import sleep
|
from asyncio import sleep
|
||||||
from datetime import datetime
|
from datetime import UTC, datetime
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import shutil
|
import shutil
|
||||||
@@ -20,7 +21,7 @@ import attr
|
|||||||
from homeassistant.helpers import device_registry as dr, issue_registry as ir
|
from homeassistant.helpers import device_registry as dr, issue_registry as ir
|
||||||
|
|
||||||
from ..const import DOMAIN
|
from ..const import DOMAIN
|
||||||
from ..enums import ConfigurationType, HacsDispatchEvent, RepositoryFile
|
from ..enums import HacsDispatchEvent, RepositoryFile
|
||||||
from ..exceptions import (
|
from ..exceptions import (
|
||||||
HacsException,
|
HacsException,
|
||||||
HacsNotModifiedException,
|
HacsNotModifiedException,
|
||||||
@@ -28,16 +29,17 @@ from ..exceptions import (
|
|||||||
HacsRepositoryExistException,
|
HacsRepositoryExistException,
|
||||||
)
|
)
|
||||||
from ..types import DownloadableContent
|
from ..types import DownloadableContent
|
||||||
from ..utils.backup import Backup, BackupNetDaemon
|
from ..utils.backup import Backup
|
||||||
from ..utils.decode import decode_content
|
from ..utils.decode import decode_content
|
||||||
from ..utils.decorator import concurrent
|
from ..utils.decorator import concurrent
|
||||||
|
from ..utils.file_system import async_exists, async_remove, async_remove_directory
|
||||||
from ..utils.filters import filter_content_return_one_of_type
|
from ..utils.filters import filter_content_return_one_of_type
|
||||||
|
from ..utils.github_graphql_query import GET_REPOSITORY_RELEASES
|
||||||
from ..utils.json import json_loads
|
from ..utils.json import json_loads
|
||||||
from ..utils.logger import LOGGER
|
from ..utils.logger import LOGGER
|
||||||
from ..utils.path import is_safe
|
from ..utils.path import is_safe
|
||||||
from ..utils.queue_manager import QueueManager
|
from ..utils.queue_manager import QueueManager
|
||||||
from ..utils.store import async_remove_store
|
from ..utils.store import async_remove_store
|
||||||
from ..utils.template import render_template
|
|
||||||
from ..utils.url import github_archive, github_release_asset
|
from ..utils.url import github_archive, github_release_asset
|
||||||
from ..utils.validate import Validate
|
from ..utils.validate import Validate
|
||||||
from ..utils.version import (
|
from ..utils.version import (
|
||||||
@@ -84,7 +86,6 @@ TOPIC_FILTER = (
|
|||||||
"lovelace",
|
"lovelace",
|
||||||
"media-player",
|
"media-player",
|
||||||
"mediaplayer",
|
"mediaplayer",
|
||||||
"netdaemon",
|
|
||||||
"plugin",
|
"plugin",
|
||||||
"python_script",
|
"python_script",
|
||||||
"python-script",
|
"python-script",
|
||||||
@@ -113,6 +114,7 @@ REPOSITORY_KEYS_TO_EXPORT = (
|
|||||||
("last_version", None),
|
("last_version", None),
|
||||||
("manifest_name", None),
|
("manifest_name", None),
|
||||||
("open_issues", 0),
|
("open_issues", 0),
|
||||||
|
("prerelease", None),
|
||||||
("stargazers_count", 0),
|
("stargazers_count", 0),
|
||||||
("topics", []),
|
("topics", []),
|
||||||
)
|
)
|
||||||
@@ -164,6 +166,7 @@ class RepositoryData:
|
|||||||
manifest_name: str = None
|
manifest_name: str = None
|
||||||
new: bool = True
|
new: bool = True
|
||||||
open_issues: int = 0
|
open_issues: int = 0
|
||||||
|
prerelease: str = None
|
||||||
published_tags: list[str] = []
|
published_tags: list[str] = []
|
||||||
releases: bool = False
|
releases: bool = False
|
||||||
selected_tag: str = None
|
selected_tag: str = None
|
||||||
@@ -174,7 +177,7 @@ class RepositoryData:
|
|||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self):
|
||||||
"""Return the name."""
|
"""Return the name."""
|
||||||
if self.category in ["integration", "netdaemon"]:
|
if self.category == "integration":
|
||||||
return self.domain
|
return self.domain
|
||||||
return self.full_name.split("/")[-1]
|
return self.full_name.split("/")[-1]
|
||||||
|
|
||||||
@@ -196,7 +199,7 @@ class RepositoryData:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if key == "last_fetched" and isinstance(value, float):
|
if key == "last_fetched" and isinstance(value, float):
|
||||||
setattr(self, key, datetime.fromtimestamp(value))
|
setattr(self, key, datetime.fromtimestamp(value, UTC))
|
||||||
elif key == "id":
|
elif key == "id":
|
||||||
setattr(self, key, str(value))
|
setattr(self, key, str(value))
|
||||||
elif key == "country":
|
elif key == "country":
|
||||||
@@ -384,7 +387,9 @@ class HacsRepository:
|
|||||||
@property
|
@property
|
||||||
def display_available_version(self) -> str:
|
def display_available_version(self) -> str:
|
||||||
"""Return display_authors"""
|
"""Return display_authors"""
|
||||||
if self.data.last_version is not None:
|
if self.data.show_beta and self.data.prerelease is not None:
|
||||||
|
available = self.data.prerelease
|
||||||
|
elif self.data.last_version is not None:
|
||||||
available = self.data.last_version
|
available = self.data.last_version
|
||||||
else:
|
else:
|
||||||
if self.data.last_commit is not None:
|
if self.data.last_commit is not None:
|
||||||
@@ -501,13 +506,7 @@ class HacsRepository:
|
|||||||
|
|
||||||
if self.repository_object:
|
if self.repository_object:
|
||||||
self.data.last_updated = self.repository_object.attributes.get("pushed_at", 0)
|
self.data.last_updated = self.repository_object.attributes.get("pushed_at", 0)
|
||||||
self.data.last_fetched = datetime.utcnow()
|
self.data.last_fetched = datetime.now(UTC)
|
||||||
|
|
||||||
# Set topics
|
|
||||||
self.data.topics = self.data.topics
|
|
||||||
|
|
||||||
# Set description
|
|
||||||
self.data.description = self.data.description
|
|
||||||
|
|
||||||
@concurrent(concurrenttasks=10, backoff_time=5)
|
@concurrent(concurrenttasks=10, backoff_time=5)
|
||||||
async def common_update(self, ignore_issues=False, force=False, skip_releases=False) -> bool:
|
async def common_update(self, ignore_issues=False, force=False, skip_releases=False) -> bool:
|
||||||
@@ -555,7 +554,7 @@ class HacsRepository:
|
|||||||
self.additional_info = await self.async_get_info_file_contents()
|
self.additional_info = await self.async_get_info_file_contents()
|
||||||
|
|
||||||
# Set last fetch attribute
|
# Set last fetch attribute
|
||||||
self.data.last_fetched = datetime.utcnow()
|
self.data.last_fetched = datetime.now(UTC)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -574,9 +573,11 @@ class HacsRepository:
|
|||||||
),
|
),
|
||||||
validate,
|
validate,
|
||||||
)
|
)
|
||||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||||
|
except BaseException:
|
||||||
validate.errors.append(
|
validate.errors.append(
|
||||||
f"Download of {self.repository_manifest.filename} was not completed"
|
f"Download of {
|
||||||
|
self.repository_manifest.filename} was not completed"
|
||||||
)
|
)
|
||||||
|
|
||||||
async def async_download_zip_file(
|
async def async_download_zip_file(
|
||||||
@@ -596,8 +597,12 @@ class HacsRepository:
|
|||||||
temp_file = f"{temp_dir}/{self.repository_manifest.filename}"
|
temp_file = f"{temp_dir}/{self.repository_manifest.filename}"
|
||||||
|
|
||||||
result = await self.hacs.async_save_file(temp_file, filecontent)
|
result = await self.hacs.async_save_file(temp_file, filecontent)
|
||||||
with zipfile.ZipFile(temp_file, "r") as zip_file:
|
|
||||||
zip_file.extractall(self.content.path.local)
|
def _extract_zip_file():
|
||||||
|
with zipfile.ZipFile(temp_file, "r") as zip_file:
|
||||||
|
zip_file.extractall(self.content.path.local)
|
||||||
|
|
||||||
|
await self.hacs.hass.async_add_executor_job(_extract_zip_file)
|
||||||
|
|
||||||
def cleanup_temp_dir():
|
def cleanup_temp_dir():
|
||||||
"""Cleanup temp_dir."""
|
"""Cleanup temp_dir."""
|
||||||
@@ -611,24 +616,24 @@ class HacsRepository:
|
|||||||
return
|
return
|
||||||
|
|
||||||
validate.errors.append(f"[{content['name']}] was not downloaded")
|
validate.errors.append(f"[{content['name']}] was not downloaded")
|
||||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||||
|
except BaseException:
|
||||||
validate.errors.append("Download was not completed")
|
validate.errors.append("Download was not completed")
|
||||||
|
|
||||||
async def download_content(self, version: string | None = None) -> None:
|
async def download_content(self, version: string | None = None) -> None:
|
||||||
"""Download the content of a directory."""
|
"""Download the content of a directory."""
|
||||||
contents: list[FileInformation] | None = None
|
contents: list[FileInformation] | None = None
|
||||||
if self.hacs.configuration.experimental:
|
if (
|
||||||
if (
|
not self.repository_manifest.zip_release
|
||||||
not self.repository_manifest.zip_release
|
and not self.data.file_name
|
||||||
and not self.data.file_name
|
and self.content.path.remote is not None
|
||||||
and self.content.path.remote is not None
|
):
|
||||||
):
|
self.logger.info("%s Downloading repository archive", self.string)
|
||||||
self.logger.info("%s Trying experimental download", self.string)
|
try:
|
||||||
try:
|
await self.download_repository_zip()
|
||||||
await self.download_repository_zip()
|
return
|
||||||
return
|
except HacsException as exception:
|
||||||
except HacsException as exception:
|
self.logger.exception(exception)
|
||||||
self.logger.exception(exception)
|
|
||||||
|
|
||||||
if self.repository_manifest.filename:
|
if self.repository_manifest.filename:
|
||||||
self.logger.debug("%s %s", self.string, self.repository_manifest.filename)
|
self.logger.debug("%s %s", self.string, self.repository_manifest.filename)
|
||||||
@@ -679,23 +684,26 @@ class HacsRepository:
|
|||||||
if not result:
|
if not result:
|
||||||
raise HacsException("Could not save ZIP file")
|
raise HacsException("Could not save ZIP file")
|
||||||
|
|
||||||
with zipfile.ZipFile(temp_file, "r") as zip_file:
|
def _extract_zip_file():
|
||||||
extractable = []
|
with zipfile.ZipFile(temp_file, "r") as zip_file:
|
||||||
for path in zip_file.filelist:
|
extractable = []
|
||||||
filename = "/".join(path.filename.split("/")[1:])
|
for path in zip_file.filelist:
|
||||||
if (
|
filename = "/".join(path.filename.split("/")[1:])
|
||||||
filename.startswith(self.content.path.remote)
|
if (
|
||||||
and filename != self.content.path.remote
|
filename.startswith(self.content.path.remote)
|
||||||
):
|
and filename != self.content.path.remote
|
||||||
path.filename = filename.replace(self.content.path.remote, "")
|
):
|
||||||
if path.filename == "/":
|
path.filename = filename.replace(self.content.path.remote, "")
|
||||||
# Blank files are not valid, and will start to throw in Python 3.12
|
if path.filename == "/":
|
||||||
continue
|
# Blank files are not valid, and will start to throw in Python 3.12
|
||||||
extractable.append(path)
|
continue
|
||||||
|
extractable.append(path)
|
||||||
|
|
||||||
if len(extractable) == 0:
|
if len(extractable) == 0:
|
||||||
raise HacsException("No content to extract")
|
raise HacsException("No content to extract")
|
||||||
zip_file.extractall(self.content.path.local, extractable)
|
zip_file.extractall(self.content.path.local, extractable)
|
||||||
|
|
||||||
|
await self.hacs.hass.async_add_executor_job(_extract_zip_file)
|
||||||
|
|
||||||
def cleanup_temp_dir():
|
def cleanup_temp_dir():
|
||||||
"""Cleanup temp_dir."""
|
"""Cleanup temp_dir."""
|
||||||
@@ -718,18 +726,15 @@ class HacsRepository:
|
|||||||
)
|
)
|
||||||
if response:
|
if response:
|
||||||
return json_loads(decode_content(response.data.content))
|
return json_loads(decode_content(response.data.content))
|
||||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||||
|
except BaseException:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
async def async_get_info_file_contents(self) -> str:
|
async def async_get_info_file_contents(self, *, version: str | None = None, **kwargs) -> str:
|
||||||
"""Get the content of the info.md file."""
|
"""Get the content of the info.md file."""
|
||||||
|
|
||||||
def _info_file_variants() -> tuple[str, ...]:
|
def _info_file_variants() -> tuple[str, ...]:
|
||||||
name: str = (
|
name: str = "readme"
|
||||||
"readme"
|
|
||||||
if self.repository_manifest.render_readme or self.hacs.configuration.experimental
|
|
||||||
else "info"
|
|
||||||
)
|
|
||||||
return (
|
return (
|
||||||
f"{name.upper()}.md",
|
f"{name.upper()}.md",
|
||||||
f"{name}.md",
|
f"{name}.md",
|
||||||
@@ -744,7 +749,7 @@ class HacsRepository:
|
|||||||
if not info_files:
|
if not info_files:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
return await self.get_documentation(filename=info_files[0]) or ""
|
return await self.get_documentation(filename=info_files[0], version=version) or ""
|
||||||
|
|
||||||
def remove(self) -> None:
|
def remove(self) -> None:
|
||||||
"""Run remove tasks."""
|
"""Run remove tasks."""
|
||||||
@@ -758,19 +763,7 @@ class HacsRepository:
|
|||||||
if not await self.remove_local_directory():
|
if not await self.remove_local_directory():
|
||||||
raise HacsException("Could not uninstall")
|
raise HacsException("Could not uninstall")
|
||||||
self.data.installed = False
|
self.data.installed = False
|
||||||
if self.data.category == "integration":
|
await self._async_post_uninstall()
|
||||||
if self.data.config_flow:
|
|
||||||
await self.reload_custom_components()
|
|
||||||
else:
|
|
||||||
self.pending_restart = True
|
|
||||||
elif self.data.category == "theme":
|
|
||||||
try:
|
|
||||||
await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
|
|
||||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
|
||||||
pass
|
|
||||||
elif self.data.category == "template":
|
|
||||||
await self.hacs.hass.services.async_call("homeassistant", "reload_custom_templates", {})
|
|
||||||
|
|
||||||
await async_remove_store(self.hacs.hass, f"hacs/{self.data.id}.hacs")
|
await async_remove_store(self.hacs.hass, f"hacs/{self.data.id}.hacs")
|
||||||
|
|
||||||
self.data.installed_version = None
|
self.data.installed_version = None
|
||||||
@@ -802,8 +795,7 @@ class HacsRepository:
|
|||||||
f"{self.hacs.configuration.theme_path}/"
|
f"{self.hacs.configuration.theme_path}/"
|
||||||
f"{self.data.name}.yaml"
|
f"{self.data.name}.yaml"
|
||||||
)
|
)
|
||||||
if os.path.exists(path):
|
await async_remove(self.hacs.hass, path, missing_ok=True)
|
||||||
os.remove(path)
|
|
||||||
local_path = self.content.path.local
|
local_path = self.content.path.local
|
||||||
elif self.data.category == "integration":
|
elif self.data.category == "integration":
|
||||||
if not self.data.domain:
|
if not self.data.domain:
|
||||||
@@ -817,18 +809,18 @@ class HacsRepository:
|
|||||||
else:
|
else:
|
||||||
local_path = self.content.path.local
|
local_path = self.content.path.local
|
||||||
|
|
||||||
if os.path.exists(local_path):
|
if await async_exists(self.hacs.hass, local_path):
|
||||||
if not is_safe(self.hacs, local_path):
|
if not is_safe(self.hacs, local_path):
|
||||||
self.logger.error("%s Path %s is blocked from removal", self.string, local_path)
|
self.logger.error("%s Path %s is blocked from removal", self.string, local_path)
|
||||||
return False
|
return False
|
||||||
self.logger.debug("%s Removing %s", self.string, local_path)
|
self.logger.debug("%s Removing %s", self.string, local_path)
|
||||||
|
|
||||||
if self.data.category in ["python_script", "template"]:
|
if self.data.category in ["python_script", "template"]:
|
||||||
os.remove(local_path)
|
await async_remove(self.hacs.hass, local_path)
|
||||||
else:
|
else:
|
||||||
shutil.rmtree(local_path)
|
await async_remove_directory(self.hacs.hass, local_path)
|
||||||
|
|
||||||
while os.path.exists(local_path):
|
while await async_exists(self.hacs.hass, local_path):
|
||||||
await sleep(1)
|
await sleep(1)
|
||||||
else:
|
else:
|
||||||
self.logger.debug(
|
self.logger.debug(
|
||||||
@@ -836,7 +828,8 @@ class HacsRepository:
|
|||||||
)
|
)
|
||||||
|
|
||||||
except (
|
except (
|
||||||
BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||||
|
BaseException
|
||||||
) as exception:
|
) as exception:
|
||||||
self.logger.debug("%s Removing %s failed with %s", self.string, local_path, exception)
|
self.logger.debug("%s Removing %s failed with %s", self.string, local_path, exception)
|
||||||
return False
|
return False
|
||||||
@@ -905,6 +898,13 @@ class HacsRepository:
|
|||||||
async def async_post_installation(self) -> None:
|
async def async_post_installation(self) -> None:
|
||||||
"""Run post install steps."""
|
"""Run post install steps."""
|
||||||
|
|
||||||
|
async def async_post_uninstall(self):
|
||||||
|
"""Run post uninstall steps."""
|
||||||
|
|
||||||
|
async def _async_post_uninstall(self):
|
||||||
|
"""Run post uninstall steps."""
|
||||||
|
await self.async_post_uninstall()
|
||||||
|
|
||||||
async def _async_post_install(self) -> None:
|
async def _async_post_install(self) -> None:
|
||||||
"""Run post install steps."""
|
"""Run post install steps."""
|
||||||
self.logger.info("%s Running post installation steps", self.string)
|
self.logger.info("%s Running post installation steps", self.string)
|
||||||
@@ -943,17 +943,15 @@ class HacsRepository:
|
|||||||
{"repository": self.data.full_name, "progress": 40},
|
{"repository": self.data.full_name, "progress": 40},
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.data.installed and self.data.category == "netdaemon":
|
if self.repository_manifest.persistent_directory:
|
||||||
persistent_directory = BackupNetDaemon(hacs=self.hacs, repository=self)
|
if await async_exists(
|
||||||
await self.hacs.hass.async_add_executor_job(persistent_directory.create)
|
self.hacs.hass,
|
||||||
|
f"{self.content.path.local}/{self.repository_manifest.persistent_directory}",
|
||||||
elif self.repository_manifest.persistent_directory:
|
|
||||||
if os.path.exists(
|
|
||||||
f"{self.content.path.local}/{self.repository_manifest.persistent_directory}"
|
|
||||||
):
|
):
|
||||||
persistent_directory = Backup(
|
persistent_directory = Backup(
|
||||||
hacs=self.hacs,
|
hacs=self.hacs,
|
||||||
local_path=f"{self.content.path.local}/{self.repository_manifest.persistent_directory}",
|
local_path=f"{
|
||||||
|
self.content.path.local}/{self.repository_manifest.persistent_directory}",
|
||||||
backup_path=tempfile.gettempdir() + "/hacs_persistent_directory/",
|
backup_path=tempfile.gettempdir() + "/hacs_persistent_directory/",
|
||||||
)
|
)
|
||||||
await self.hacs.hass.async_add_executor_job(persistent_directory.create)
|
await self.hacs.hass.async_add_executor_job(persistent_directory.create)
|
||||||
@@ -1066,9 +1064,9 @@ class HacsRepository:
|
|||||||
)
|
)
|
||||||
self.repository_object = repository_object
|
self.repository_object = repository_object
|
||||||
if self.data.full_name.lower() != repository_object.full_name.lower():
|
if self.data.full_name.lower() != repository_object.full_name.lower():
|
||||||
self.hacs.common.renamed_repositories[
|
self.hacs.common.renamed_repositories[self.data.full_name] = (
|
||||||
self.data.full_name
|
repository_object.full_name
|
||||||
] = repository_object.full_name
|
)
|
||||||
if not self.hacs.system.generator:
|
if not self.hacs.system.generator:
|
||||||
raise HacsRepositoryExistException
|
raise HacsRepositoryExistException
|
||||||
self.logger.error(
|
self.logger.error(
|
||||||
@@ -1084,7 +1082,7 @@ class HacsRepository:
|
|||||||
except HacsRepositoryExistException:
|
except HacsRepositoryExistException:
|
||||||
raise HacsRepositoryExistException from None
|
raise HacsRepositoryExistException from None
|
||||||
except (AIOGitHubAPIException, HacsException) as exception:
|
except (AIOGitHubAPIException, HacsException) as exception:
|
||||||
if not self.hacs.status.startup:
|
if not self.hacs.status.startup or self.hacs.system.generator:
|
||||||
self.logger.error("%s %s", self.string, exception)
|
self.logger.error("%s %s", self.string, exception)
|
||||||
if not ignore_issues:
|
if not ignore_issues:
|
||||||
self.validate.errors.append("Repository does not exist.")
|
self.validate.errors.append("Repository does not exist.")
|
||||||
@@ -1107,15 +1105,28 @@ class HacsRepository:
|
|||||||
# Get releases.
|
# Get releases.
|
||||||
if not skip_releases:
|
if not skip_releases:
|
||||||
try:
|
try:
|
||||||
releases = await self.get_releases(
|
releases = await self.get_releases(prerelease=True, returnlimit=30)
|
||||||
prerelease=self.data.show_beta,
|
|
||||||
returnlimit=self.hacs.configuration.release_limit,
|
|
||||||
)
|
|
||||||
if releases:
|
if releases:
|
||||||
|
self.data.prerelease = None
|
||||||
|
for release in releases:
|
||||||
|
if release.draft:
|
||||||
|
continue
|
||||||
|
elif release.prerelease:
|
||||||
|
if self.data.prerelease is None:
|
||||||
|
self.data.prerelease = release.tag_name
|
||||||
|
else:
|
||||||
|
self.data.last_version = release.tag_name
|
||||||
|
break
|
||||||
|
|
||||||
self.data.releases = True
|
self.data.releases = True
|
||||||
self.releases.objects = releases
|
|
||||||
self.data.published_tags = [x.tag_name for x in self.releases.objects]
|
filtered_releases = [
|
||||||
self.data.last_version = next(iter(self.data.published_tags))
|
release
|
||||||
|
for release in releases
|
||||||
|
if not release.draft and (self.data.show_beta or not release.prerelease)
|
||||||
|
]
|
||||||
|
self.releases.objects = filtered_releases
|
||||||
|
self.data.published_tags = [x.tag_name for x in filtered_releases]
|
||||||
|
|
||||||
except HacsException:
|
except HacsException:
|
||||||
self.data.releases = False
|
self.data.releases = False
|
||||||
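The release hunk above now always requests 30 releases including prereleases, then walks them newest-first: drafts are skipped, the first prerelease is remembered in `data.prerelease`, and the first stable release becomes `last_version`, while the published-tag list still honours `show_beta`. A standalone sketch of that selection logic, using a small stand-in dataclass instead of the aiogithubapi release model:

    from dataclasses import dataclass

    @dataclass
    class Release:                      # hypothetical stand-in for GitHubReleaseModel
        tag_name: str
        draft: bool = False
        prerelease: bool = False

    def pick_versions(releases: list[Release]) -> tuple[str | None, str | None]:
        """Return (first prerelease tag, first stable tag), newest release first."""
        prerelease = last_version = None
        for release in releases:
            if release.draft:
                continue
            if release.prerelease:
                if prerelease is None:
                    prerelease = release.tag_name
            else:
                last_version = release.tag_name
                break                   # newest stable release found, stop looking
        return prerelease, last_version

    releases = [Release("2.0.0b3", prerelease=True), Release("2.0.0b2", draft=True), Release("1.34.0")]
    print(pick_versions(releases))      # ('2.0.0b3', '1.34.0')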
@@ -1280,18 +1291,13 @@ class HacsRepository:
|
|||||||
self.validate.errors.append(f"[{content.name}] was not downloaded.")
|
self.validate.errors.append(f"[{content.name}] was not downloaded.")
|
||||||
|
|
||||||
except (
|
except (
|
||||||
BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
# lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||||
|
BaseException
|
||||||
) as exception:
|
) as exception:
|
||||||
self.validate.errors.append(f"Download was not completed [{exception}]")
|
self.validate.errors.append(f"Download was not completed [{exception}]")
|
||||||
|
|
||||||
async def async_remove_entity_device(self) -> None:
|
async def async_remove_entity_device(self) -> None:
|
||||||
"""Remove the entity device."""
|
"""Remove the entity device."""
|
||||||
if (
|
|
||||||
self.hacs.configuration == ConfigurationType.YAML
|
|
||||||
or not self.hacs.configuration.experimental
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
device_registry: dr.DeviceRegistry = dr.async_get(hass=self.hacs.hass)
|
device_registry: dr.DeviceRegistry = dr.async_get(hass=self.hacs.hass)
|
||||||
device = device_registry.async_get_device(identifiers={(DOMAIN, str(self.data.id))})
|
device = device_registry.async_get_device(identifiers={(DOMAIN, str(self.data.id))})
|
||||||
|
|
||||||
@@ -1322,39 +1328,39 @@ class HacsRepository:
|
|||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
filename: str | None = None,
|
filename: str | None = None,
|
||||||
|
version: str | None = None,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> str | None:
|
) -> str | None:
|
||||||
"""Get the documentation of the repository."""
|
"""Get the documentation of the repository."""
|
||||||
if filename is None:
|
if filename is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
version = (
|
if version is not None:
|
||||||
(self.data.installed_version or self.data.installed_commit)
|
target_version = version
|
||||||
if self.data.installed
|
elif self.data.installed:
|
||||||
else (self.data.last_version or self.data.last_commit or self.ref)
|
target_version = self.data.installed_version or self.data.installed_commit
|
||||||
)
|
else:
|
||||||
|
target_version = self.data.last_version or self.data.last_commit or self.ref
|
||||||
|
|
||||||
self.logger.debug(
|
self.logger.debug(
|
||||||
"%s Getting documentation for version=%s,filename=%s",
|
"%s Getting documentation for version=%s,filename=%s",
|
||||||
self.string,
|
self.string,
|
||||||
version,
|
target_version,
|
||||||
filename,
|
filename,
|
||||||
)
|
)
|
||||||
if version is None:
|
if target_version is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
result = await self.hacs.async_download_file(
|
result = await self.hacs.async_download_file(
|
||||||
f"https://raw.githubusercontent.com/{self.data.full_name}/{version}/{filename}",
|
f"https://raw.githubusercontent.com/{
|
||||||
|
self.data.full_name}/{target_version}/{filename}",
|
||||||
nolog=True,
|
nolog=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
return (
|
return (
|
||||||
render_template(
|
result.decode(encoding="utf-8")
|
||||||
self.hacs,
|
.replace("<svg", "<disabled")
|
||||||
result.decode(encoding="utf-8")
|
.replace("</svg", "</disabled")
|
||||||
.replace("<svg", "<disabled")
|
|
||||||
.replace("</svg", "</disabled"),
|
|
||||||
self,
|
|
||||||
)
|
|
||||||
if result
|
if result
|
||||||
else None
|
else None
|
||||||
)
|
)
|
||||||
@@ -1364,7 +1370,8 @@ class HacsRepository:
|
|||||||
self.logger.debug("%s Getting hacs.json for version=%s", self.string, version)
|
self.logger.debug("%s Getting hacs.json for version=%s", self.string, version)
|
||||||
try:
|
try:
|
||||||
result = await self.hacs.async_download_file(
|
result = await self.hacs.async_download_file(
|
||||||
f"https://raw.githubusercontent.com/{self.data.full_name}/{version}/hacs.json",
|
f"https://raw.githubusercontent.com/{
|
||||||
|
self.data.full_name}/{version}/hacs.json",
|
||||||
nolog=True,
|
nolog=True,
|
||||||
)
|
)
|
||||||
if result is None:
|
if result is None:
|
||||||
@@ -1372,3 +1379,81 @@ class HacsRepository:
|
|||||||
return HacsManifest.from_dict(json_loads(result))
|
return HacsManifest.from_dict(json_loads(result))
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
async def _ensure_download_capabilities(self, ref: str | None, **kwargs: Any) -> None:
|
||||||
|
"""Ensure that the download can be handled."""
|
||||||
|
target_manifest: HacsManifest | None = None
|
||||||
|
if ref is None:
|
||||||
|
if not self.can_download:
|
||||||
|
raise HacsException(
|
||||||
|
f"This {
|
||||||
|
self.data.category.value} is not available for download."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if ref == self.data.last_version:
|
||||||
|
target_manifest = self.repository_manifest
|
||||||
|
else:
|
||||||
|
target_manifest = await self.get_hacs_json(version=ref)
|
||||||
|
|
||||||
|
if target_manifest is None:
|
||||||
|
raise HacsException(
|
||||||
|
f"The version {ref} for this {
|
||||||
|
self.data.category.value} can not be used with HACS."
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
target_manifest.homeassistant is not None
|
||||||
|
and self.hacs.core.ha_version < target_manifest.homeassistant
|
||||||
|
):
|
||||||
|
raise HacsException(
|
||||||
|
f"This version requires Home Assistant {
|
||||||
|
target_manifest.homeassistant} or newer."
|
||||||
|
)
|
||||||
|
if target_manifest.hacs is not None and self.hacs.version < target_manifest.hacs:
|
||||||
|
raise HacsException(f"This version requires HACS {
|
||||||
|
target_manifest.hacs} or newer.")
|
||||||
|
|
||||||
|
async def async_download_repository(self, *, ref: str | None = None, **_) -> None:
|
||||||
|
"""Download the content of a repository."""
|
||||||
|
await self._ensure_download_capabilities(ref)
|
||||||
|
self.logger.info("Starting download, %s", ref)
|
||||||
|
if self.display_version_or_commit == "version":
|
||||||
|
self.hacs.async_dispatch(
|
||||||
|
HacsDispatchEvent.REPOSITORY_DOWNLOAD_PROGRESS,
|
||||||
|
{"repository": self.data.full_name, "progress": 10},
|
||||||
|
)
|
||||||
|
if not ref:
|
||||||
|
await self.update_repository(force=True)
|
||||||
|
else:
|
||||||
|
self.ref = ref
|
||||||
|
self.data.selected_tag = ref
|
||||||
|
self.force_branch = ref is not None
|
||||||
|
self.hacs.async_dispatch(
|
||||||
|
HacsDispatchEvent.REPOSITORY_DOWNLOAD_PROGRESS,
|
||||||
|
{"repository": self.data.full_name, "progress": 20},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
await self.async_install(version=ref)
|
||||||
|
except HacsException as exception:
|
||||||
|
raise HacsException(
|
||||||
|
f"Downloading {self.data.full_name} with version {
|
||||||
|
ref or self.data.last_version or self.data.last_commit} failed with ({exception})"
|
||||||
|
) from exception
|
||||||
|
finally:
|
||||||
|
self.data.selected_tag = None
|
||||||
|
self.force_branch = False
|
||||||
|
self.hacs.async_dispatch(
|
||||||
|
HacsDispatchEvent.REPOSITORY_DOWNLOAD_PROGRESS,
|
||||||
|
{"repository": self.data.full_name, "progress": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_get_releases(self, *, first: int = 30) -> list[GitHubReleaseModel]:
|
||||||
|
"""Get the last x releases of a repository."""
|
||||||
|
response = await self.hacs.async_github_api_method(
|
||||||
|
method=self.hacs.githubapi.repos.releases.list,
|
||||||
|
repository=self.data.full_name,
|
||||||
|
kwargs={"per_page": 30},
|
||||||
|
)
|
||||||
|
return response.data
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
 """Class for integrations in HACS."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
@@ -45,7 +46,7 @@ class HacsIntegrationRepository(HacsRepository):
         if self.data.first_install:
             self.pending_restart = False
 
-        if self.pending_restart and self.hacs.configuration.experimental:
+        if self.pending_restart:
             self.logger.debug("%s Creating restart_required issue", self.string)
             async_create_issue(
                 hass=self.hacs.hass,
@@ -60,6 +61,13 @@ class HacsIntegrationRepository(HacsRepository):
             },
         )
 
+    async def async_post_uninstall(self) -> None:
+        """Run post uninstall steps."""
+        if self.data.config_flow:
+            await self.reload_custom_components()
+        else:
+            self.pending_restart = True
+
     async def validate_repository(self):
         """Validate."""
         await self.common_validate()
@@ -78,7 +86,8 @@ class HacsIntegrationRepository(HacsRepository):
             ):
                 raise AddonRepositoryException()
             raise HacsException(
-                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
+                f"{self.string} Repository structure for {
+                    self.ref.replace('tags/', '')} is not compliant"
             )
         self.content.path.remote = f"custom_components/{name}"
 
@@ -93,7 +102,8 @@ class HacsIntegrationRepository(HacsRepository):
 
         except KeyError as exception:
             self.validate.errors.append(
-                f"Missing expected key '{exception}' in {RepositoryFile.MAINIFEST_JSON}"
+                f"Missing expected key '{exception}' in {
+                    RepositoryFile.MAINIFEST_JSON}"
             )
             self.hacs.log.error(
                 "Missing expected key '%s' in '%s'", exception, RepositoryFile.MAINIFEST_JSON
@@ -133,7 +143,8 @@ class HacsIntegrationRepository(HacsRepository):
 
         except KeyError as exception:
             self.validate.errors.append(
-                f"Missing expected key '{exception}' in {RepositoryFile.MAINIFEST_JSON}"
+                f"Missing expected key '{exception}' in {
+                    RepositoryFile.MAINIFEST_JSON}"
            )
             self.hacs.log.error(
                 "Missing expected key '%s' in '%s'", exception, RepositoryFile.MAINIFEST_JSON
@@ -142,7 +153,7 @@ class HacsIntegrationRepository(HacsRepository):
         # Set local path
         self.content.path.local = self.localpath
 
-        # Signal entities to refresh
+        # Signal frontend to refresh
         if self.data.installed:
             self.hacs.async_dispatch(
                 HacsDispatchEvent.REPOSITORY,
@@ -180,3 +191,27 @@ class HacsIntegrationRepository(HacsRepository):
         )
         if response:
             return json_loads(decode_content(response.data.content))
+
+    async def get_integration_manifest(self, *, version: str, **kwargs) -> dict[str, Any] | None:
+        """Get the content of the manifest.json file."""
+        manifest_path = (
+            "manifest.json"
+            if self.repository_manifest.content_in_root
+            else f"{self.content.path.remote}/{RepositoryFile.MAINIFEST_JSON}"
+        )
+
+        if manifest_path not in (x.full_path for x in self.tree):
+            raise HacsException(f"No {RepositoryFile.MAINIFEST_JSON} file found '{manifest_path}'")
+
+        self.logger.debug("%s Getting manifest.json for version=%s", self.string, version)
+        try:
+            result = await self.hacs.async_download_file(
+                f"https://raw.githubusercontent.com/{
+                    self.data.full_name}/{version}/{manifest_path}",
+                nolog=True,
+            )
+            if result is None:
+                return None
+            return json_loads(result)
+        except Exception:  # pylint: disable=broad-except
+            return None
@@ -1,104 +0,0 @@
-"""Class for netdaemon apps in HACS."""
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from ..enums import HacsCategory, HacsDispatchEvent
-from ..exceptions import HacsException
-from ..utils import filters
-from ..utils.decorator import concurrent
-from .base import HacsRepository
-
-if TYPE_CHECKING:
-    from ..base import HacsBase
-
-
-class HacsNetdaemonRepository(HacsRepository):
-    """Netdaemon apps in HACS."""
-
-    def __init__(self, hacs: HacsBase, full_name: str):
-        """Initialize."""
-        super().__init__(hacs=hacs)
-        self.data.full_name = full_name
-        self.data.full_name_lower = full_name.lower()
-        self.data.category = HacsCategory.NETDAEMON
-        self.content.path.local = self.localpath
-        self.content.path.remote = "apps"
-
-    @property
-    def localpath(self):
-        """Return localpath."""
-        return f"{self.hacs.core.config_path}/netdaemon/apps/{self.data.name}"
-
-    async def validate_repository(self):
-        """Validate."""
-        await self.common_validate()
-
-        # Custom step 1: Validate content.
-        if self.repository_manifest:
-            if self.repository_manifest.content_in_root:
-                self.content.path.remote = ""
-
-        if self.content.path.remote == "apps":
-            self.data.domain = filters.get_first_directory_in_directory(
-                self.tree, self.content.path.remote
-            )
-            self.content.path.remote = f"apps/{self.data.name}"
-
-        compliant = False
-        for treefile in self.treefiles:
-            if treefile.startswith(f"{self.content.path.remote}") and treefile.endswith(".cs"):
-                compliant = True
-                break
-        if not compliant:
-            raise HacsException(
-                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
-            )
-
-        # Handle potential errors
-        if self.validate.errors:
-            for error in self.validate.errors:
-                if not self.hacs.status.startup:
-                    self.logger.error("%s %s", self.string, error)
-        return self.validate.success
-
-    @concurrent(concurrenttasks=10, backoff_time=5)
-    async def update_repository(self, ignore_issues=False, force=False):
-        """Update."""
-        if not await self.common_update(ignore_issues, force) and not force:
-            return
-
-        # Get appdaemon objects.
-        if self.repository_manifest:
-            if self.repository_manifest.content_in_root:
-                self.content.path.remote = ""
-
-        if self.content.path.remote == "apps":
-            self.data.domain = filters.get_first_directory_in_directory(
-                self.tree, self.content.path.remote
-            )
-            self.content.path.remote = f"apps/{self.data.name}"
-
-        # Set local path
-        self.content.path.local = self.localpath
-
-        # Signal entities to refresh
-        if self.data.installed:
-            self.hacs.async_dispatch(
-                HacsDispatchEvent.REPOSITORY,
-                {
-                    "id": 1337,
-                    "action": "update",
-                    "repository": self.data.full_name,
-                    "repository_id": self.data.id,
-                },
-            )
-
-    async def async_post_installation(self):
-        """Run post installation steps."""
-        try:
-            await self.hacs.hass.services.async_call(
-                "hassio", "addon_restart", {"addon": "c6a2317c_netdaemon"}
-            )
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
-            pass
@@ -1,6 +1,8 @@
 """Class for plugins in HACS."""
+
 from __future__ import annotations
 
+import re
 from typing import TYPE_CHECKING
 
 from ..enums import HacsCategory, HacsDispatchEvent
@@ -9,7 +11,11 @@ from ..utils.decorator import concurrent
 from ..utils.json import json_loads
 from .base import HacsRepository
 
+HACSTAG_REPLACER = re.compile(r"\D+")
+
 if TYPE_CHECKING:
+    from homeassistant.components.lovelace.resources import ResourceStorageCollection
+
     from ..base import HacsBase
 
 
@@ -55,7 +61,12 @@ class HacsPluginRepository(HacsRepository):
 
     async def async_post_installation(self):
         """Run post installation steps."""
-        self.hacs.async_setup_frontend_endpoint_plugin()
+        await self.hacs.async_setup_frontend_endpoint_plugin()
+        await self.update_dashboard_resources()
+
+    async def async_post_uninstall(self):
+        """Run post uninstall steps."""
+        await self.remove_dashboard_resources()
 
     @concurrent(concurrenttasks=10, backoff_time=5)
     async def update_repository(self, ignore_issues=False, force=False):
@@ -74,7 +85,7 @@ class HacsPluginRepository(HacsRepository):
         if self.content.path.remote == "release":
             self.content.single = True
 
-        # Signal entities to refresh
+        # Signal frontend to refresh
         if self.data.installed:
             self.hacs.async_dispatch(
                 HacsDispatchEvent.REPOSITORY,
@@ -99,9 +110,9 @@ class HacsPluginRepository(HacsRepository):
 
     def update_filenames(self) -> None:
         """Get the filename to target."""
-        # Handler for plug requirement 3
-        if self.repository_manifest.filename:
-            valid_filenames = (self.repository_manifest.filename,)
+        content_in_root = self.repository_manifest.content_in_root
+        if specific_filename := self.repository_manifest.filename:
+            valid_filenames = (specific_filename,)
         else:
             valid_filenames = (
                 f"{self.data.name.replace('lovelace-', '')}.js",
@@ -110,7 +121,7 @@ class HacsPluginRepository(HacsRepository):
                 f"{self.data.name}-bundle.js",
             )
 
-        if not self.repository_manifest.content_in_root:
+        if not content_in_root:
             if self.releases.objects:
                 release = self.releases.objects[0]
                 if release.assets:
@@ -124,11 +135,106 @@ class HacsPluginRepository(HacsRepository):
                     self.content.path.remote = "release"
                     return
 
-        for location in ("",) if self.repository_manifest.content_in_root else ("dist", ""):
-            for filename in valid_filenames:
-                if f"{location+'/' if location else ''}{filename}" in [
-                    x.full_path for x in self.tree
-                ]:
-                    self.data.file_name = filename.split("/")[-1]
-                    self.content.path.remote = location
-                    break
+        all_paths = {x.full_path for x in self.tree}
+        for filename in valid_filenames:
+            if filename in all_paths:
+                self.data.file_name = filename
+                self.content.path.remote = ""
+                return
+            if not content_in_root and f"dist/{filename}" in all_paths:
+                self.data.file_name = filename.split("/")[-1]
+                self.content.path.remote = "dist"
+                return
+
+    def generate_dashboard_resource_hacstag(self) -> str:
+        """Get the HACS tag used by dashboard resources."""
+        version = (
+            self.display_installed_version
+            or self.data.selected_tag
+            or self.display_available_version
+        )
+        return f"{self.data.id}{HACSTAG_REPLACER.sub('', version)}"
+
+    def generate_dashboard_resource_namespace(self) -> str:
+        """Get the dashboard resource namespace."""
+        return f"/hacsfiles/{self.data.full_name.split("/")[1]}"
+
+    def generate_dashboard_resource_url(self) -> str:
+        """Get the dashboard resource namespace."""
+        filename = self.data.file_name
+        if "/" in filename:
+            self.logger.warning("%s have defined an invalid file name %s", self.string, filename)
+            filename = filename.split("/")[-1]
+        return (
+            f"{self.generate_dashboard_resource_namespace()}/{filename}"
+            f"?hacstag={self.generate_dashboard_resource_hacstag()}"
+        )
+
+    def _get_resource_handler(self) -> ResourceStorageCollection | None:
+        """Get the resource handler."""
+        if not (hass_data := self.hacs.hass.data):
+            self.logger.error("%s Can not access the hass data", self.string)
+            return
+
+        if (lovelace_data := hass_data.get("lovelace")) is None:
+            self.logger.warning("%s Can not access the lovelace integration data", self.string)
+            return
+
+        resources: ResourceStorageCollection | None = lovelace_data.get("resources")
+
+        if resources is None:
+            self.logger.warning("%s Can not access the dashboard resources", self.string)
+            return
+
+        if not hasattr(resources, "store") or resources.store is None:
+            self.logger.info("%s YAML mode detected, can not update resources", self.string)
+            return
+
+        if resources.store.key != "lovelace_resources" or resources.store.version != 1:
+            self.logger.warning("%s Can not use the dashboard resources", self.string)
+            return
+
+        return resources
+
+    async def update_dashboard_resources(self) -> None:
+        """Update dashboard resources."""
+        if not (resources := self._get_resource_handler()):
+            return
+
+        if not resources.loaded:
+            await resources.async_load()
+
+        namespace = self.generate_dashboard_resource_namespace()
+        url = self.generate_dashboard_resource_url()
+
+        for entry in resources.async_items():
+            if (entry_url := entry["url"]).startswith(namespace):
+                if entry_url != url:
+                    self.logger.info(
+                        "%s Updating existing dashboard resource from %s to %s",
+                        self.string,
+                        entry_url,
+                        url,
+                    )
+                    await resources.async_update_item(entry["id"], {"url": url})
+                return
+
+        # Nothing was updated, add the resource
+        self.logger.info("%s Adding dashboard resource %s", self.string, url)
+        await resources.async_create_item({"res_type": "module", "url": url})
+
+    async def remove_dashboard_resources(self) -> None:
+        """Remove dashboard resources."""
+        if not (resources := self._get_resource_handler()):
+            return
+
+        if not resources.loaded:
+            await resources.async_load()
+
+        namespace = self.generate_dashboard_resource_namespace()
+
+        for entry in resources.async_items():
+            if entry["url"].startswith(namespace):
+                self.logger.info("%s Removing dashboard resource %s", self.string, entry["url"])
+                await resources.async_delete_item(entry["id"])
+                return
@@ -1,4 +1,5 @@
 """Class for python_scripts in HACS."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
@@ -89,7 +90,7 @@ class HacsPythonScriptRepository(HacsRepository):
         # Update name
         self.update_filenames()
 
-        # Signal entities to refresh
+        # Signal frontend to refresh
         if self.data.installed:
             self.hacs.async_dispatch(
                 HacsDispatchEvent.REPOSITORY,
@@ -1,8 +1,11 @@
 """Class for themes in HACS."""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from homeassistant.exceptions import HomeAssistantError
+
 from ..enums import HacsCategory, HacsDispatchEvent
 from ..exceptions import HacsException
 from ..utils.decorator import concurrent
@@ -32,12 +35,7 @@ class HacsThemeRepository(HacsRepository):
 
     async def async_post_installation(self):
         """Run post installation steps."""
-        try:
-            await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
-            pass
-
-        self.hacs.async_setup_frontend_endpoint_themes()
+        await self._reload_frontend_themes()
 
     async def validate_repository(self):
         """Validate."""
@@ -74,6 +72,18 @@ class HacsThemeRepository(HacsRepository):
         if self.hacs.system.action:
             await self.hacs.validation.async_run_repository_checks(self)
 
+    async def _reload_frontend_themes(self) -> None:
+        """Reload frontend themes."""
+        self.logger.debug("%s Reloading frontend themes", self.string)
+        try:
+            await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
+        except HomeAssistantError as exception:
+            self.logger.exception("%s %s", self.string, exception)
+
+    async def async_post_uninstall(self) -> None:
+        """Run post uninstall steps."""
+        await self._reload_frontend_themes()
+
     @concurrent(concurrenttasks=10, backoff_time=5)
     async def update_repository(self, ignore_issues=False, force=False):
         """Update."""
@@ -88,7 +98,7 @@ class HacsThemeRepository(HacsRepository):
         self.update_filenames()
         self.content.path.local = self.localpath
 
-        # Signal entities to refresh
+        # Signal frontend to refresh
         if self.data.installed:
             self.hacs.async_dispatch(
                 HacsDispatchEvent.REPOSITORY,
@@ -1,62 +0,0 @@
-"""Sensor platform for HACS."""
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from homeassistant.components.sensor import SensorEntity
-from homeassistant.core import callback
-
-if TYPE_CHECKING:
-    from .base import HacsBase
-
-from .const import DOMAIN
-from .entity import HacsSystemEntity
-from .enums import ConfigurationType
-
-
-async def async_setup_platform(hass, _config, async_add_entities, _discovery_info=None):
-    """Setup sensor platform."""
-    async_add_entities([HACSSensor(hacs=hass.data.get(DOMAIN))])
-
-
-async def async_setup_entry(hass, _config_entry, async_add_devices):
-    """Setup sensor platform."""
-    hacs: HacsBase = hass.data.get(DOMAIN)
-    if hacs.configuration.experimental:
-        return
-
-    async_add_devices([HACSSensor(hacs=hass.data.get(DOMAIN))])
-
-
-class HACSSensor(HacsSystemEntity, SensorEntity):
-    """HACS Sensor class."""
-
-    _attr_name = "hacs"
-    _attr_native_unit_of_measurement = "pending update(s)"
-    _attr_native_value = None
-
-    @callback
-    def _update(self) -> None:
-        """Update the sensor."""
-
-        repositories = [
-            repository
-            for repository in self.hacs.repositories.list_all
-            if repository.pending_update
-        ]
-        self._attr_native_value = len(repositories)
-        if (
-            self.hacs.configuration.config_type == ConfigurationType.YAML
-            or not self.hacs.configuration.experimental
-        ):
-            self._attr_extra_state_attributes = {
-                "repositories": [
-                    {
-                        "name": repository.data.full_name,
-                        "display_name": repository.display_name,
-                        "installed_version": repository.display_installed_version,
-                        "available_version": repository.display_available_version,
-                    }
-                    for repository in repositories
-                ]
-            }
@@ -1,4 +1,7 @@
 """Provide info to system health."""
+
+from typing import Any
+
 from aiogithubapi.common.const import BASE_API_URL
 from homeassistant.components import system_health
 from homeassistant.core import HomeAssistant, callback
@@ -17,8 +20,11 @@ def async_register(hass: HomeAssistant, register: system_health.SystemHealthRegi
     register.async_register_info(system_health_info, "/hacs")
 
 
-async def system_health_info(hass):
+async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
     """Get info for the info page."""
+    if DOMAIN not in hass.data:
+        return {"Disabled": "HACS is not loaded, but HA still requests this information..."}
+
     hacs: HacsBase = hass.data[DOMAIN]
     response = await hacs.githubapi.rate_limit()
 
@@ -30,6 +36,9 @@ async def system_health_info(hass):
         "GitHub Web": system_health.async_check_can_reach_url(
             hass, "https://github.com/", GITHUB_STATUS
         ),
+        "HACS Data": system_health.async_check_can_reach_url(
+            hass, "https://data-v2.hacs.xyz/data.json", CLOUDFLARE_STATUS
+        ),
         "GitHub API Calls Remaining": response.data.resources.core.remaining,
         "Installed Version": hacs.version,
         "Stage": hacs.stage,
@@ -40,9 +49,4 @@ async def system_health_info(hass):
     if hacs.system.disabled:
         data["Disabled"] = hacs.system.disabled_reason
 
-    if hacs.configuration.experimental:
-        data["HACS Data"] = system_health.async_check_can_reach_url(
-            hass, "https://data-v2.hacs.xyz/data.json", CLOUDFLARE_STATUS
-        )
-
     return data
@@ -17,8 +17,7 @@
         "acc_logs": "I know how to access Home Assistant logs",
         "acc_addons": "I know that there are no add-ons in HACS",
         "acc_untested": "I know that everything inside HACS including HACS itself is custom and untested by Home Assistant",
-        "acc_disable": "I know that if I get issues with Home Assistant I should disable all my custom_components",
-        "experimental": "Enable experimental features, this is what eventually will become HACS 2.0.0, if you enable it now you do not need to do anything when 2.0.0 is released"
+        "acc_disable": "I know that if I get issues with Home Assistant I should disable all my custom_components"
       },
       "description": "Before you can setup HACS you need to acknowledge the following"
     },
@@ -45,11 +44,9 @@
       "data": {
         "not_in_use": "Not in use with YAML",
         "country": "Filter with country code",
-        "experimental": "Enable experimental features",
         "release_limit": "Number of releases to show",
         "debug": "Enable debug",
         "appdaemon": "Enable AppDaemon apps discovery & tracking",
-        "netdaemon": "[DEPRECATED] Enable NetDaemon apps discovery & tracking",
         "sidepanel_icon": "Side panel icon",
         "sidepanel_title": "Side panel title"
       }
@@ -71,10 +68,17 @@
     "removed": {
       "title": "Repository removed from HACS",
       "description": "Because {reason}, `{name}` has been removed from HACS. Please visit the [HACS Panel](/hacs/repository/{repositry_id}) to remove it."
-    },
-    "deprecated_yaml_configuration": {
-      "title": "YAML configuration is deprecated",
-      "description": "YAML configuration of HACS is deprecated and will be removed in version 2.0.0, there will be no automatic import of this.\nPlease remove it from your configuration, restart Home Assistant and use the UI to configure it instead."
+    }
+  },
+  "entity": {
+    "switch": {
+      "pre-release": {
+        "name": "Pre-release",
+        "state": {
+          "off": "No pre-releases",
+          "on": "Pre-releases preferred"
+        }
+      }
     }
   }
 }
@@ -1,24 +1,28 @@
 """Update entities for HACS."""
+
 from __future__ import annotations
 
 from typing import Any
 
 from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
-from homeassistant.core import HomeAssistantError, callback
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant, HomeAssistantError, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
 from .base import HacsBase
 from .const import DOMAIN
 from .entity import HacsRepositoryEntity
 from .enums import HacsCategory, HacsDispatchEvent
 from .exceptions import HacsException
-from .repositories.base import HacsManifest
 
 
-async def async_setup_entry(hass, _config_entry, async_add_devices):
+async def async_setup_entry(
+    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+) -> None:
     """Setup update platform."""
-    hacs: HacsBase = hass.data.get(DOMAIN)
-    async_add_devices(
+    hacs: HacsBase = hass.data[DOMAIN]
+    async_add_entities(
         HacsRepositoryUpdateEntity(hacs=hacs, repository=repository)
         for repository in hacs.repositories.list_downloaded
     )
@@ -59,8 +63,6 @@ class HacsRepositoryUpdateEntity(HacsRepositoryEntity, UpdateEntity):
     @property
     def release_summary(self) -> str | None:
         """Return the release summary."""
-        if not self.repository.can_download:
-            return f"<ha-alert alert-type='warning'>Requires Home Assistant {self.repository.repository_manifest.homeassistant}</ha-alert>"
         if self.repository.pending_restart:
             return "<ha-alert alert-type='error'>Restart of Home Assistant required</ha-alert>"
         return None
@@ -76,64 +78,20 @@ class HacsRepositoryUpdateEntity(HacsRepositoryEntity, UpdateEntity):
 
         return f"https://brands.home-assistant.io/_/{self.repository.data.domain}/icon.png"
 
-    async def _ensure_capabilities(self, version: str | None, **kwargs: Any) -> None:
-        """Ensure that the entity has capabilities."""
-        target_manifest: HacsManifest | None = None
-        if version is None:
-            if not self.repository.can_download:
-                raise HomeAssistantError(
-                    f"This {self.repository.data.category.value} is not available for download."
-                )
-            return
-
-        if version == self.repository.data.last_version:
-            target_manifest = self.repository.repository_manifest
-        else:
-            target_manifest = await self.repository.get_hacs_json(version=version)
-
-        if target_manifest is None:
-            raise HomeAssistantError(
-                f"The version {version} for this {self.repository.data.category.value} can not be used with HACS."
-            )
-
-        if (
-            target_manifest.homeassistant is not None
-            and self.hacs.core.ha_version < target_manifest.homeassistant
-        ):
-            raise HomeAssistantError(
-                f"This version requires Home Assistant {target_manifest.homeassistant} or newer."
-            )
-        if target_manifest.hacs is not None and self.hacs.version < target_manifest.hacs:
-            raise HomeAssistantError(f"This version requires HACS {target_manifest.hacs} or newer.")
-
     async def async_install(self, version: str | None, backup: bool, **kwargs: Any) -> None:
         """Install an update."""
-        await self._ensure_capabilities(version)
-        self.repository.logger.info("Starting update, %s", version)
-        if self.repository.display_version_or_commit == "version":
-            self._update_in_progress(progress=10)
-            if not version:
-                await self.repository.update_repository(force=True)
-            else:
-                self.repository.ref = version
-                self.repository.data.selected_tag = version
-                self.repository.force_branch = version is not None
-        self._update_in_progress(progress=20)
-
+        to_download = version or self.latest_version
+        if to_download == self.installed_version:
+            raise HomeAssistantError(f"Version {self.installed_version} of {
+                self.repository.data.full_name} is already downloaded")
         try:
-            await self.repository.async_install(version=version)
+            await self.repository.async_download_repository(ref=version or self.latest_version)
         except HacsException as exception:
-            raise HomeAssistantError(
-                f"Downloading {self.repository.data.full_name} with version {version or self.repository.data.last_version or self.repository.data.last_commit} failed with ({exception})"
-            ) from exception
-        finally:
-            self.repository.data.selected_tag = None
-            self.repository.force_branch = False
-            self._update_in_progress(progress=False)
+            raise HomeAssistantError(exception) from exception
 
     async def async_release_notes(self) -> str | None:
         """Return the release notes."""
-        if self.repository.pending_restart or not self.repository.can_download:
+        if self.repository.pending_restart:
             return None
 
         if self.latest_version not in self.repository.data.published_tags:
@@ -148,9 +106,18 @@ class HacsRepositoryUpdateEntity(HacsRepositoryEntity, UpdateEntity):
             self.repository.data.last_version = next(iter(self.repository.data.published_tags))
 
         release_notes = ""
-        if len(self.repository.releases.objects) > 0:
-            release = self.repository.releases.objects[0]
-            release_notes += release.body
+        # Compile release notes from installed version up to the latest
+        if self.installed_version in self.repository.data.published_tags:
+            for release in self.repository.releases.objects:
+                if release.tag_name == self.installed_version:
+                    break
+                release_notes += f"# {release.tag_name}"
+                if release.tag_name != release.name:
+                    release_notes += f" - {release.name}"
+                release_notes += f"\n\n{release.body}"
+                release_notes += "\n\n---\n\n"
+        elif any(self.repository.releases.objects):
+            release_notes += self.repository.releases.objects[0].body
 
         if self.repository.pending_update:
             if self.repository.data.category == HacsCategory.INTEGRATION:
@@ -1,4 +1,5 @@
 """Backup."""
+
 from __future__ import annotations
 
 import os
@@ -27,7 +28,7 @@ class Backup:
         backup_path: str = DEFAULT_BACKUP_PATH,
         repository: HacsRepository | None = None,
     ) -> None:
-        """initialize."""
+        """Initialize."""
         self.hacs = hacs
         self.repository = repository
         self.local_path = local_path or repository.content.path.local
@@ -107,33 +108,3 @@ class Backup:
         while os.path.exists(self.backup_path):
             sleep(0.1)
         self.hacs.log.debug("Backup dir %s cleared", self.backup_path)
-
-
-class BackupNetDaemon(Backup):
-    """BackupNetDaemon."""
-
-    def create(self) -> None:
-        """Create a backup in /tmp"""
-        if not self._init_backup_dir():
-            return
-
-        for filename in os.listdir(self.repository.content.path.local):
-            if not filename.endswith(".yaml"):
-                continue
-
-            source_file_name = f"{self.repository.content.path.local}/{filename}"
-            target_file_name = f"{self.backup_path}/{filename}"
-            shutil.copyfile(source_file_name, target_file_name)
-
-    def restore(self) -> None:
-        """Create a backup in /tmp"""
-        if not os.path.exists(self.backup_path):
-            return
-
-        for filename in os.listdir(self.backup_path):
-            if not filename.endswith(".yaml"):
-                continue
-
-            source_file_name = f"{self.backup_path}/{filename}"
-            target_file_name = f"{self.repository.content.path.local}/{filename}"
-            shutil.copyfile(source_file_name, target_file_name)
@@ -1,74 +1,9 @@
 """HACS Configuration Schemas."""
-# pylint: disable=dangerous-default-value
-import voluptuous as vol
-
-from ..const import LOCALE
 
 # Configuration:
-TOKEN = "token"
 SIDEPANEL_TITLE = "sidepanel_title"
 SIDEPANEL_ICON = "sidepanel_icon"
-FRONTEND_REPO = "frontend_repo"
-FRONTEND_REPO_URL = "frontend_repo_url"
 APPDAEMON = "appdaemon"
-NETDAEMON = "netdaemon"
 
 # Options:
 COUNTRY = "country"
-DEBUG = "debug"
-RELEASE_LIMIT = "release_limit"
-EXPERIMENTAL = "experimental"
-
-# Config group
-PATH_OR_URL = "frontend_repo_path_or_url"
-
-
-def hacs_base_config_schema(config: dict = {}) -> dict:
-    """Return a shcema configuration dict for HACS."""
-    if not config:
-        config = {
-            TOKEN: "xxxxxxxxxxxxxxxxxxxxxxxxxxx",
-        }
-    return {
-        vol.Required(TOKEN, default=config.get(TOKEN)): str,
-    }
-
-
-def hacs_config_option_schema(options: dict = {}) -> dict:
-    """Return a shcema for HACS configuration options."""
-    if not options:
-        options = {
-            APPDAEMON: False,
-            COUNTRY: "ALL",
-            DEBUG: False,
-            EXPERIMENTAL: False,
-            NETDAEMON: False,
-            RELEASE_LIMIT: 5,
-            SIDEPANEL_ICON: "hacs:hacs",
-            SIDEPANEL_TITLE: "HACS",
-            FRONTEND_REPO: "",
-            FRONTEND_REPO_URL: "",
-        }
-    return {
-        vol.Optional(SIDEPANEL_TITLE, default=options.get(SIDEPANEL_TITLE)): str,
-        vol.Optional(SIDEPANEL_ICON, default=options.get(SIDEPANEL_ICON)): str,
-        vol.Optional(RELEASE_LIMIT, default=options.get(RELEASE_LIMIT)): int,
-        vol.Optional(COUNTRY, default=options.get(COUNTRY)): vol.In(LOCALE),
-        vol.Optional(APPDAEMON, default=options.get(APPDAEMON)): bool,
-        vol.Optional(NETDAEMON, default=options.get(NETDAEMON)): bool,
-        vol.Optional(DEBUG, default=options.get(DEBUG)): bool,
-        vol.Optional(EXPERIMENTAL, default=options.get(EXPERIMENTAL)): bool,
-        vol.Exclusive(FRONTEND_REPO, PATH_OR_URL): str,
-        vol.Exclusive(FRONTEND_REPO_URL, PATH_OR_URL): str,
-    }
-
-
-def hacs_config_combined() -> dict:
-    """Combine the configuration options."""
-    base = hacs_base_config_schema()
-    options = hacs_config_option_schema()
-
-    for option in options:
-        base[option] = options[option]
-
-    return base
@@ -1,13 +1,13 @@
 """Data handler for HACS."""
+
 from __future__ import annotations
 
 import asyncio
-from datetime import datetime
+from datetime import UTC, datetime
 from typing import Any
 
 from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.util import json as json_util
 
 from ..base import HacsBase
 from ..const import HACS_REPOSITORY_ID
@@ -47,6 +47,7 @@ EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
     ("last_version", None),
     ("manifest_name", None),
     ("open_issues", 0),
+    ("prerelease", None),
     ("published_tags", []),
     ("releases", False),
     ("selected_tag", None),
@@ -84,8 +85,7 @@ class HacsData:
                 "ignored_repositories": self.hacs.common.ignored_repositories,
             },
         )
-        if self.hacs.configuration.experimental:
-            await self._async_store_experimental_content_and_repos()
+        await self._async_store_experimental_content_and_repos()
         await self._async_store_content_and_repos()
 
     async def _async_store_content_and_repos(self, _=None):  # bb: ignore
@@ -100,7 +100,7 @@ class HacsData:
         for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
             self.hacs.async_dispatch(event, {})
 
-    async def _async_store_experimental_content_and_repos(self, _=None):  # bb: ignore
+    async def _async_store_experimental_content_and_repos(self, _=None):
         """Store the main repos file and each repo that is out of date."""
         # Repositories
         self.content = {}
@@ -165,29 +165,16 @@ class HacsData:
                 pass
 
         try:
-            data = (
-                await async_load_from_store(
-                    self.hacs.hass,
-                    "data" if self.hacs.configuration.experimental else "repositories",
-                )
-                or {}
-            )
-            if data and self.hacs.configuration.experimental:
+            repositories = await async_load_from_store(self.hacs.hass, "repositories")
+            if not repositories and (data := await async_load_from_store(self.hacs.hass, "data")):
                 for category, entries in data.get("repositories", {}).items():
                     for repository in entries:
                         repositories[repository["id"]] = {"category": category, **repository}
-            else:
-                repositories = (
-                    data or await async_load_from_store(self.hacs.hass, "repositories") or {}
-                )
         except HomeAssistantError as exception:
             self.hacs.log.error(
                 "Could not read %s, restore the file from a backup - %s",
-                self.hacs.hass.config.path(
-                    ".storage/hacs.data"
-                    if self.hacs.configuration.experimental
-                    else ".storage/hacs.repositories"
-                ),
+                self.hacs.hass.config.path(".storage/hacs.data"),
                 exception,
             )
             self.hacs.disable_hacs(HacsDisabledReason.RESTORE)
@@ -196,13 +183,7 @@ class HacsData:
         if not hacs and not repositories:
             # Assume new install
             self.hacs.status.new = True
-            if self.hacs.configuration.experimental:
-                return True
-            self.logger.info("<HacsData restore> Loading base repository information")
-            repositories = await self.hacs.hass.async_add_executor_job(
-                json_util.load_json,
-                f"{self.hacs.core.config_path}/custom_components/hacs/utils/default.repositories",
-            )
+            return True
 
         self.logger.info("<HacsData restore> Restore started")
 
@@ -242,7 +223,8 @@ class HacsData:
 
             self.logger.info("<HacsData restore> Restore done")
         except (
-            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            BaseException
         ) as exception:
             self.logger.critical(
                 "<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
@@ -250,22 +232,28 @@ class HacsData:
             return False
         return True
 
-    async def register_unknown_repositories(self, repositories, category: str | None = None):
+    async def register_unknown_repositories(
+        self, repositories: dict[str, dict[str, Any]], category: str | None = None
+    ):
         """Registry any unknown repositories."""
-        register_tasks = [
-            self.hacs.async_register_repository(
+        for repo_idx, (entry, repo_data) in enumerate(repositories.items()):
+            # async_register_repository is awaited in a loop
+            # since its unlikely to ever suspend at startup
+            if (
+                entry == "0"
+                or repo_data.get("category", category) is None
+                or self.hacs.repositories.is_registered(repository_id=entry)
+            ):
+                continue
+            await self.hacs.async_register_repository(
                 repository_full_name=repo_data["full_name"],
                 category=repo_data.get("category", category),
                 check=False,
                 repository_id=entry,
             )
-            for entry, repo_data in repositories.items()
-            if entry != "0"
-            and not self.hacs.repositories.is_registered(repository_id=entry)
-            and repo_data.get("category", category) is not None
-        ]
-        if register_tasks:
-            await asyncio.gather(*register_tasks)
+            if repo_idx % 100 == 0:
+                # yield to avoid blocking the event loop
+                await asyncio.sleep(0)
 
     @callback
     def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
@@ -302,18 +290,22 @@ class HacsData:
         repository.data.selected_tag = repository_data.get("selected_tag")
         repository.data.show_beta = repository_data.get("show_beta", False)
         repository.data.last_version = repository_data.get("last_version")
+        repository.data.prerelease = repository_data.get("prerelease")
         repository.data.last_commit = repository_data.get("last_commit")
         repository.data.installed_version = repository_data.get("version_installed")
         repository.data.installed_commit = repository_data.get("installed_commit")
         repository.data.manifest_name = repository_data.get("manifest_name")
 
         if last_fetched := repository_data.get("last_fetched"):
-            repository.data.last_fetched = datetime.fromtimestamp(last_fetched)
+            repository.data.last_fetched = datetime.fromtimestamp(last_fetched, UTC)
 
         repository.repository_manifest = HacsManifest.from_dict(
             repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
        )
 
+        if repository.data.prerelease == repository.data.last_version:
+            repository.data.prerelease = None
+
         if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
             # Set local path
             repository.content.path.local = repository.localpath
@@ -1,4 +1,5 @@
 """Util to decode content from the github API."""
+
 from base64 import b64decode
 
 
@@ -1,9 +1,11 @@
 """HACS Decorators."""
+
 from __future__ import annotations
 
 import asyncio
+from collections.abc import Coroutine
 from functools import wraps
-from typing import TYPE_CHECKING, Any, Coroutine
+from typing import TYPE_CHECKING, Any
 
 from ..const import DEFAULT_CONCURRENT_BACKOFF_TIME, DEFAULT_CONCURRENT_TASKS
 
File diff suppressed because one or more lines are too long
@@ -1,4 +1,5 @@
 """Filter functions."""
+
 from __future__ import annotations
 
 from typing import Any
@@ -1,4 +1,5 @@
 """Custom logger for HACS."""
+
 import logging
 
 from ..const import PACKAGE_NAME
@@ -1,6 +1,8 @@
 """Path utils"""
+
 from __future__ import annotations
 
+from functools import lru_cache
 from pathlib import Path
 from typing import TYPE_CHECKING
 
@@ -8,14 +10,32 @@ if TYPE_CHECKING:
     from ..base import HacsBase
 
 
+@lru_cache(maxsize=1)
+def _get_safe_paths(
+    config_path: str,
+    appdaemon_path: str,
+    plugin_path: str,
+    python_script_path: str,
+    theme_path: str,
+) -> set[str]:
+    """Get safe paths."""
+    return {
+        Path(f"{config_path}/{appdaemon_path}").as_posix(),
+        Path(f"{config_path}/{plugin_path}").as_posix(),
+        Path(f"{config_path}/{python_script_path}").as_posix(),
+        Path(f"{config_path}/{theme_path}").as_posix(),
+        Path(f"{config_path}/custom_components/").as_posix(),
+        Path(f"{config_path}/custom_templates/").as_posix(),
+    }
+
+
 def is_safe(hacs: HacsBase, path: str | Path) -> bool:
     """Helper to check if path is safe to remove."""
-    return Path(path).as_posix() not in (
-        Path(f"{hacs.core.config_path}/{hacs.configuration.appdaemon_path}").as_posix(),
-        Path(f"{hacs.core.config_path}/{hacs.configuration.netdaemon_path}").as_posix(),
-        Path(f"{hacs.core.config_path}/{hacs.configuration.plugin_path}").as_posix(),
-        Path(f"{hacs.core.config_path}/{hacs.configuration.python_script_path}").as_posix(),
-        Path(f"{hacs.core.config_path}/{hacs.configuration.theme_path}").as_posix(),
-        Path(f"{hacs.core.config_path}/custom_components/").as_posix(),
-        Path(f"{hacs.core.config_path}/custom_templates/").as_posix(),
+    configuration = hacs.configuration
+    return Path(path).as_posix() not in _get_safe_paths(
+        hacs.core.config_path,
+        configuration.appdaemon_path,
+        configuration.plugin_path,
+        configuration.python_script_path,
+        configuration.theme_path,
     )
@@ -1,9 +1,10 @@
 """The QueueManager class."""
+
 from __future__ import annotations
 
 import asyncio
+from collections.abc import Coroutine
 import time
-from typing import Coroutine
 
 from homeassistant.core import HomeAssistant
 
@@ -60,9 +61,6 @@ class QueueManager:
         for task in self.queue:
             local_queue.append(task)
 
-        for task in local_queue:
-            self.queue.remove(task)
-
         _LOGGER.debug("<QueueManager> Starting queue execution for %s tasks", len(local_queue))
         start = time.time()
         result = await asyncio.gather(*local_queue, return_exceptions=True)
@@ -71,6 +69,9 @@ class QueueManager:
                 _LOGGER.error("<QueueManager> %s", entry)
         end = time.time() - start
 
+        for task in local_queue:
+            self.queue.remove(task)
+
         _LOGGER.debug(
             "<QueueManager> Queue execution finished for %s tasks finished in %.2f seconds",
             len(local_queue),
@@ -1,4 +1,5 @@
 """Regex utils"""
+
 from __future__ import annotations
 
 import re
@@ -1,4 +1,5 @@
 """Storage handers."""
+
 from homeassistant.helpers.json import JSONEncoder
 from homeassistant.helpers.storage import Store
 from homeassistant.util import json as json_util
@@ -1,38 +0,0 @@
-"""Custom template support."""
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from jinja2 import Template
-
-if TYPE_CHECKING:
-from ..base import HacsBase
-from ..repositories.base import HacsRepository
-
-
-def render_template(hacs: HacsBase, content: str, context: HacsRepository) -> str:
-"""Render templates in content."""
-if hacs.configuration.experimental:
-# Do not render for experimental
-return content
-# Fix None issues
-if context.releases.last_release_object is not None:
-prerelease = context.releases.last_release_object.prerelease
-else:
-prerelease = False
-
-# Render the template
-try:
-return Template(content).render(
-installed=context.data.installed,
-pending_update=context.pending_update,
-prerelease=prerelease,
-selected_tag=context.data.selected_tag,
-version_available=context.releases.last_release,
-version_installed=context.display_installed_version,
-)
-except (
-BaseException # lgtm [py/catch-base-exception] pylint: disable=broad-except
-) as exception:
-context.logger.debug(exception)
-return content
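For reference, the removed helper was a thin wrapper around jinja2.Template that exposed a handful of repository attributes to description templates. A minimal sketch of the kind of rendering it performed; the template string and values below are placeholders, not real HACS data:

from jinja2 import Template

# Placeholder values standing in for the repository attributes the removed
# helper exposed (installed, prerelease, version_installed, ...).
content = "{% if installed %}Installed {{ version_installed }}{% else %}Not installed{% endif %}"

print(
    Template(content).render(
        installed=True,
        pending_update=False,
        prerelease=False,
        selected_tag=None,
        version_available="1.1.0",
        version_installed="1.0.0",
    )
)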
@@ -1,7 +1,10 @@
 """Validation utilities."""
+
 from __future__ import annotations
 
+from collections.abc import Callable
 from dataclasses import dataclass, field
+from typing import Any
 
 from awesomeversion import AwesomeVersion
 from homeassistant.helpers.config_validation import url as url_validator
@@ -67,3 +70,146 @@ INTEGRATION_MANIFEST_JSON_SCHEMA = vol.Schema(
 },
 extra=vol.ALLOW_EXTRA,
 )
+
+
+def validate_repo_data(schema: dict[str, Any], extra: int) -> Callable[[Any], Any]:
+"""Return a validator for repo data.
+
+This is used instead of vol.All to always try both the repo schema and
+and the validate_version validator.
+"""
+_schema = vol.Schema(schema, extra=extra)
+
+def validate_repo_data(data: Any) -> Any:
+"""Validate integration repo data."""
+schema_errors: vol.MultipleInvalid | None = None
+try:
+_schema(data)
+except vol.MultipleInvalid as err:
+schema_errors = err
+try:
+validate_version(data)
+except vol.Invalid as err:
+if schema_errors:
+schema_errors.add(err)
+else:
+raise
+if schema_errors:
+raise schema_errors
+return data
+
+return validate_repo_data
+
+
+def validate_version(data: Any) -> Any:
+"""Ensure at least one of last_commit or last_version is present."""
+if "last_commit" not in data and "last_version" not in data:
+raise vol.Invalid("Expected at least one of [`last_commit`, `last_version`], got none")
+return data
+
+
+V2_COMMON_DATA_JSON_SCHEMA = {
+vol.Required("description"): vol.Any(str, None),
+vol.Optional("downloads"): int,
+vol.Optional("etag_releases"): str,
+vol.Required("etag_repository"): str,
+vol.Required("full_name"): str,
+vol.Optional("last_commit"): str,
+vol.Required("last_fetched"): vol.Any(int, float),
+vol.Required("last_updated"): str,
+vol.Optional("last_version"): str,
+vol.Optional("prerelease"): str,
+vol.Required("manifest"): {
+vol.Optional("country"): vol.Any([str], False),
+vol.Optional("name"): str,
+},
+vol.Optional("open_issues"): int,
+vol.Optional("stargazers_count"): int,
+vol.Optional("topics"): [str],
+}
+
+V2_INTEGRATION_DATA_JSON_SCHEMA = {
+**V2_COMMON_DATA_JSON_SCHEMA,
+vol.Required("domain"): str,
+vol.Required("manifest_name"): str,
+}
+
+_V2_REPO_SCHEMAS = {
+"appdaemon": V2_COMMON_DATA_JSON_SCHEMA,
+"integration": V2_INTEGRATION_DATA_JSON_SCHEMA,
+"plugin": V2_COMMON_DATA_JSON_SCHEMA,
+"python_script": V2_COMMON_DATA_JSON_SCHEMA,
+"template": V2_COMMON_DATA_JSON_SCHEMA,
+"theme": V2_COMMON_DATA_JSON_SCHEMA,
+}
+
+# Used when validating repos in the hacs integration, discards extra keys
+VALIDATE_FETCHED_V2_REPO_DATA = {
+category: validate_repo_data(schema, vol.REMOVE_EXTRA)
+for category, schema in _V2_REPO_SCHEMAS.items()
+}
+
+# Used when validating repos when generating data, fails on extra keys
+VALIDATE_GENERATED_V2_REPO_DATA = {
+category: vol.Schema({str: validate_repo_data(schema, vol.PREVENT_EXTRA)})
+for category, schema in _V2_REPO_SCHEMAS.items()
+}
+
+V2_CRITICAL_REPO_DATA_SCHEMA = {
+vol.Required("link"): str,
+vol.Required("reason"): str,
+vol.Required("repository"): str,
+}
+
+# Used when validating critical repos in the hacs integration, discards extra keys
+VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
+V2_CRITICAL_REPO_DATA_SCHEMA,
+extra=vol.REMOVE_EXTRA,
+)
+
+# Used when validating critical repos when generating data, fails on extra keys
+VALIDATE_GENERATED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
+[
+vol.Schema(
+V2_CRITICAL_REPO_DATA_SCHEMA,
+extra=vol.PREVENT_EXTRA,
+)
+]
+)
+
+V2_REMOVED_REPO_DATA_SCHEMA = {
+vol.Optional("link"): str,
+vol.Optional("reason"): str,
+vol.Required("removal_type"): vol.In(
+[
+"Integration is missing a version, and is abandoned.",
+"Remove",
+"archived",
+"blacklist",
+"critical",
+"deprecated",
+"removal",
+"remove",
+"removed",
+"replaced",
+"repository",
+]
+),
+vol.Required("repository"): str,
+}
+
+# Used when validating removed repos in the hacs integration, discards extra keys
+VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
+V2_REMOVED_REPO_DATA_SCHEMA,
+extra=vol.REMOVE_EXTRA,
+)
+
+# Used when validating removed repos when generating data, fails on extra keys
+VALIDATE_GENERATED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
+[
+vol.Schema(
+V2_REMOVED_REPO_DATA_SCHEMA,
+extra=vol.PREVENT_EXTRA,
+)
+]
+)
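A rough usage sketch of the new helpers: each entry in VALIDATE_FETCHED_V2_REPO_DATA is the callable returned by validate_repo_data, so schema errors and a missing last_commit/last_version are collected and raised together. The import path assumes the custom_components.hacs layout used elsewhere in this commit, and the repo dict is invented for illustration:

import voluptuous as vol

from custom_components.hacs.utils.validate import VALIDATE_FETCHED_V2_REPO_DATA

# Made-up record for an "integration" category repository.
repo = {
    "description": "Example integration",
    "etag_repository": 'W/"abc"',
    "full_name": "example/hacs-example",
    "last_fetched": 1700000000.0,
    "last_updated": "2024-01-01T00:00:00Z",
    "last_version": "1.0.0",
    "manifest": {"name": "Example"},
    "domain": "example",
    "manifest_name": "Example",
}

try:
    VALIDATE_FETCHED_V2_REPO_DATA["integration"](repo)
except vol.Invalid as err:
    # Raised as vol.MultipleInvalid when both the schema and the version check fail.
    print(f"Invalid repo data: {err}")
else:
    print("Repo data accepted")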
@@ -1,4 +1,5 @@
 """Version utils."""
+
 from __future__ import annotations
 
 from functools import lru_cache
@@ -1,7 +1,37 @@
-"""Workarounds for issues that should not be fixed."""
+"""Workarounds."""
 
+from homeassistant.core import HomeAssistant
+
 DOMAIN_OVERRIDES = {
 # https://github.com/hacs/integration/issues/2465
 "custom-components/sensor.custom_aftership": "custom_aftership"
 }
 
+
+try:
+from homeassistant.components.http import StaticPathConfig
+
+async def async_register_static_path(
+hass: HomeAssistant,
+url_path: str,
+path: str,
+cache_headers: bool = True,
+) -> None:
+"""Register a static path with the HTTP component."""
+await hass.http.async_register_static_paths(
+[StaticPathConfig(url_path, path, cache_headers)]
+)
+except ImportError:
+
+async def async_register_static_path(
+hass: HomeAssistant,
+url_path: str,
+path: str,
+cache_headers: bool = True,
+) -> None:
+"""Register a static path with the HTTP component.
+
+Legacy: Can be removed when min version is 2024.7
+https://developers.home-assistant.io/blog/2024/06/18/async_register_static_paths/
+"""
+hass.http.register_static_path(url_path, path, cache_headers)
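A brief usage sketch of the shim above as an integration might call it during setup; the URL path and directory are placeholders, and the import path assumes the module lives in utils/workarounds.py as in the rest of this commit:

from homeassistant.core import HomeAssistant

from custom_components.hacs.utils.workarounds import async_register_static_path


async def async_setup_frontend(hass: HomeAssistant) -> None:
    """Serve a local directory on both pre- and post-2024.7 cores."""
    # On 2024.7+ this awaits async_register_static_paths with a StaticPathConfig;
    # older cores fall back to the synchronous register_static_path call.
    await async_register_static_path(
        hass,
        "/example-static",                # placeholder URL path
        hass.config.path("www/example"),  # placeholder directory
        cache_headers=False,
    )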
@@ -1,8 +1,12 @@
 from __future__ import annotations
 
-from ..repositories.base import HacsRepository
+from typing import TYPE_CHECKING
 
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
+
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -15,7 +19,7 @@ class Validator(ActionValidationBase):
 more_info = "https://hacs.xyz/docs/publish/include#check-archived"
 allow_fork = False
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if self.repository.data.archived:
 raise ValidationException("The repository is archived")
@@ -1,12 +1,13 @@
 """Base class for validation."""
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
-from ..enums import HacsCategory
 from ..exceptions import HacsException
 
 if TYPE_CHECKING:
+from ..enums import HacsCategory
 from ..repositories.base import HacsRepository
 
+
@@ -17,7 +18,7 @@ class ValidationException(HacsException):
 class ActionValidationBase:
 """Base class for action validation."""
 
-categories: list[HacsCategory] = []
+categories: tuple[HacsCategory, ...] = ()
 allow_fork: bool = True
 more_info: str = "https://hacs.xyz/docs/publish/action"
 
@@ -34,7 +35,7 @@ class ActionValidationBase:
 async def async_validate(self) -> None:
 """Validate the repository."""
 
-async def execute_validation(self, *_, **__) -> None:
+async def execute_validation(self, *_: Any, **__: Any) -> None:
 """Execute the task defined in subclass."""
 self.failed = False
 
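To make the base-class changes concrete, here is a hypothetical action validator written against the updated conventions (tuple-valued categories, a typed async_validate that raises ValidationException on failure), as if it lived next to base.py in the validators package. The license check itself is invented and not part of this commit:

from __future__ import annotations

from ..enums import HacsCategory
from .base import ActionValidationBase, ValidationException


class Validator(ActionValidationBase):
    """Hypothetical check: integrations must ship a license file."""

    more_info = "https://hacs.xyz/docs/publish/include"
    categories = (HacsCategory.INTEGRATION,)
    allow_fork = False

    async def async_validate(self) -> None:
        """Validate the repository."""
        filenames = [x.filename.lower() for x in self.repository.tree]
        if "license" not in filenames:
            raise ValidationException("The repository has no license file")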
@@ -1,10 +1,14 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from custom_components.hacs.enums import HacsCategory
 
-from ..repositories.base import HacsRepository
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
 URL = "https://brands.home-assistant.io/domains.json"
 
 
@@ -17,9 +21,9 @@ class Validator(ActionValidationBase):
 """Validate the repository."""
 
 more_info = "https://hacs.xyz/docs/publish/include#check-brands"
-categories = [HacsCategory.INTEGRATION]
+categories = (HacsCategory.INTEGRATION,)
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 
 response = await self.hacs.session.get(URL)
@@ -1,8 +1,12 @@
 from __future__ import annotations
 
-from ..repositories.base import HacsRepository
+from typing import TYPE_CHECKING
 
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
+
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -15,7 +19,7 @@ class Validator(ActionValidationBase):
 more_info = "https://hacs.xyz/docs/publish/include#check-repository"
 allow_fork = False
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if not self.repository.data.description:
 raise ValidationException("The repository has no description")
@@ -19,7 +19,7 @@ class Validator(ActionValidationBase):
 
 more_info = "https://hacs.xyz/docs/publish/include#check-hacs-manifest"
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if RepositoryFile.HACS_JSON not in [x.filename for x in self.repository.tree]:
 raise ValidationException(f"The repository has no '{RepositoryFile.HACS_JSON}' file")
@@ -1,9 +1,13 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from ..enums import HacsCategory
-from ..repositories.base import HacsRepository
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
 IGNORED = ["-shield", "img.shields.io", "buymeacoffee.com"]
 
 
@@ -15,12 +19,12 @@ async def async_setup_validator(repository: HacsRepository) -> Validator:
 class Validator(ActionValidationBase):
 """Validate the repository."""
 
-categories = [HacsCategory.PLUGIN, HacsCategory.THEME]
+categories = (HacsCategory.PLUGIN, HacsCategory.THEME)
 more_info = "https://hacs.xyz/docs/publish/include#check-images"
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
-info = await self.repository.async_get_info_file_contents()
+info = await self.repository.async_get_info_file_contents(version=self.repository.ref)
 for line in info.split("\n"):
 if "<img" in line or "![" in line:
 if [ignore for ignore in IGNORED if ignore in line]:
@@ -1,8 +1,12 @@
 from __future__ import annotations
 
-from ..repositories.base import HacsRepository
+from typing import TYPE_CHECKING
 
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
+
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -14,7 +18,7 @@ class Validator(ActionValidationBase):
 
 more_info = "https://hacs.xyz/docs/publish/include#check-info"
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 filenames = [x.filename.lower() for x in self.repository.tree]
 if "readme" in filenames:
@@ -1,13 +1,17 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from voluptuous.error import Invalid
 
 from ..enums import HacsCategory, RepositoryFile
-from ..repositories.base import HacsRepository
-from ..repositories.integration import HacsIntegrationRepository
 from ..utils.validate import INTEGRATION_MANIFEST_JSON_SCHEMA
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+from ..repositories.integration import HacsIntegrationRepository
+
 
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -19,16 +23,16 @@ class Validator(ActionValidationBase):
 
 repository: HacsIntegrationRepository
 more_info = "https://hacs.xyz/docs/publish/include#check-manifest"
-categories = [HacsCategory.INTEGRATION]
+categories = (HacsCategory.INTEGRATION,)
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if RepositoryFile.MAINIFEST_JSON not in [x.filename for x in self.repository.tree]:
 raise ValidationException(
 f"The repository has no '{RepositoryFile.MAINIFEST_JSON}' file"
 )
 
-content = await self.repository.async_get_integration_manifest(self.repository.ref)
+content = await self.repository.get_integration_manifest(version=self.repository.ref)
 try:
 INTEGRATION_MANIFEST_JSON_SCHEMA(content)
 except Invalid as exception:
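The same schema call can be exercised outside the validator. Whether the toy manifest below passes depends on the full INTEGRATION_MANIFEST_JSON_SCHEMA, which is only partially visible in this diff, so treat it purely as a shape example; the import path assumes the custom_components.hacs layout:

from voluptuous.error import Invalid

from custom_components.hacs.utils.validate import INTEGRATION_MANIFEST_JSON_SCHEMA

# Illustrative manifest.json contents; real repositories provide the full file.
manifest = {"domain": "example", "name": "Example", "version": "1.0.0"}

try:
    INTEGRATION_MANIFEST_JSON_SCHEMA(manifest)
except Invalid as exception:
    print(f"manifest.json rejected: {exception}")
else:
    print("manifest.json accepted")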
@@ -1,8 +1,12 @@
 from __future__ import annotations
 
-from ..repositories.base import HacsRepository
+from typing import TYPE_CHECKING
 
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
+
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -15,7 +19,7 @@ class Validator(ActionValidationBase):
 more_info = "https://hacs.xyz/docs/publish/include#check-repository"
 allow_fork = False
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if not self.repository.data.has_issues:
 raise ValidationException("The repository does not have issues enabled")
@@ -1,4 +1,5 @@
 """Hacs validation manager."""
+
 from __future__ import annotations
 
 import asyncio
@@ -7,13 +8,12 @@ import os
 from pathlib import Path
 from typing import TYPE_CHECKING
 
-from homeassistant.core import HomeAssistant
-
-from ..repositories.base import HacsRepository
-from .base import ActionValidationBase
-
 if TYPE_CHECKING:
+from homeassistant.core import HomeAssistant
+
 from ..base import HacsBase
+from ..repositories.base import HacsRepository
+from .base import ActionValidationBase
 
 
 class ValidationManager:
@@ -23,16 +23,16 @@
 """Initialize the setup manager class."""
 self.hacs = hacs
 self.hass = hass
-self._validatiors: dict[str, ActionValidationBase] = {}
+self._validators: dict[str, ActionValidationBase] = {}
 
 @property
-def validatiors(self) -> list[ActionValidationBase]:
+def validators(self) -> list[ActionValidationBase]:
 """Return all list of all tasks."""
-return list(self._validatiors.values())
+return list(self._validators.values())
 
 async def async_load(self, repository: HacsRepository) -> None:
 """Load all tasks."""
-self._validatiors = {}
+self._validators = {}
 validator_files = Path(__file__).parent
 validator_modules = (
 module.stem
@@ -40,10 +40,10 @@
 if module.name not in ("base.py", "__init__.py", "manager.py")
 )
 
-async def _load_module(module: str):
+async def _load_module(module: str) -> None:
 task_module = import_module(f"{__package__}.{module}")
 if task := await task_module.async_setup_validator(repository=repository):
-self._validatiors[task.slug] = task
+self._validators[task.slug] = task
 
 await asyncio.gather(*[_load_module(task) for task in validator_modules])
 
@@ -59,9 +59,9 @@
 and os.getenv("GITHUB_REPOSITORY") != repository.data.full_name
 )
 
-validatiors = [
+validators = [
 validator
-for validator in self.validatiors or []
+for validator in self.validators or []
 if (
 (not validator.categories or repository.data.category in validator.categories)
 and validator.slug not in os.getenv("INPUT_IGNORE", "").split(" ")
@@ -69,10 +69,10 @@
 )
 ]
 
-await asyncio.gather(*[validator.execute_validation() for validator in validatiors])
+await asyncio.gather(*[validator.execute_validation() for validator in validators])
 
-total = len(validatiors)
+total = len(validators)
-failed = len([x for x in validatiors if x.failed])
+failed = len([x for x in validators if x.failed])
 
 if failed != 0:
 repository.logger.error("%s %s/%s checks failed", repository.string, failed, total)
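Besides the validatiors/validators rename, the selection logic above is driven by the INPUT_IGNORE environment variable: a space-separated list of validator slugs the action should skip. A tiny illustration with placeholder slugs (the real values come from each validator's slug attribute):

import os

# Placeholder slugs for loaded validators.
loaded = ["archived", "brands", "description", "topics"]

os.environ["INPUT_IGNORE"] = "brands topics"

ignored = os.getenv("INPUT_IGNORE", "").split(" ")
selected = [slug for slug in loaded if slug not in ignored]
print(selected)  # ['archived', 'description']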
@@ -1,8 +1,12 @@
 from __future__ import annotations
 
-from ..repositories.base import HacsRepository
+from typing import TYPE_CHECKING
 
 from .base import ActionValidationBase, ValidationException
 
+if TYPE_CHECKING:
+from ..repositories.base import HacsRepository
+
+
 async def async_setup_validator(repository: HacsRepository) -> Validator:
 """Set up this validator."""
@@ -15,7 +19,7 @@ class Validator(ActionValidationBase):
 more_info = "https://hacs.xyz/docs/publish/include#check-repository"
 allow_fork = False
 
-async def async_validate(self):
+async def async_validate(self) -> None:
 """Validate the repository."""
 if not self.repository.data.topics:
 raise ValidationException("The repository has no valid topics")