Add HACS
This commit is contained in:
parent
abe60c128a
commit
80d8215d5c
|
@ -7,6 +7,7 @@ zones.yaml
|
|||
|
||||
.storage/
|
||||
tts/
|
||||
__pycache__/
|
||||
|
||||
*.conf
|
||||
*.db
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
"""
|
||||
HACS gives you a powerful UI to handle downloads of all your custom needs.
|
||||
|
||||
For more details about this integration, please refer to the documentation at
|
||||
https://hacs.xyz/
|
||||
"""
|
||||
import voluptuous as vol
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers.functions.configuration_schema import hacs_config_combined
|
||||
from .operational.setup import async_setup as hacs_yaml_setup
|
||||
from .operational.setup import async_setup_entry as hacs_ui_setup
|
||||
|
||||
# Validate the optional `hacs:` section of configuration.yaml; other keys in
# the user's config are passed through untouched (ALLOW_EXTRA).
CONFIG_SCHEMA = vol.Schema({DOMAIN: hacs_config_combined()}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
async def async_setup(hass, config):
    """Set up this integration using yaml.

    Thin wrapper that forwards the running Home Assistant instance and the
    parsed YAML configuration to the operational setup module.
    """
    result = await hacs_yaml_setup(hass, config)
    return result
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry):
    """Set up this integration using UI.

    Thin wrapper that forwards the config entry created by the config flow
    to the operational setup module.
    """
    result = await hacs_ui_setup(hass, config_entry)
    return result
|
|
@ -0,0 +1 @@
|
|||
"""Initialize HACS API"""
|
|
@ -0,0 +1,25 @@
|
|||
"""API Handler for acknowledge_critical_repository"""
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.functions.store import (
|
||||
async_load_from_store,
|
||||
async_save_to_store,
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command(
    {vol.Required("type"): "hacs/critical", vol.Optional("repository"): cv.string}
)
async def acknowledge_critical_repository(hass, connection, msg):
    """Mark a critical repository as acknowledged by the user.

    Loads the stored list of critical repositories, flags every entry
    matching ``msg["repository"]`` as acknowledged, persists the list,
    and sends it back to the websocket caller.
    """
    repository = msg["repository"]

    # The store returns None when nothing has been saved yet; normalize to a
    # list so the loop below cannot raise TypeError on first use.
    critical = await async_load_from_store(hass, "critical") or []
    for repo in critical:
        if repository == repo["repository"]:
            repo["acknowledged"] = True
    await async_save_to_store(hass, "critical", critical)
    connection.send_message(websocket_api.result_message(msg["id"], critical))
|
|
@ -0,0 +1,24 @@
|
|||
"""API Handler for check_local_path"""
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.functions.path_exsist import async_path_exsist
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command(
    {vol.Required("type"): "hacs/check_path", vol.Optional("path"): cv.string}
)
async def check_local_path(_hass, connection, msg):
    """Check whether a local filesystem path exists.

    Responds with ``{"exist": bool}`` so the frontend can validate a
    user-supplied path.
    """
    path = msg.get("path")
    exist = {"exist": False}

    if path is None:
        # Always answer; a bare `return` here would leave the websocket
        # caller waiting for a result message that never arrives.
        connection.send_message(websocket_api.result_message(msg["id"], exist))
        return

    if await async_path_exsist(path):
        exist["exist"] = True

    connection.send_message(websocket_api.result_message(msg["id"], exist))
|
|
@ -0,0 +1,15 @@
|
|||
"""API Handler for get_critical_repositories"""
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.functions.store import async_load_from_store
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/get_critical"})
async def get_critical_repositories(hass, connection, msg):
    """Return the stored list of critical repositories.

    Sends an empty list when nothing has been stored yet.
    """
    # The store returns None when no data has been saved; normalize to [].
    critical = await async_load_from_store(hass, "critical") or []
    connection.send_message(websocket_api.result_message(msg["id"], critical))
|
|
@ -0,0 +1,28 @@
|
|||
"""API Handler for hacs_config"""
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/config"})
async def hacs_config(_hass, connection, msg):
    """Return the active HACS configuration to the websocket caller."""
    hacs = get_hacs()
    config = hacs.configuration

    # One literal instead of eleven key-by-key assignments.
    content = {
        "frontend_mode": config.frontend_mode,
        "frontend_compact": config.frontend_compact,
        "onboarding_done": config.onboarding_done,
        "version": hacs.version,
        "frontend_expected": hacs.frontend.version_expected,
        "frontend_running": hacs.frontend.version_running,
        "dev": config.dev,
        "debug": config.debug,
        "country": config.country,
        "experimental": config.experimental,
        "categories": hacs.common.categories,
    }

    connection.send_message(websocket_api.result_message(msg["id"], content))
|
|
@ -0,0 +1,15 @@
|
|||
"""API Handler for hacs_removed"""
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.share import list_removed_repositories
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/removed"})
async def hacs_removed(_hass, connection, msg):
    """Get information about removed repositories."""
    # Serialize every removed repository for the frontend.
    content = [removed.to_json() for removed in list_removed_repositories()]
    connection.send_message(websocket_api.result_message(msg["id"], content))
|
|
@ -0,0 +1,62 @@
|
|||
"""API Handler for hacs_repositories"""
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
def _repository_to_dict(repo):
    """Serialize one repository object into the frontend payload."""
    return {
        "additional_info": repo.information.additional_info,
        "authors": repo.data.authors,
        "available_version": repo.display_available_version,
        "beta": repo.data.show_beta,
        "can_install": repo.can_install,
        "category": repo.data.category,
        "country": repo.data.country,
        "config_flow": repo.data.config_flow,
        "custom": repo.custom,
        "default_branch": repo.data.default_branch,
        "description": repo.data.description,
        "domain": repo.data.domain,
        "downloads": repo.data.downloads,
        "file_name": repo.data.file_name,
        "first_install": repo.status.first_install,
        "full_name": repo.data.full_name,
        "hide": repo.data.hide,
        "hide_default_branch": repo.data.hide_default_branch,
        "homeassistant": repo.data.homeassistant,
        "id": repo.data.id,
        "info": repo.information.info,
        "installed_version": repo.display_installed_version,
        "installed": repo.data.installed,
        "issues": repo.data.open_issues,
        "javascript_type": repo.information.javascript_type,
        "last_updated": repo.data.last_updated,
        "local_path": repo.content.path.local,
        "main_action": repo.main_action,
        "name": repo.display_name,
        "new": repo.data.new,
        "pending_upgrade": repo.pending_upgrade,
        "releases": repo.data.published_tags,
        "selected_tag": repo.data.selected_tag,
        "stars": repo.data.stargazers_count,
        "state": repo.state,
        "status_description": repo.display_status_description,
        "status": repo.display_status,
        "topics": repo.data.topics,
        "updated_info": repo.status.updated_info,
        "version_or_commit": repo.display_version_or_commit,
    }


@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/repositories"})
async def hacs_repositories(_hass, connection, msg):
    """Return all repositories belonging to the enabled categories."""
    hacs = get_hacs()
    # Only repositories in an enabled category are visible to the frontend.
    content = [
        _repository_to_dict(repo)
        for repo in hacs.repositories
        if repo.data.category in hacs.common.categories
    ]

    connection.send_message(websocket_api.result_message(msg["id"], content))
|
|
@ -0,0 +1,113 @@
|
|||
"""API Handler for hacs_repository"""
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import voluptuous as vol
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/repository",
        vol.Optional("action"): cv.string,
        vol.Optional("repository"): cv.string,
    }
)
async def hacs_repository(hass, connection, msg):
    """Run a single-repository action requested by the frontend.

    Dispatches on ``action`` (update/install/uninstall/hide/... ) for the
    repository identified by ``msg["repository"]``, persists HACS data,
    reports failures through the ``hacs/error`` event, and answers the
    websocket call with ``data`` (non-empty only for ``release_notes``).
    """
    hacs = get_hacs()
    logger = getLogger()
    data = {}
    repository = None

    repo_id = msg.get("repository")
    action = msg.get("action")
    if repo_id is None or action is None:
        # Nothing to do without both a target and an action.
        return

    try:
        repository = hacs.get_by_id(repo_id)
        logger.debug(f"Running {action} for {repository.data.full_name}")

        if action == "update":
            await repository.update_repository(True)
            repository.status.updated_info = True

        elif action == "install":
            repository.data.new = False
            was_installed = repository.data.installed
            await repository.async_install()
            # A fresh install changes what the frontend must show.
            if not was_installed:
                hass.bus.async_fire("hacs/reload", {"force": True})

        elif action == "not_new":
            repository.data.new = False

        elif action == "uninstall":
            repository.data.new = False
            await repository.update_repository(True)
            await repository.uninstall()

        elif action == "hide":
            repository.data.hide = True

        elif action == "unhide":
            repository.data.hide = False

        elif action == "show_beta":
            repository.data.show_beta = True
            await repository.update_repository()

        elif action == "hide_beta":
            repository.data.show_beta = False
            await repository.update_repository()

        elif action == "toggle_beta":
            repository.data.show_beta = not repository.data.show_beta
            await repository.update_repository()

        elif action == "delete":
            repository.data.show_beta = False
            repository.remove()

        elif action == "release_notes":
            data = [
                {
                    "name": x.attributes["name"],
                    "body": x.attributes["body"],
                    "tag": x.attributes["tag_name"],
                }
                for x in repository.releases.objects
            ]

        elif action == "set_version":
            # Selecting the default branch clears the pinned tag.
            if msg["version"] == repository.data.default_branch:
                repository.data.selected_tag = None
            else:
                repository.data.selected_tag = msg["version"]
            await repository.update_repository()

            hass.bus.async_fire("hacs/reload", {"force": True})

        else:
            logger.error(f"WS action '{action}' is not valid")

        await hacs.data.async_write()
        message = None
    except AIOGitHubAPIException as exception:
        message = exception
    except AttributeError as exception:
        # hacs.get_by_id returned None (unknown ID) and an attribute was read.
        message = f"Could not use repository with ID {repo_id} ({exception})"
    except (Exception, BaseException) as exception:  # pylint: disable=broad-except
        # Boundary handler: any failure is reported to the frontend below.
        message = exception

    if message is not None:
        logger.error(message)
        hass.bus.async_fire("hacs/error", {"message": str(message)})

    if repository:
        repository.state = None
        connection.send_message(websocket_api.result_message(msg["id"], data))
|
|
@ -0,0 +1,121 @@
|
|||
"""API Handler for hacs_repository_data"""
|
||||
import sys
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import voluptuous as vol
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.helpers.functions.misc import extract_repository_from_url
|
||||
from custom_components.hacs.helpers.functions.register_repository import (
|
||||
register_repository,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/repository/data",
        vol.Optional("action"): cv.string,
        vol.Optional("repository"): cv.string,
        vol.Optional("data"): cv.string,
    }
)
async def hacs_repository_data(hass, connection, msg):
    """Run a repository action that carries an extra data payload.

    Supports registering a new repository (``add``, where ``repository``
    is a URL or ``owner/name`` and ``data`` is the category) and actions
    on existing repositories (``set_state``, ``set_version``, ``install``).
    Failures are reported through the ``hacs/error`` event; the websocket
    call is always answered with an empty result.
    """
    hacs = get_hacs()
    repo_id = msg.get("repository")
    action = msg.get("action")
    data = msg.get("data")

    if repo_id is None:
        return

    if action == "add":
        # Normalize a full URL to "owner/repository".
        repo_id = extract_repository_from_url(repo_id)
        if repo_id is None:
            return

        if repo_id in hacs.common.skip:
            hacs.common.skip.remove(repo_id)

        if not hacs.get_by_name(repo_id):
            try:
                # NOTE(review): `data` is the category here and is assumed
                # non-None for "add" — a missing value raises AttributeError
                # and is surfaced via the hacs/error event below.
                registration = await register_repository(repo_id, data.lower())
                if registration is not None:
                    raise HacsException(registration)
            except (
                Exception,
                BaseException,
            ) as exception:  # pylint: disable=broad-except
                hass.bus.async_fire(
                    "hacs/error",
                    {
                        "action": "add_repository",
                        "exception": str(sys.exc_info()[0].__name__),
                        "message": str(exception),
                    },
                )
        else:
            hass.bus.async_fire(
                "hacs/error",
                {
                    "action": "add_repository",
                    "message": f"Repository '{repo_id}' exists in the store.",
                },
            )

        repository = hacs.get_by_name(repo_id)
    else:
        repository = hacs.get_by_id(repo_id)

    if repository is None:
        hass.bus.async_fire("hacs/repository", {})
        return

    _LOGGER.debug("Running %s for %s", action, repository.data.full_name)
    try:
        if action == "set_state":
            repository.state = data

        elif action == "set_version":
            repository.data.selected_tag = data
            await repository.update_repository()

            repository.state = None

        elif action == "install":
            was_installed = repository.data.installed
            repository.data.selected_tag = data
            await repository.update_repository()
            await repository.async_install()
            repository.state = None
            # A fresh install changes what the frontend must show.
            if not was_installed:
                hass.bus.async_fire("hacs/reload", {"force": True})

        elif action == "add":
            repository.state = None

        else:
            repository.state = None
            _LOGGER.error("WS action '%s' is not valid", action)

        message = None
    except AIOGitHubAPIException as exception:
        message = exception
    except AttributeError as exception:
        message = f"Could not use repository with ID {repo_id} ({exception})"
    except (Exception, BaseException) as exception:  # pylint: disable=broad-except
        # Boundary handler: any failure is reported to the frontend below.
        message = exception

    if message is not None:
        _LOGGER.error(message)
        hass.bus.async_fire("hacs/error", {"message": str(message)})

    await hacs.data.async_write()
    connection.send_message(websocket_api.result_message(msg["id"], {}))
|
|
@ -0,0 +1,54 @@
|
|||
"""API Handler for hacs_settings"""
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/settings",
        vol.Optional("action"): cv.string,
        vol.Optional("categories"): cv.ensure_list,
    }
)
async def hacs_settings(hass, connection, msg):
    """Apply a HACS settings action requested by the frontend.

    Mutates the shared configuration (frontend mode/compact, onboarding)
    or clears the "new" flag on repositories, then fires ``hacs/config``
    so the frontend refreshes, persists data, and acknowledges the call.
    """
    hacs = get_hacs()

    action = msg["action"]
    _LOGGER.debug("WS action '%s'", action)

    if action == "set_fe_grid":
        hacs.configuration.frontend_mode = "Grid"

    elif action == "onboarding_done":
        hacs.configuration.onboarding_done = True

    elif action == "set_fe_table":
        hacs.configuration.frontend_mode = "Table"

    elif action == "set_fe_compact_true":
        # NOTE(review): the *_true/_false action names are inverted relative
        # to the value stored — kept as-is for frontend compatibility;
        # confirm against the frontend before "fixing".
        hacs.configuration.frontend_compact = False

    elif action == "set_fe_compact_false":
        hacs.configuration.frontend_compact = True

    elif action == "clear_new":
        # Only clear the flag for repositories in the requested categories.
        for repo in hacs.repositories:
            if repo.data.new and repo.data.category in msg.get("categories", []):
                _LOGGER.debug(
                    "Clearing new flag from '%s'",
                    repo.data.full_name,
                )
                repo.data.new = False
    else:
        _LOGGER.error("WS action '%s' is not valid", action)
    hass.bus.async_fire("hacs/config", {})
    await hacs.data.async_write()
    connection.send_message(websocket_api.result_message(msg["id"], {}))
|
|
@ -0,0 +1,23 @@
|
|||
"""API Handler for hacs_status"""
|
||||
import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/status"})
async def hacs_status(_hass, connection, msg):
    """Return the current HACS runtime status to the websocket caller."""
    hacs = get_hacs()
    content = {
        "startup": hacs.status.startup,
        "background_task": hacs.status.background_task,
        "lovelace_mode": hacs.system.lovelace_mode,
        "reloading_data": hacs.status.reloading_data,
        "upgrading_all": hacs.status.upgrading_all,
        "disabled": hacs.system.disabled,
        "has_pending_tasks": hacs.queue.has_pending_tasks,
        "stage": hacs.stage,
    }
    connection.send_message(websocket_api.result_message(msg["id"], content))
|
|
@ -0,0 +1,114 @@
|
|||
"""Base HACS class."""
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
import pathlib
|
||||
|
||||
import attr
|
||||
from aiogithubapi.github import AIOGitHubAPI
|
||||
from aiogithubapi.objects.repository import AIOGitHubAPIRepository
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .enums import HacsStage
|
||||
from .helpers.functions.logger import getLogger
|
||||
from .models.core import HacsCore
|
||||
from .models.frontend import HacsFrontend
|
||||
from .models.system import HacsSystem
|
||||
|
||||
|
||||
class HacsCommon:
    """Common for HACS."""

    # NOTE: these are class-level mutable defaults shared by every instance;
    # safe only because HACS is used as a singleton.
    categories: List = []
    default: List = []
    installed: List = []
    skip: List = []
|
||||
|
||||
|
||||
class HacsStatus:
    """HacsStatus."""

    # Runtime flags describing what HACS is currently doing; read by the
    # hacs/status websocket handler.
    startup: bool = True
    new: bool = False
    background_task: bool = False
    reloading_data: bool = False
    upgrading_all: bool = False
|
||||
|
||||
|
||||
@attr.s
class HacsBaseAttributes:
    """Base HACS class."""

    # Backing fields for the properties defined on HacsBase; set through
    # those property setters rather than directly.
    _default: Optional[AIOGitHubAPIRepository]
    _github: Optional[AIOGitHubAPI]
    _hass: Optional[HomeAssistant]
    _repository: Optional[AIOGitHubAPIRepository]
    _stage: HacsStage = HacsStage.SETUP
    _common: Optional[HacsCommon]

    # Sub-objects holding grouped state; attr.ib with a class default means
    # each is instantiated per HacsBaseAttributes instance.
    core: HacsCore = attr.ib(HacsCore)
    common: HacsCommon = attr.ib(HacsCommon)
    status: HacsStatus = attr.ib(HacsStatus)
    frontend: HacsFrontend = attr.ib(HacsFrontend)
    log: logging.Logger = getLogger()
    system: HacsSystem = attr.ib(HacsSystem)
    # NOTE: class-level mutable default, shared across instances; safe only
    # because HACS is used as a singleton.
    repositories: List = []
|
||||
|
||||
|
||||
@attr.s
class HacsBase(HacsBaseAttributes):
    """Base HACS class.

    Exposes the private attributes of HacsBaseAttributes through
    property getter/setter pairs.
    """

    @property
    def stage(self) -> HacsStage:
        """Return the current HacsStage."""
        return self._stage

    @stage.setter
    def stage(self, value: HacsStage) -> None:
        """Set the value for the stage property."""
        self._stage = value

    @property
    def github(self) -> Optional[AIOGitHubAPI]:
        """Return the AIOGitHubAPI client."""
        return self._github

    @github.setter
    def github(self, value: AIOGitHubAPI) -> None:
        """Set the value for the github property."""
        self._github = value

    @property
    def repository(self) -> Optional[AIOGitHubAPIRepository]:
        """Return the AIOGitHubAPIRepository object representing hacs/integration."""
        return self._repository

    @repository.setter
    def repository(self, value: AIOGitHubAPIRepository) -> None:
        """Set the value for the repository property."""
        self._repository = value

    @property
    def default(self) -> Optional[AIOGitHubAPIRepository]:
        """Return the AIOGitHubAPIRepository object representing hacs/default."""
        return self._default

    @default.setter
    def default(self, value: AIOGitHubAPIRepository) -> None:
        """Set the value for the default property."""
        self._default = value

    @property
    def hass(self) -> Optional[HomeAssistant]:
        """Return the HomeAssistant object."""
        return self._hass

    @hass.setter
    def hass(self, value: HomeAssistant) -> None:
        """Set the value for the hass property."""
        self._hass = value

    @property
    def integration_dir(self) -> pathlib.Path:
        """Return the HACS integration dir."""
        return pathlib.Path(__file__).parent
|
|
@ -0,0 +1,153 @@
|
|||
"""Adds config flow for HACS."""
|
||||
import voluptuous as vol
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHubDevice
|
||||
from aiogithubapi.common.const import OAUTH_USER_LOGIN
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
|
||||
from custom_components.hacs.const import CLIENT_ID, DOMAIN, MINIMUM_HA_VERSION
|
||||
from custom_components.hacs.helpers.functions.configuration_schema import (
|
||||
hacs_config_option_schema,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from .base import HacsBase
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
class HacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for HACS."""

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    def __init__(self):
        """Initialize."""
        self._errors = {}
        # GitHub device-flow helper and the resulting activation token.
        self.device = None
        self.activation = None
        self._progress_task = None

    async def async_step_user(self, user_input):
        """Handle a flow initialized by the user."""
        self._errors = {}
        # Only one HACS instance is allowed, whether set up via UI or YAML.
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        if self.hass.data.get(DOMAIN):
            return self.async_abort(reason="single_instance_allowed")

        if user_input:
            # All acknowledgement checkboxes must be ticked before moving on.
            if [x for x in user_input if not user_input[x]]:
                self._errors["base"] = "acc"
                return await self._show_config_form(user_input)

            return await self.async_step_device(user_input)

        # No input yet: show the initial form.
        return await self._show_config_form(user_input)

    async def async_step_device(self, _user_input):
        """Handle the GitHub device-authorization step."""

        async def _wait_for_activation(_=None):
            # Blocks until the user authorizes the device code, then resumes
            # this flow so async_show_progress_done can run.
            self.activation = await self.device.async_device_activation()
            self.hass.async_create_task(
                self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
            )

        if not self.activation:
            if not self.device:
                self.device = GitHubDevice(
                    CLIENT_ID,
                    session=aiohttp_client.async_get_clientsession(self.hass),
                )
            # Start waiting for activation shortly after showing progress.
            async_call_later(self.hass, 1, _wait_for_activation)
            try:
                device_data = await self.device.async_register_device()
                # Show the user the code to enter at GitHub's device page.
                return self.async_show_progress(
                    step_id="device",
                    progress_action="wait_for_device",
                    description_placeholders={
                        "url": OAUTH_USER_LOGIN,
                        "code": device_data.user_code,
                    },
                )
            except AIOGitHubAPIException as exception:
                _LOGGER.error(exception)
                return self.async_abort(reason="github")

        return self.async_show_progress_done(next_step_id="device_done")

    async def _show_config_form(self, user_input):
        """Show the configuration form to edit location data."""
        if not user_input:
            user_input = {}
        # Refuse setup on unsupported Home Assistant versions.
        if AwesomeVersion(HAVERSION) < MINIMUM_HA_VERSION:
            return self.async_abort(
                reason="min_ha_version",
                description_placeholders={"version": MINIMUM_HA_VERSION},
            )
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(
                        "acc_logs", default=user_input.get("acc_logs", False)
                    ): bool,
                    vol.Required(
                        "acc_addons", default=user_input.get("acc_addons", False)
                    ): bool,
                    vol.Required(
                        "acc_untested", default=user_input.get("acc_untested", False)
                    ): bool,
                    vol.Required(
                        "acc_disable", default=user_input.get("acc_disable", False)
                    ): bool,
                }
            ),
            errors=self._errors,
        )

    async def async_step_device_done(self, _user_input):
        """Create the config entry once device activation has completed."""
        return self.async_create_entry(
            title="", data={"token": self.activation.access_token}
        )

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Return the options flow handler for this entry."""
        return HacsOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class HacsOptionsFlowHandler(config_entries.OptionsFlow):
    """HACS config flow options handler."""

    def __init__(self, config_entry):
        """Initialize HACS options flow."""
        self.config_entry = config_entry

    async def async_step_init(self, _user_input=None):
        """Manage the options."""
        return await self.async_step_user()

    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user."""
        hacs: HacsBase = get_hacs()
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)

        if hacs.configuration.config_type == "yaml":
            # YAML-configured installs are not editable through the UI.
            schema = {vol.Optional("not_in_use", default=""): str}
        else:
            schema = hacs_config_option_schema(self.config_entry.options)
            # These two options are not exposed in the options UI.
            del schema["frontend_repo"]
            del schema["frontend_repo_url"]

        return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
|
@ -0,0 +1,301 @@
|
|||
"""Constants for HACS"""
|
||||
NAME_LONG = "HACS (Home Assistant Community Store)"
|
||||
NAME_SHORT = "HACS"
|
||||
INTEGRATION_VERSION = "1.10.1"
|
||||
DOMAIN = "hacs"
|
||||
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
||||
MINIMUM_HA_VERSION = "2020.12.0"
|
||||
PROJECT_URL = "https://github.com/hacs/integration/"
|
||||
CUSTOM_UPDATER_LOCATIONS = [
|
||||
"{}/custom_components/custom_updater.py",
|
||||
"{}/custom_components/custom_updater/__init__.py",
|
||||
]
|
||||
|
||||
ISSUE_URL = f"{PROJECT_URL}issues"
|
||||
DOMAIN_DATA = f"{NAME_SHORT.lower()}_data"
|
||||
|
||||
ELEMENT_TYPES = ["integration", "plugin"]
|
||||
|
||||
PACKAGE_NAME = "custom_components.hacs"
|
||||
|
||||
IFRAME = {
|
||||
"title": "HACS",
|
||||
"icon": "hacs:hacs",
|
||||
"url": "/community_overview",
|
||||
"path": "community",
|
||||
"require_admin": True,
|
||||
}
|
||||
|
||||
VERSION_STORAGE = "6"
|
||||
STORENAME = "hacs"
|
||||
|
||||
# Messages
|
||||
NO_ELEMENTS = "No elements to show, open the store to install some awesome stuff."
|
||||
|
||||
CUSTOM_UPDATER_WARNING = """
|
||||
This cannot be used with custom_updater.
|
||||
To use this you need to remove custom_updater form {}
|
||||
"""
|
||||
|
||||
STARTUP = f"""
|
||||
-------------------------------------------------------------------
|
||||
HACS (Home Assistant Community Store)
|
||||
|
||||
Version: {INTEGRATION_VERSION}
|
||||
This is a custom integration
|
||||
If you have any issues with this you need to open an issue here:
|
||||
https://github.com/hacs/integration/issues
|
||||
-------------------------------------------------------------------
|
||||
"""
|
||||
|
||||
LOCALE = [
|
||||
"ALL",
|
||||
"AF",
|
||||
"AL",
|
||||
"DZ",
|
||||
"AS",
|
||||
"AD",
|
||||
"AO",
|
||||
"AI",
|
||||
"AQ",
|
||||
"AG",
|
||||
"AR",
|
||||
"AM",
|
||||
"AW",
|
||||
"AU",
|
||||
"AT",
|
||||
"AZ",
|
||||
"BS",
|
||||
"BH",
|
||||
"BD",
|
||||
"BB",
|
||||
"BY",
|
||||
"BE",
|
||||
"BZ",
|
||||
"BJ",
|
||||
"BM",
|
||||
"BT",
|
||||
"BO",
|
||||
"BQ",
|
||||
"BA",
|
||||
"BW",
|
||||
"BV",
|
||||
"BR",
|
||||
"IO",
|
||||
"BN",
|
||||
"BG",
|
||||
"BF",
|
||||
"BI",
|
||||
"KH",
|
||||
"CM",
|
||||
"CA",
|
||||
"CV",
|
||||
"KY",
|
||||
"CF",
|
||||
"TD",
|
||||
"CL",
|
||||
"CN",
|
||||
"CX",
|
||||
"CC",
|
||||
"CO",
|
||||
"KM",
|
||||
"CG",
|
||||
"CD",
|
||||
"CK",
|
||||
"CR",
|
||||
"HR",
|
||||
"CU",
|
||||
"CW",
|
||||
"CY",
|
||||
"CZ",
|
||||
"CI",
|
||||
"DK",
|
||||
"DJ",
|
||||
"DM",
|
||||
"DO",
|
||||
"EC",
|
||||
"EG",
|
||||
"SV",
|
||||
"GQ",
|
||||
"ER",
|
||||
"EE",
|
||||
"ET",
|
||||
"FK",
|
||||
"FO",
|
||||
"FJ",
|
||||
"FI",
|
||||
"FR",
|
||||
"GF",
|
||||
"PF",
|
||||
"TF",
|
||||
"GA",
|
||||
"GM",
|
||||
"GE",
|
||||
"DE",
|
||||
"GH",
|
||||
"GI",
|
||||
"GR",
|
||||
"GL",
|
||||
"GD",
|
||||
"GP",
|
||||
"GU",
|
||||
"GT",
|
||||
"GG",
|
||||
"GN",
|
||||
"GW",
|
||||
"GY",
|
||||
"HT",
|
||||
"HM",
|
||||
"VA",
|
||||
"HN",
|
||||
"HK",
|
||||
"HU",
|
||||
"IS",
|
||||
"IN",
|
||||
"ID",
|
||||
"IR",
|
||||
"IQ",
|
||||
"IE",
|
||||
"IM",
|
||||
"IL",
|
||||
"IT",
|
||||
"JM",
|
||||
"JP",
|
||||
"JE",
|
||||
"JO",
|
||||
"KZ",
|
||||
"KE",
|
||||
"KI",
|
||||
"KP",
|
||||
"KR",
|
||||
"KW",
|
||||
"KG",
|
||||
"LA",
|
||||
"LV",
|
||||
"LB",
|
||||
"LS",
|
||||
"LR",
|
||||
"LY",
|
||||
"LI",
|
||||
"LT",
|
||||
"LU",
|
||||
"MO",
|
||||
"MK",
|
||||
"MG",
|
||||
"MW",
|
||||
"MY",
|
||||
"MV",
|
||||
"ML",
|
||||
"MT",
|
||||
"MH",
|
||||
"MQ",
|
||||
"MR",
|
||||
"MU",
|
||||
"YT",
|
||||
"MX",
|
||||
"FM",
|
||||
"MD",
|
||||
"MC",
|
||||
"MN",
|
||||
"ME",
|
||||
"MS",
|
||||
"MA",
|
||||
"MZ",
|
||||
"MM",
|
||||
"NA",
|
||||
"NR",
|
||||
"NP",
|
||||
"NL",
|
||||
"NC",
|
||||
"NZ",
|
||||
"NI",
|
||||
"NE",
|
||||
"NG",
|
||||
"NU",
|
||||
"NF",
|
||||
"MP",
|
||||
"NO",
|
||||
"OM",
|
||||
"PK",
|
||||
"PW",
|
||||
"PS",
|
||||
"PA",
|
||||
"PG",
|
||||
"PY",
|
||||
"PE",
|
||||
"PH",
|
||||
"PN",
|
||||
"PL",
|
||||
"PT",
|
||||
"PR",
|
||||
"QA",
|
||||
"RO",
|
||||
"RU",
|
||||
"RW",
|
||||
"RE",
|
||||
"BL",
|
||||
"SH",
|
||||
"KN",
|
||||
"LC",
|
||||
"MF",
|
||||
"PM",
|
||||
"VC",
|
||||
"WS",
|
||||
"SM",
|
||||
"ST",
|
||||
"SA",
|
||||
"SN",
|
||||
"RS",
|
||||
"SC",
|
||||
"SL",
|
||||
"SG",
|
||||
"SX",
|
||||
"SK",
|
||||
"SI",
|
||||
"SB",
|
||||
"SO",
|
||||
"ZA",
|
||||
"GS",
|
||||
"SS",
|
||||
"ES",
|
||||
"LK",
|
||||
"SD",
|
||||
"SR",
|
||||
"SJ",
|
||||
"SZ",
|
||||
"SE",
|
||||
"CH",
|
||||
"SY",
|
||||
"TW",
|
||||
"TJ",
|
||||
"TZ",
|
||||
"TH",
|
||||
"TL",
|
||||
"TG",
|
||||
"TK",
|
||||
"TO",
|
||||
"TT",
|
||||
"TN",
|
||||
"TR",
|
||||
"TM",
|
||||
"TC",
|
||||
"TV",
|
||||
"UG",
|
||||
"UA",
|
||||
"AE",
|
||||
"GB",
|
||||
"US",
|
||||
"UM",
|
||||
"UY",
|
||||
"UZ",
|
||||
"VU",
|
||||
"VE",
|
||||
"VN",
|
||||
"VG",
|
||||
"VI",
|
||||
"WF",
|
||||
"EH",
|
||||
"YE",
|
||||
"ZM",
|
||||
"ZW",
|
||||
]
|
|
@ -0,0 +1,39 @@
|
|||
"""Helper constants."""
|
||||
# pylint: disable=missing-class-docstring
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class HacsCategory(str, Enum):
    """Content categories HACS can manage; values double as store/path keys."""

    APPDAEMON = "appdaemon"
    INTEGRATION = "integration"
    LOVELACE = "lovelace"
    PLUGIN = "plugin"  # Kept for legacy purposes
    NETDAEMON = "netdaemon"
    PYTHON_SCRIPT = "python_script"
    THEME = "theme"
    REMOVED = "removed"
|
||||
|
||||
|
||||
class LovelaceMode(str, Enum):
    """Lovelace Modes."""

    STORAGE = "storage"
    AUTO = "auto"
    YAML = "yaml"
|
||||
|
||||
|
||||
class HacsStage(str, Enum):
    """Lifecycle stages HACS moves through (see Hacs.async_set_stage)."""

    SETUP = "setup"
    STARTUP = "startup"
    WAITING = "waiting"
    RUNNING = "running"
    BACKGROUND = "background"
|
||||
|
||||
|
||||
class HacsSetupTask(str, Enum):
    """Human-readable names of the individual setup tasks."""

    WEBSOCKET = "WebSocket API"
    FRONTEND = "Frontend"
    SENSOR = "Sensor"
    HACS_REPO = "Hacs Repository"
    CATEGORIES = "Additional categories"
    CLEAR_STORAGE = "Clear storage"
|
|
@ -0,0 +1,77 @@
|
|||
"""HACS Configuration."""
|
||||
import attr
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True)
class Configuration:
    """Configuration class.

    Holds the merged YAML / UI (config entry) configuration for HACS.
    """

    # Main configuration:
    appdaemon_path: str = "appdaemon/apps/"
    appdaemon: bool = False
    netdaemon_path: str = "netdaemon/apps/"
    netdaemon: bool = False
    # Mutable defaults use attr.Factory so every instance gets its own
    # dict — a bare `= {}` default is a single dict shared by all instances.
    config: dict = attr.Factory(dict)
    config_entry: dict = attr.Factory(dict)
    config_type: str = None
    debug: bool = False
    dev: bool = False
    frontend_mode: str = "Grid"
    frontend_compact: bool = False
    frontend_repo: str = ""
    frontend_repo_url: str = ""
    options: dict = attr.Factory(dict)
    onboarding_done: bool = False
    plugin_path: str = "www/community/"
    python_script_path: str = "python_scripts/"
    python_script: bool = False
    sidepanel_icon: str = "hacs:hacs"
    sidepanel_title: str = "HACS"
    theme_path: str = "themes/"
    theme: bool = False
    token: str = None

    # Config options:
    country: str = "ALL"
    experimental: bool = False
    release_limit: int = 5

    def to_json(self) -> dict:
        """Return a dict representation of the configuration."""
        return self.__dict__

    def print(self) -> None:
        """Print the current configuration to the log."""
        config = self.to_json()
        for key in config:
            # Never log secrets or the raw configuration payloads.
            if key in ["config", "config_entry", "options", "token"]:
                continue
            _LOGGER.debug("%s: %s", key, config[key])

    @staticmethod
    def from_dict(configuration: dict, options: dict = None) -> "Configuration":
        """Build a Configuration from the given dicts.

        Raises:
            HacsException: if the configuration is missing/empty or malformed.
        """
        # Check emptiness first so a None `configuration` raises HacsException
        # instead of crashing with AttributeError on `.get` below.
        if not configuration:
            raise HacsException("Configuration is not valid.")

        # A bool here means the key existed in YAML without a value.
        if isinstance(options, bool) or isinstance(configuration.get("options"), bool):
            raise HacsException("Configuration is not valid.")

        if options is None:
            options = {}

        config = Configuration()

        config.config = configuration
        config.options = options

        # Options take precedence as they are applied last.
        for conf_type in [configuration, options]:
            for key in conf_type:
                setattr(config, key, conf_type[key])

        return config
|
|
@ -0,0 +1,204 @@
|
|||
"""Data handler for HACS."""
|
||||
import os
|
||||
|
||||
from queueman import QueueManager
|
||||
|
||||
from custom_components.hacs.const import INTEGRATION_VERSION
|
||||
from custom_components.hacs.helpers.classes.manifest import HacsManifest
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.helpers.functions.register_repository import (
|
||||
register_repository,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.store import (
|
||||
async_load_from_store,
|
||||
async_save_to_store,
|
||||
get_store_for_key,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
class HacsData:
    """Read and write HACS state (settings + repository metadata) to .storage."""

    def __init__(self):
        """Initialize."""
        self.logger = getLogger()
        self.hacs = get_hacs()
        self.queue = QueueManager()
        # str(repository id) -> serialized repository data; rebuilt on every write.
        self.content = {}

    async def async_write(self):
        """Write content to the store files."""
        # Never persist while background work is running or HACS is disabled,
        # to avoid writing a half-updated state.
        if self.hacs.status.background_task or self.hacs.system.disabled:
            return

        self.logger.debug("Saving data")

        # HACS settings
        await async_save_to_store(
            self.hacs.hass,
            "hacs",
            {
                "view": self.hacs.configuration.frontend_mode,
                "compact": self.hacs.configuration.frontend_compact,
                "onboarding_done": self.hacs.configuration.onboarding_done,
            },
        )

        # Repositories
        self.content = {}
        for repository in self.hacs.repositories or []:
            self.queue.add(self.async_store_repository_data(repository))

        if not self.queue.has_pending_tasks:
            self.logger.debug("Nothing in the queue")
        elif self.queue.running:
            self.logger.debug("Queue is already running")
        else:
            await self.queue.execute()
        await async_save_to_store(self.hacs.hass, "repositories", self.content)
        self.hacs.hass.bus.async_fire("hacs/repository", {})
        # Fix: use async_fire here — we are inside the event loop, and the
        # sync `bus.fire` was inconsistent with the event fired just above.
        self.hacs.hass.bus.async_fire("hacs/config", {})

    async def async_store_repository_data(self, repository):
        """Serialize one repository into self.content (and its own store if installed)."""
        repository_manifest = repository.repository_manifest.manifest
        data = {
            "authors": repository.data.authors,
            "category": repository.data.category,
            "description": repository.data.description,
            "domain": repository.data.domain,
            "downloads": repository.data.downloads,
            "full_name": repository.data.full_name,
            "first_install": repository.status.first_install,
            "installed_commit": repository.data.installed_commit,
            "installed": repository.data.installed,
            "last_commit": repository.data.last_commit,
            "last_release_tag": repository.data.last_version,
            "last_updated": repository.data.last_updated,
            "name": repository.data.name,
            "new": repository.data.new,
            "repository_manifest": repository_manifest,
            "selected_tag": repository.data.selected_tag,
            "show_beta": repository.data.show_beta,
            "stars": repository.data.stargazers_count,
            "topics": repository.data.topics,
            "version_installed": repository.data.installed_version,
        }
        # Installed repositories also get a dedicated per-repository store so a
        # full data dump can be restored on startup.
        if repository.data.installed and (
            repository.data.installed_commit or repository.data.installed_version
        ):
            await async_save_to_store(
                self.hacs.hass,
                f"hacs/{repository.data.id}.hacs",
                repository.data.to_json(),
            )
        self.content[str(repository.data.id)] = data

    async def restore(self):
        """Restore saved data.

        Returns True on success (including a fresh install), False on failure.
        """
        hacs = await async_load_from_store(self.hacs.hass, "hacs")
        repositories = await async_load_from_store(self.hacs.hass, "repositories")
        try:
            if not hacs and not repositories:
                # Assume new install
                self.hacs.status.new = True
                return True
            self.logger.info("Restore started")
            self.hacs.status.new = False

            # Hacs
            self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
            self.hacs.configuration.frontend_compact = hacs.get("compact", False)
            self.hacs.configuration.onboarding_done = hacs.get("onboarding_done", False)

            # Prepare one store handle per saved repository.
            stores = {}
            for entry in repositories or []:
                stores[entry] = get_store_for_key(self.hacs.hass, f"hacs/{entry}.hacs")

            stores_exist = {}

            def _populate_stores():
                # os.path.exists is blocking I/O, so run it in the executor.
                for entry in repositories or []:
                    stores_exist[entry] = os.path.exists(stores[entry].path)

            await self.hacs.hass.async_add_executor_job(_populate_stores)

            # Repositories
            for entry in repositories or []:
                self.queue.add(
                    self.async_restore_repository(
                        entry, repositories[entry], stores[entry], stores_exist[entry]
                    )
                )

            await self.queue.execute()

            self.logger.info("Restore done")
        except (Exception, BaseException) as exception:  # pylint: disable=broad-except
            self.logger.critical(f"[{exception}] Restore Failed!")
            return False
        return True

    async def async_restore_repository(
        self, entry, repository_data, store, store_exists
    ):
        """Re-register one repository and restore its attributes from storage."""
        if not self.hacs.is_known(entry):
            await register_repository(
                repository_data["full_name"], repository_data["category"], False
            )
        repository = [
            x
            for x in self.hacs.repositories
            if str(x.data.id) == str(entry)
            or x.data.full_name == repository_data["full_name"]
        ]
        if not repository:
            self.logger.error(f"Did not find {repository_data['full_name']} ({entry})")
            return

        repository = repository[0]

        # Restore repository attributes
        repository.data.id = entry
        repository.data.authors = repository_data.get("authors", [])
        repository.data.description = repository_data.get("description")
        repository.releases.last_release_object_downloads = repository_data.get(
            "downloads"
        )
        repository.data.last_updated = repository_data.get("last_updated")
        repository.data.topics = repository_data.get("topics", [])
        repository.data.domain = repository_data.get("domain", None)
        repository.data.stargazers_count = repository_data.get("stars", 0)
        repository.releases.last_release = repository_data.get("last_release_tag")
        repository.data.hide = repository_data.get("hide", False)
        repository.data.installed = repository_data.get("installed", False)
        repository.data.new = repository_data.get("new", True)
        repository.data.selected_tag = repository_data.get("selected_tag")
        repository.data.show_beta = repository_data.get("show_beta", False)
        repository.data.last_version = repository_data.get("last_release_tag")
        repository.data.last_commit = repository_data.get("last_commit")
        repository.data.installed_version = repository_data.get("version_installed")
        repository.data.installed_commit = repository_data.get("installed_commit")

        repository.repository_manifest = HacsManifest.from_dict(
            repository_data.get("repository_manifest", {})
        )

        if repository.data.installed:
            repository.status.first_install = False

        if repository_data["full_name"] == "hacs/integration":
            # HACS itself is always installed; pin the running version.
            repository.data.installed_version = INTEGRATION_VERSION
            repository.data.installed = True

        # Load the full per-repository dump when its store file exists.
        restored = (await store.async_load() if store_exists else None) or {}

        if restored:
            repository.data.update_data(restored)
            if not repository.data.installed:
                repository.logger.debug(
                    "Should be installed but is not... Fixing that!"
                )
                repository.data.installed = True
|
|
@ -0,0 +1,360 @@
|
|||
"""Initialize the HACS base."""
|
||||
import json
|
||||
from datetime import timedelta
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
from queueman import QueueManager
|
||||
from queueman.exceptions import QueueManagerExecutionStillInProgress
|
||||
|
||||
from custom_components.hacs.helpers import HacsHelpers
|
||||
from custom_components.hacs.helpers.functions.get_list_from_default import (
|
||||
async_get_list_from_default,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.register_repository import (
|
||||
register_repository,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.remaining_github_calls import (
|
||||
get_fetch_updates_for,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.store import (
|
||||
async_load_from_store,
|
||||
async_save_to_store,
|
||||
)
|
||||
from custom_components.hacs.operational.setup_actions.categories import (
|
||||
async_setup_extra_stores,
|
||||
)
|
||||
from custom_components.hacs.share import (
|
||||
get_factory,
|
||||
get_queue,
|
||||
get_removed,
|
||||
is_removed,
|
||||
list_removed_repositories,
|
||||
)
|
||||
|
||||
from ..base import HacsBase
|
||||
from ..enums import HacsCategory, HacsStage
|
||||
|
||||
|
||||
class HacsStatus:
    """HacsStatus."""

    # NOTE: these are class-level flags; all code in this module mutates them
    # through the shared Hacs singleton instances.
    startup = True  # Cleared at the end of Hacs.startup_tasks
    new = False  # Set by HacsData.restore when no stored data exists
    background_task = False  # True while recurring/queue background work runs
    reloading_data = False
    upgrading_all = False
|
||||
|
||||
|
||||
class HacsFrontend:
    """Version bookkeeping for the HACS frontend."""

    version_running = None
    version_available = None
    version_expected = None
    update_pending = False
|
||||
|
||||
|
||||
class HacsCommon:
    """Common for HACS."""

    # NOTE(review): class-level mutable defaults — every instance shares these
    # lists; the code relies on that singleton-style behavior.
    categories = []  # Enabled category names (see recurring task filters)
    default = []  # str(repository id) of repositories from the default lists
    installed = []
    skip = []
|
||||
|
||||
|
||||
class System:
    """System info."""

    status = HacsStatus()  # NOTE: shared class-level instance
    config_path = None
    ha_version = None
    disabled = False  # When True, HacsData.async_write refuses to persist
    running = False
    lovelace_mode = "storage"
|
||||
|
||||
|
||||
class Hacs(HacsBase, HacsHelpers):
    """The base class of HACS, nested throughout the project."""

    # Class-level state shared via the HACS singleton.
    repositories = []
    repo = None
    data_repo = None
    data = None
    status = HacsStatus()
    configuration = None
    version = None
    session = None
    factory = get_factory()
    queue = get_queue()
    recuring_tasks = []  # Unsubscribe callbacks from async_track_time_interval
    common = HacsCommon()

    def get_by_id(self, repository_id):
        """Return the repository with the given ID, or None."""
        try:
            for repository in self.repositories:
                if str(repository.data.id) == str(repository_id):
                    return repository
        except (Exception, BaseException):  # pylint: disable=broad-except
            pass
        return None

    def get_by_name(self, repository_full_name):
        """Return the repository with the given full_name (case-insensitive), or None."""
        try:
            repository_full_name_lower = repository_full_name.lower()
            for repository in self.repositories:
                if repository.data.full_name_lower == repository_full_name_lower:
                    return repository
        except (Exception, BaseException):  # pylint: disable=broad-except
            pass
        return None

    def is_known(self, repository_id):
        """Return a bool if the repository is known."""
        # Generator avoids building the full ID list just for a membership test.
        repository_id = str(repository_id)
        return any(str(x.data.id) == repository_id for x in self.repositories)

    @property
    def sorted_by_name(self):
        """Return a sorted(by name) list of repository objects."""
        return sorted(self.repositories, key=lambda x: x.display_name)

    @property
    def sorted_by_repository_name(self):
        """Return a sorted(by repository_name) list of repository objects."""
        return sorted(self.repositories, key=lambda x: x.data.full_name)

    async def register_repository(self, full_name, category, check=True):
        """Register a repository."""
        await register_repository(full_name, category, check=check)

    async def startup_tasks(self, _event=None):
        """Tasks that are started after startup."""
        await self.async_set_stage(HacsStage.STARTUP)
        self.status.background_task = True
        await async_setup_extra_stores()
        self.hass.bus.async_fire("hacs/status", {})

        await self.handle_critical_repositories_startup()
        await self.handle_critical_repositories()
        await self.async_load_default_repositories()
        await self.clear_out_removed_repositories()

        # Schedule the recurring background jobs; keep the unsubscribe
        # callbacks so they could be cancelled later.
        self.recuring_tasks.append(
            self.hass.helpers.event.async_track_time_interval(
                self.recurring_tasks_installed, timedelta(minutes=30)
            )
        )
        self.recuring_tasks.append(
            self.hass.helpers.event.async_track_time_interval(
                self.recurring_tasks_all, timedelta(minutes=800)
            )
        )
        self.recuring_tasks.append(
            self.hass.helpers.event.async_track_time_interval(
                self.prosess_queue, timedelta(minutes=10)
            )
        )

        self.hass.bus.async_fire("hacs/reload", {"force": True})
        await self.recurring_tasks_installed()

        await self.prosess_queue()

        self.status.startup = False
        self.status.background_task = False
        self.hass.bus.async_fire("hacs/status", {})
        await self.async_set_stage(HacsStage.RUNNING)

    async def handle_critical_repositories_startup(self):
        """Handled critical repositories during startup."""
        alert = False
        critical = await async_load_from_store(self.hass, "critical")
        if not critical:
            return
        for repo in critical:
            if not repo["acknowledged"]:
                alert = True
        if alert:
            self.log.critical("URGENT!: Check the HACS panel!")
            self.hass.components.persistent_notification.create(
                title="URGENT!", message="**Check the HACS panel!**"
            )

    async def handle_critical_repositories(self):
        """Handled critical repositories during runtime."""
        # Get critical repositories
        critical_queue = QueueManager()
        instored = []
        critical = []
        was_installed = False

        try:
            critical = await self.data_repo.get_contents("critical")
            critical = json.loads(critical.content)
        except AIOGitHubAPIException:
            # No critical file available (or GitHub hiccup) — nothing to do.
            pass

        if not critical:
            self.log.debug("No critical repositories")
            return

        stored_critical = await async_load_from_store(self.hass, "critical")

        # Entries already stored have been handled (and acknowledged) before.
        for stored in stored_critical or []:
            instored.append(stored["repository"])

        stored_critical = []

        for repository in critical:
            removed_repo = get_removed(repository["repository"])
            removed_repo.removal_type = "critical"
            repo = self.get_by_name(repository["repository"])

            stored = {
                "repository": repository["repository"],
                "reason": repository["reason"],
                "link": repository["link"],
                "acknowledged": True,
            }
            if repository["repository"] not in instored:
                if repo is not None and repo.installed:
                    self.log.critical(
                        "Removing repository %s, it is marked as critical",
                        repository["repository"],
                    )
                    was_installed = True
                    stored["acknowledged"] = False
                    # Remove from HACS
                    # Fix: uninstall the repository *object* (repo); `repository`
                    # is the JSON dict from the critical file and has no
                    # uninstall() method, so the old call raised AttributeError.
                    critical_queue.add(repo.uninstall())
                    repo.remove()

            stored_critical.append(stored)
            removed_repo.update_data(stored)

        # Uninstall
        await critical_queue.execute()

        # Save to FS
        await async_save_to_store(self.hass, "critical", stored_critical)

        # Restart HASS
        if was_installed:
            self.log.critical("Restarting Home Assistant")
            self.hass.async_create_task(self.hass.async_stop(100))

    async def prosess_queue(self, _notarealarg=None):
        """Process the pending task queue, respecting the GitHub rate limit."""
        if not self.queue.has_pending_tasks:
            self.log.debug("Nothing in the queue")
            return
        if self.queue.running:
            self.log.debug("Queue is already running")
            return

        can_update = await get_fetch_updates_for(self.github)
        if can_update == 0:
            self.log.info("HACS is ratelimited, repository updates will resume later.")
        else:
            self.status.background_task = True
            self.hass.bus.async_fire("hacs/status", {})
            try:
                await self.queue.execute(can_update)
            except QueueManagerExecutionStillInProgress:
                # Another caller started the queue in the meantime — fine.
                pass
            self.status.background_task = False
            self.hass.bus.async_fire("hacs/status", {})

    async def recurring_tasks_installed(self, _notarealarg=None):
        """Recurring tasks for installed repositories."""
        self.log.debug("Starting recurring background task for installed repositories")
        self.status.background_task = True
        self.hass.bus.async_fire("hacs/status", {})

        for repository in self.repositories:
            if (
                repository.data.installed
                and repository.data.category in self.common.categories
            ):
                self.queue.add(self.factory.safe_update(repository))

        await self.handle_critical_repositories()
        self.status.background_task = False
        self.hass.bus.async_fire("hacs/status", {})
        await self.data.async_write()
        self.log.debug("Recurring background task for installed repositories done")

    async def recurring_tasks_all(self, _notarealarg=None):
        """Recurring tasks for all repositories."""
        self.log.debug("Starting recurring background task for all repositories")
        await async_setup_extra_stores()
        self.status.background_task = True
        self.hass.bus.async_fire("hacs/status", {})

        for repository in self.repositories:
            if repository.data.category in self.common.categories:
                self.queue.add(self.factory.safe_common_update(repository))

        await self.async_load_default_repositories()
        await self.clear_out_removed_repositories()
        self.status.background_task = False
        await self.data.async_write()
        self.hass.bus.async_fire("hacs/status", {})
        self.hass.bus.async_fire("hacs/repository", {"action": "reload"})
        self.log.debug("Recurring background task for all repositories done")

    async def clear_out_removed_repositories(self):
        """Clear out removed (blacklisted) repositories."""
        need_to_save = False
        for removed in list_removed_repositories():
            repository = self.get_by_name(removed.repository)
            if repository is not None:
                if repository.data.installed and removed.removal_type != "critical":
                    # Installed by the user: only warn, never auto-remove.
                    self.log.warning(
                        f"You have {repository.data.full_name} installed with HACS "
                        + "this repository has been removed, please consider removing it. "
                        + f"Removal reason ({removed.removal_type})"
                    )
                else:
                    need_to_save = True
                    repository.remove()

        if need_to_save:
            await self.data.async_write()

    async def async_load_default_repositories(self):
        """Load known repositories."""
        self.log.info("Loading known repositories")

        for item in await async_get_list_from_default(HacsCategory.REMOVED):
            removed = get_removed(item["repository"])
            removed.reason = item.get("reason")
            removed.link = item.get("link")
            removed.removal_type = item.get("removal_type")

        for category in self.common.categories or []:
            self.queue.add(self.async_get_category_repositories(HacsCategory(category)))

        await self.prosess_queue()

    async def async_get_category_repositories(self, category: HacsCategory):
        """Get repositories from category."""
        repositories = await async_get_list_from_default(category)
        for repo in repositories:
            if is_removed(repo):
                continue
            repository = self.get_by_name(repo)
            if repository is not None:
                # Already registered: just make sure it is tracked as a default.
                if str(repository.data.id) not in self.common.default:
                    self.common.default.append(str(repository.data.id))
                continue
            self.queue.add(self.factory.safe_register(repo, category))

    async def async_set_stage(self, stage: str) -> None:
        """Set the stage of HACS."""
        self.stage = HacsStage(stage)
        self.log.info("Stage changed: %s", self.stage)
        self.hass.bus.async_fire("hacs/stage", {"stage": self.stage})
|
|
@ -0,0 +1,17 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from custom_components.hacs.helpers.methods import (
|
||||
HacsHelperMethods,
|
||||
RepositoryHelperMethods,
|
||||
)
|
||||
from custom_components.hacs.helpers.properties import RepositoryHelperProperties
|
||||
|
||||
|
||||
class RepositoryHelpers(
    RepositoryHelperMethods,
    RepositoryHelperProperties,
):
    """Mixin bundle combining the shared repository methods and properties."""
|
||||
|
||||
|
||||
class HacsHelpers(HacsHelperMethods):
    """Mixin exposing the shared helper methods to the Hacs base class."""
|
|
@ -0,0 +1,13 @@
|
|||
"""Custom Exceptions."""
|
||||
|
||||
|
||||
class HacsException(Exception):
    """Base exception for all HACS errors."""
|
||||
|
||||
|
||||
class HacsRepositoryArchivedException(HacsException):
    """Raised for repositories that are archived on GitHub."""
|
||||
|
||||
|
||||
class HacsExpectedException(HacsException):
    """Raised for expected/benign failures that should not be logged as errors."""
|
|
@ -0,0 +1,43 @@
|
|||
"""
|
||||
Manifest handling of a repository.
|
||||
|
||||
https://hacs.xyz/docs/publish/start#hacsjson
|
||||
"""
|
||||
from typing import List
|
||||
|
||||
import attr
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True)
class HacsManifest:
    """Representation of a repository's hacs.json manifest."""

    name: str = None
    content_in_root: bool = False
    zip_release: bool = False
    filename: str = None
    # Mutable defaults use attr.Factory so instances never share one
    # dict/list — a bare `= {}` / `= []` default is shared by all instances.
    manifest: dict = attr.Factory(dict)
    hacs: str = None
    hide_default_branch: bool = False
    domains: List[str] = attr.Factory(list)
    country: List[str] = attr.Factory(list)
    homeassistant: str = None
    persistent_directory: str = None
    iot_class: str = None
    render_readme: bool = False

    @staticmethod
    def from_dict(manifest: dict) -> "HacsManifest":
        """Build a HacsManifest from a raw manifest dict.

        Raises:
            HacsException: if the manifest data is missing (None).
        """
        if manifest is None:
            raise HacsException("Missing manifest data")

        manifest_data = HacsManifest()

        # Keep the raw dict around in addition to the typed attributes.
        manifest_data.manifest = manifest

        for key, value in manifest.items():
            setattr(manifest_data, key, value)
        return manifest_data
|
|
@ -0,0 +1,21 @@
|
|||
"""Object for removed repositories."""
|
||||
import attr
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True)
class RemovedRepository:
    """Details about a repository that has been removed from HACS."""

    repository: str = None
    reason: str = None
    link: str = None
    removal_type: str = None  # archived, not_compliant, critical, dev, broken
    acknowledged: bool = False

    def update_data(self, data: dict):
        """Update data of the repository."""
        # Only touch keys that already exist as attributes on this instance.
        for key, value in data.items():
            if key in vars(self):
                setattr(self, key, value)

    def to_json(self):
        """Return a JSON representation of the data."""
        return attr.asdict(self)
|
|
@ -0,0 +1,435 @@
|
|||
"""Repository."""
|
||||
# pylint: disable=broad-except, no-member
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
from queueman import QueueManager
|
||||
|
||||
from custom_components.hacs.helpers import RepositoryHelpers
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.manifest import HacsManifest
|
||||
from custom_components.hacs.helpers.classes.repositorydata import RepositoryData
|
||||
from custom_components.hacs.helpers.classes.validate import Validate
|
||||
from custom_components.hacs.helpers.functions.download import async_download_file
|
||||
from custom_components.hacs.helpers.functions.information import (
|
||||
get_info_md_content,
|
||||
get_repository,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.is_safe_to_remove import is_safe_to_remove
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.helpers.functions.misc import get_repository_name
|
||||
from custom_components.hacs.helpers.functions.save import async_save_file
|
||||
from custom_components.hacs.helpers.functions.store import async_remove_store
|
||||
from custom_components.hacs.helpers.functions.validate_repository import (
|
||||
common_update_data,
|
||||
common_validate,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.version_to_install import (
|
||||
version_to_install,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
class RepositoryVersions:
    """Installed vs available version/commit bookkeeping for a repository."""

    available = None
    available_commit = None
    installed = None
    installed_commit = None
|
||||
|
||||
|
||||
class RepositoryStatus:
    """Repository status."""

    hide = False
    installed = False
    last_updated = None
    new = True
    selected_tag = None
    show_beta = False
    track = True
    updated_info = False
    first_install = True  # Cleared when data is restored for an installed repo
|
||||
|
||||
|
||||
class RepositoryInformation:
    """RepositoryInformation."""

    # NOTE(review): `authors` and `topics` are class-level mutable defaults,
    # shared by all instances until an instance assigns its own value.
    additional_info = None
    authors = []
    category = None
    default_branch = None
    description = ""
    state = None
    full_name = None
    full_name_lower = None
    file_name = None
    javascript_type = None
    homeassistant_version = None
    last_updated = None
    uid = None
    stars = 0
    info = None
    name = None
    topics = []
|
||||
|
||||
|
||||
class RepositoryReleases:
    """RepositoyReleases."""

    # NOTE(review): `published_tags` and `objects` are class-level mutable
    # defaults, shared by all instances until reassigned per instance.
    last_release = None
    last_release_object = None
    last_release_object_downloads = None
    published_tags = []
    objects = []
    releases = False  # True when the repository publishes GitHub releases
    downloads = None
|
||||
|
||||
|
||||
class RepositoryPath:
    """Local and remote content paths for a repository."""

    local = None
    remote = None
|
||||
|
||||
|
||||
class RepositoryContent:
    """RepositoryContent."""

    # NOTE(review): `files` and `objects` are class-level mutable defaults,
    # shared by all instances until reassigned per instance.
    path = None
    files = []
    objects = []
    single = False
|
||||
|
||||
|
||||
class HacsRepository(RepositoryHelpers):
    """Base class for a repository managed by HACS.

    Holds the GitHub repository object, parsed metadata (``self.data``),
    release/version state, and implements the lifecycle shared by all
    categories: validation, registration, update, download, uninstall.
    """

    def __init__(self):
        """Set up HacsRepository."""
        self.hacs = get_hacs()  # shared HACS singleton
        self.data = RepositoryData()
        self.content = RepositoryContent()
        self.content.path = RepositoryPath()
        self.information = RepositoryInformation()
        self.repository_object = None  # GitHub repository object, set on registration
        self.status = RepositoryStatus()
        self.state = None
        self.force_branch = False
        self.integration_manifest = {}  # raw manifest.json content (integrations)
        self.repository_manifest = HacsManifest.from_dict({})  # parsed hacs.json
        self.validate = Validate()
        self.releases = RepositoryReleases()
        self.versions = RepositoryVersions()
        self.pending_restart = False
        self.tree = []  # repository tree objects
        self.treefiles = []  # flat list of file names/paths from the tree
        self.ref = None  # git ref (version/branch) currently targeted
        self.logger = getLogger()

    def __str__(self) -> str:
        """Return a string representation of the repository."""
        return f"<{self.data.category.title()} {self.data.full_name}>"

    @property
    def display_name(self):
        """Return the name used for display purposes."""
        return get_repository_name(self)

    @property
    def display_status(self):
        """Return the status keyword used by the frontend.

        Order matters: "new" wins over everything, then the pending
        restart/upgrade states, then plain installed.
        """
        if self.data.new:
            status = "new"
        elif self.pending_restart:
            status = "pending-restart"
        elif self.pending_upgrade:
            status = "pending-upgrade"
        elif self.data.installed:
            status = "installed"
        else:
            status = "default"
        return status

    @property
    def display_status_description(self):
        """Return a human readable description of display_status."""
        description = {
            "default": "Not installed.",
            "pending-restart": "Restart pending.",
            "pending-upgrade": "Upgrade pending.",
            "installed": "No action required.",
            "new": "This is a newly added repository.",
        }
        return description[self.display_status]

    @property
    def display_installed_version(self):
        """Return the installed version, falling back to the installed commit, else ""."""
        if self.data.installed_version is not None:
            installed = self.data.installed_version
        else:
            if self.data.installed_commit is not None:
                installed = self.data.installed_commit
            else:
                installed = ""
        return installed

    @property
    def display_available_version(self):
        """Return the latest version, falling back to the latest commit, else ""."""
        if self.data.last_version is not None:
            available = self.data.last_version
        else:
            if self.data.last_commit is not None:
                available = self.data.last_commit
            else:
                available = ""
        return available

    @property
    def display_version_or_commit(self):
        """Return "version" when the repository publishes releases, else "commit"."""
        if self.data.releases:
            version_or_commit = "version"
        else:
            version_or_commit = "commit"
        return version_or_commit

    @property
    def main_action(self):
        """Return the main action label for the current display_status."""
        actions = {
            "new": "INSTALL",
            "default": "INSTALL",
            "installed": "REINSTALL",
            "pending-restart": "REINSTALL",
            "pending-upgrade": "UPGRADE",
        }
        return actions[self.display_status]

    async def common_validate(self, ignore_issues=False):
        """Common validation steps of the repository.

        Delegates to the module-level helper of the same name.
        """
        await common_validate(self, ignore_issues)

    async def common_registration(self):
        """Common registration steps of the repository."""
        # Attach repository
        if self.repository_object is None:
            self.repository_object = await get_repository(
                self.hacs.session, self.hacs.configuration.token, self.data.full_name
            )
            self.data.update_data(self.repository_object.attributes)

        # NOTE(review): the three self-assignments below are no-ops —
        # presumably leftovers from an earlier refactor; confirm before
        # removing.
        # Set topics
        self.data.topics = self.data.topics

        # Set stargazers_count
        self.data.stargazers_count = self.data.stargazers_count

        # Set description
        self.data.description = self.data.description

        if self.hacs.system.action:
            # Running as a GitHub action: a description is mandatory.
            if self.data.description is None or len(self.data.description) == 0:
                raise HacsException("::error:: Missing repository description")

    async def common_update(self, ignore_issues=False):
        """Common information update steps of the repository."""
        self.logger.debug("%s Getting repository information", self)

        # Attach repository
        await common_update_data(self, ignore_issues)

        # Update last updated timestamp from the GitHub attributes.
        self.data.last_updated = self.repository_object.attributes.get("pushed_at", 0)

        # Update last available commit
        await self.repository_object.set_last_commit()
        self.data.last_commit = self.repository_object.last_commit

        # Get the content of hacs.json
        await self.get_repository_manifest_content()

        # Update "info.md"
        self.information.additional_info = await get_info_md_content(self)

    async def download_zip_files(self, validate):
        """Download ZIP archive from repository release."""
        download_queue = QueueManager()
        try:
            contents = False

            for release in self.releases.objects:
                self.logger.info(
                    "%s ref: %s --- tag: %s.", self, self.ref, release.tag_name
                )
                # self.ref is "tags/<version>"; compare against the tag part.
                if release.tag_name == self.ref.split("/")[1]:
                    contents = release.assets

            if not contents:
                return validate

            for content in contents or []:
                download_queue.add(self.async_download_zip_file(content, validate))

            await download_queue.execute()
        except (Exception, BaseException):
            validate.errors.append("Download was not completed")

        return validate

    async def async_download_zip_file(self, content, validate):
        """Download ZIP archive from repository release."""
        try:
            filecontent = await async_download_file(content.download_url)

            if filecontent is None:
                validate.errors.append(f"[{content.name}] was not downloaded")
                return

            # Save the archive to a temp file, then extract into the local path.
            result = await async_save_file(
                f"{tempfile.gettempdir()}/{self.data.filename}", filecontent
            )
            with zipfile.ZipFile(
                f"{tempfile.gettempdir()}/{self.data.filename}", "r"
            ) as zip_file:
                zip_file.extractall(self.content.path.local)

            # NOTE(review): unlike the module-level download helper, the
            # temporary ZIP file is not removed here — confirm intended.
            if result:
                self.logger.info("%s Download of %s completed", self, content.name)
                return
            validate.errors.append(f"[{content.name}] was not downloaded")
        except (Exception, BaseException):
            validate.errors.append("Download was not completed")

        return validate

    async def download_content(self, validate, _directory_path, _local_directory, _ref):
        """Download the content of a directory.

        The underscore-prefixed parameters are unused; the signature is
        kept for backwards compatibility with existing callers.
        """
        from custom_components.hacs.helpers.functions.download import download_content

        validate = await download_content(self)
        return validate

    async def get_repository_manifest_content(self):
        """Get the content of the hacs.json file."""
        if not "hacs.json" in [x.filename for x in self.tree]:
            if self.hacs.system.action:
                raise HacsException(
                    "::error:: No hacs.json file in the root of the repository."
                )
            return
        if self.hacs.system.action:
            self.logger.info("%s Found hacs.json", self)

        self.ref = version_to_install(self)

        try:
            manifest = await self.repository_object.get_contents("hacs.json", self.ref)
            self.repository_manifest = HacsManifest.from_dict(
                json.loads(manifest.content)
            )
            self.data.update_data(json.loads(manifest.content))
        except (AIOGitHubAPIException, Exception) as exception:  # Gotta Catch 'Em All
            if self.hacs.system.action:
                raise HacsException(
                    f"::error:: hacs.json file is not valid ({exception})."
                ) from None
        if self.hacs.system.action:
            self.logger.info("%s hacs.json is valid", self)

    def remove(self):
        """Run remove tasks: drop this repository from the HACS registries."""
        self.logger.info("%s Starting removal", self)

        if self.data.id in self.hacs.common.installed:
            self.hacs.common.installed.remove(self.data.id)
        for repository in self.hacs.repositories:
            if repository.data.id == self.data.id:
                self.hacs.repositories.remove(repository)

    async def uninstall(self):
        """Run uninstall tasks."""
        self.logger.info("%s Uninstalling", self)
        if not await self.remove_local_directory():
            raise HacsException("Could not uninstall")
        self.data.installed = False
        if self.data.category == "integration":
            if self.data.config_flow:
                await self.reload_custom_components()
            else:
                # No config flow: Home Assistant must restart to unload it.
                self.pending_restart = True
        elif self.data.category == "theme":
            # Best-effort theme reload; failures are ignored on purpose.
            try:
                await self.hacs.hass.services.async_call(
                    "frontend", "reload_themes", {}
                )
            except (Exception, BaseException):  # pylint: disable=broad-except
                pass
        if self.data.full_name in self.hacs.common.installed:
            self.hacs.common.installed.remove(self.data.full_name)

        await async_remove_store(self.hacs.hass, f"hacs/{self.data.id}.hacs")

        self.data.installed_version = None
        self.data.installed_commit = None
        self.hacs.hass.bus.async_fire(
            "hacs/repository",
            {"id": 1337, "action": "uninstall", "repository": self.data.full_name},
        )

    async def remove_local_directory(self):
        """Remove the local content of the repository; return True on success."""
        import shutil
        from asyncio import sleep

        try:
            if self.data.category == "python_script":
                # python_scripts are single files, not directories.
                local_path = f"{self.content.path.local}/{self.data.name}.py"
            elif self.data.category == "theme":
                # Remove the theme YAML first, then the content directory.
                if os.path.exists(
                    f"{self.hacs.core.config_path}/{self.hacs.configuration.theme_path}/{self.data.name}.yaml"
                ):
                    os.remove(
                        f"{self.hacs.core.config_path}/{self.hacs.configuration.theme_path}/{self.data.name}.yaml"
                    )
                local_path = self.content.path.local
            elif self.data.category == "integration":
                if not self.data.domain:
                    self.logger.error("%s Missing domain", self)
                    return False
                local_path = self.content.path.local
            else:
                local_path = self.content.path.local

            if os.path.exists(local_path):
                if not is_safe_to_remove(local_path):
                    self.logger.error(
                        "%s Path %s is blocked from removal", self, local_path
                    )
                    return False
                self.logger.debug("%s Removing %s", self, local_path)

                if self.data.category in ["python_script"]:
                    os.remove(local_path)
                else:
                    shutil.rmtree(local_path)

                # Wait for the filesystem to catch up before returning.
                while os.path.exists(local_path):
                    await sleep(1)
            else:
                self.logger.debug(
                    "%s Presumed local content path %s does not exist", self, local_path
                )

        except (Exception, BaseException) as exception:
            self.logger.debug(
                "%s Removing %s failed with %s", self, local_path, exception
            )
            return False
        return True
|
|
@ -0,0 +1,128 @@
|
|||
"""Repository data."""
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
import attr
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True)
class RepositoryData:
    """Mutable metadata describing a HACS repository.

    The bare list defaults are safe here: ``attr.s(auto_attribs=True)``
    converts literal mutable defaults into per-instance factories, so
    they are not shared between instances.
    """

    archived: bool = False
    authors: List[str] = []
    category: str = ""
    content_in_root: bool = False
    country: List[str] = []
    config_flow: bool = False
    default_branch: str = None
    description: str = ""
    domain: str = ""
    domains: List[str] = []
    downloads: int = 0
    file_name: str = ""
    filename: str = ""
    first_install: bool = False
    fork: bool = False
    full_name: str = ""
    hacs: str = None  # Minimum HACS version
    hide: bool = False
    hide_default_branch: bool = False
    homeassistant: str = None  # Minimum Home Assistant version
    id: int = 0
    iot_class: str = None
    installed: bool = False
    installed_commit: str = None
    installed_version: str = None
    open_issues: int = 0
    last_commit: str = None
    last_version: str = None
    last_updated: str = 0
    manifest_name: str = None
    new: bool = True
    persistent_directory: str = None
    pushed_at: str = ""
    releases: bool = False
    render_readme: bool = False
    published_tags: List[str] = []
    selected_tag: str = None
    show_beta: bool = False
    stargazers_count: int = 0
    topics: List[str] = []
    zip_release: bool = False

    @property
    def stars(self):
        """Return the stargazers count."""
        return self.stargazers_count or 0

    @property
    def name(self):
        """Return the name."""
        if self.category in ["integration", "netdaemon"]:
            return self.domain
        return self.full_name.split("/")[-1]

    def to_json(self):
        """Export to json."""
        return attr.asdict(self)

    @staticmethod
    def create_from_dict(source: dict):
        """Create a RepositoryData object from a dict.

        Delegates to ``update_data`` so creation and updating share one
        key-coercion implementation (the previous duplicated copy also
        leaked a debug ``print`` of every key).
        """
        data = RepositoryData()
        data.update_data(source)
        return data

    def update_data(self, data: dict):
        """Update data of the repository.

        Unknown keys are ignored. Known keys are coerced:
        - "pushed_at": parsed to datetime (with or without trailing "Z");
          empty strings are skipped.
        - "id": stored as str.
        - "country": a bare string is wrapped in a single-element list.
        """
        for key in data:
            if key not in self.__dict__:
                continue
            value = data[key]
            if key == "pushed_at":
                if value == "":
                    continue
                if "Z" in value:
                    setattr(
                        self, key, datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
                    )
                else:
                    setattr(
                        self, key, datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
                    )
            elif key == "id":
                setattr(self, key, str(value))
            elif key == "country":
                if isinstance(value, str):
                    setattr(self, key, [value])
                else:
                    setattr(self, key, value)
            else:
                setattr(self, key, value)
|
|
@ -0,0 +1,11 @@
|
|||
class Validate:
    """Collects validation errors for a single validation run."""

    def __init__(self):
        """Initialize with a fresh error list.

        The list is created per instance: the previous class-level
        ``errors = []`` was shared by every Validate instance, so errors
        recorded in one validation run leaked into all others.
        """
        self.errors = []

    @property
    def success(self):
        """Return bool if the validation was a success."""
        if self.errors:
            return False
        return True
|
|
@ -0,0 +1,74 @@
|
|||
"""HACS Configuration Schemas."""
|
||||
# pylint: disable=dangerous-default-value
|
||||
import voluptuous as vol
|
||||
|
||||
from custom_components.hacs.const import LOCALE
|
||||
|
||||
# Configuration:
|
||||
TOKEN = "token"
|
||||
SIDEPANEL_TITLE = "sidepanel_title"
|
||||
SIDEPANEL_ICON = "sidepanel_icon"
|
||||
FRONTEND_REPO = "frontend_repo"
|
||||
FRONTEND_REPO_URL = "frontend_repo_url"
|
||||
APPDAEMON = "appdaemon"
|
||||
NETDAEMON = "netdaemon"
|
||||
|
||||
# Options:
|
||||
COUNTRY = "country"
|
||||
DEBUG = "debug"
|
||||
RELEASE_LIMIT = "release_limit"
|
||||
EXPERIMENTAL = "experimental"
|
||||
|
||||
# Config group
|
||||
PATH_OR_URL = "frontend_repo_path_or_url"
|
||||
|
||||
|
||||
def hacs_base_config_schema(config: dict = None) -> dict:
    """Return a schema configuration dict for HACS.

    config: existing configuration values used as schema defaults; when
    omitted (or empty) a placeholder token is used. ``None`` replaces
    the previous mutable ``{}`` default argument.
    """
    if not config:
        config = {
            TOKEN: "xxxxxxxxxxxxxxxxxxxxxxxxxxx",
        }
    return {
        vol.Required(TOKEN, default=config.get(TOKEN)): str,
    }
|
||||
|
||||
|
||||
def hacs_config_option_schema(options: dict = None) -> dict:
    """Return a schema for HACS configuration options.

    options: existing option values used as schema defaults; when
    omitted (or empty) the documented defaults below are used. ``None``
    replaces the previous mutable ``{}`` default argument.
    """
    if not options:
        options = {
            APPDAEMON: False,
            COUNTRY: "ALL",
            DEBUG: False,
            EXPERIMENTAL: False,
            NETDAEMON: False,
            RELEASE_LIMIT: 5,
            SIDEPANEL_ICON: "hacs:hacs",
            SIDEPANEL_TITLE: "HACS",
            FRONTEND_REPO: "",
            FRONTEND_REPO_URL: "",
        }
    return {
        vol.Optional(SIDEPANEL_TITLE, default=options.get(SIDEPANEL_TITLE)): str,
        vol.Optional(SIDEPANEL_ICON, default=options.get(SIDEPANEL_ICON)): str,
        vol.Optional(RELEASE_LIMIT, default=options.get(RELEASE_LIMIT)): int,
        vol.Optional(COUNTRY, default=options.get(COUNTRY)): vol.In(LOCALE),
        vol.Optional(APPDAEMON, default=options.get(APPDAEMON)): bool,
        vol.Optional(NETDAEMON, default=options.get(NETDAEMON)): bool,
        vol.Optional(DEBUG, default=options.get(DEBUG)): bool,
        vol.Optional(EXPERIMENTAL, default=options.get(EXPERIMENTAL)): bool,
        # A local frontend path and a frontend URL are mutually exclusive.
        vol.Exclusive(FRONTEND_REPO, PATH_OR_URL): str,
        vol.Exclusive(FRONTEND_REPO_URL, PATH_OR_URL): str,
    }
|
||||
|
||||
|
||||
def hacs_config_combined() -> dict:
    """Return the base schema merged with the option schema."""
    combined = dict(hacs_base_config_schema())
    combined.update(hacs_config_option_schema())
    return combined
|
|
@ -0,0 +1,43 @@
|
|||
"""HACS Startup constrains."""
|
||||
# pylint: disable=bad-continuation
|
||||
import os
|
||||
|
||||
from custom_components.hacs.const import (
|
||||
CUSTOM_UPDATER_LOCATIONS,
|
||||
CUSTOM_UPDATER_WARNING,
|
||||
MINIMUM_HA_VERSION,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.misc import version_left_higher_then_right
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
def check_constrains():
    """Check HACS constrains.

    Both constraints must pass for startup to continue.
    """
    return constrain_custom_updater() and constrain_version()
|
||||
|
||||
|
||||
def constrain_custom_updater():
    """Check if custom_updater exist.

    Returns False (and logs a critical warning) when any known
    custom_updater file is present in the configuration directory.
    """
    hacs = get_hacs()
    for location in CUSTOM_UPDATER_LOCATIONS:
        candidate = location.format(hacs.core.config_path)
        if os.path.exists(candidate):
            hacs.log.critical(CUSTOM_UPDATER_WARNING.format(candidate))
            return False
    return True
|
||||
|
||||
|
||||
def constrain_version():
    """Check if the version is valid.

    Returns True when the running Home Assistant version meets the
    minimum required version, otherwise logs critically and returns False.
    """
    hacs = get_hacs()
    if version_left_higher_then_right(hacs.system.ha_version, MINIMUM_HA_VERSION):
        return True
    hacs.log.critical(
        "You need HA version %s or newer to use this integration.",
        MINIMUM_HA_VERSION,
    )
    return False
|
|
@ -0,0 +1,246 @@
|
|||
"""Helpers to download repository content."""
|
||||
import os
|
||||
import pathlib
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
import async_timeout
|
||||
import backoff
|
||||
from queueman import QueueManager, concurrent
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.filters import (
|
||||
filter_content_return_one_of_type,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.helpers.functions.save import async_save_file
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
class FileInformation:
    """Simple record describing a single downloadable file."""

    def __init__(self, url, path, name):
        """Store the download URL, repository path and file name."""
        self.download_url = url  # direct download URL
        self.path = path  # full path within the repository
        self.name = name  # bare file name
|
||||
|
||||
|
||||
@backoff.on_exception(backoff.expo, Exception, max_tries=5)
async def async_download_file(url):
    """Download files, and return the content.

    Retried with exponential backoff (max 5 tries) by the decorator, so
    a raised HacsException triggers a retry before propagating.
    Returns None when *url* is None.
    """
    hacs = get_hacs()
    if url is None:
        return

    if "tags/" in url:
        # Release-asset URLs carry a "tags/" segment that must be stripped.
        url = url.replace("tags/", "")

    _LOGGER.debug("Downloading %s", url)

    result = None

    # NOTE(review): the ``loop=`` argument and the synchronous ``with``
    # form are deprecated/removed in newer async_timeout releases —
    # confirm the pinned async_timeout version before upgrading.
    with async_timeout.timeout(60, loop=hacs.hass.loop):
        request = await hacs.session.get(url)

        # Make sure that we got a valid result
        if request.status == 200:
            result = await request.read()
        else:
            raise HacsException(
                "Got status code {} when trying to download {}".format(
                    request.status, url
                )
            )

    return result
|
||||
|
||||
|
||||
def should_try_releases(repository):
    """Return a boolean indicating whether to download releases or not."""
    data = repository.data
    # Explicit ZIP releases on a non-default ref always use release assets.
    if data.zip_release and data.filename.endswith(".zip"):
        if repository.ref != data.default_branch:
            return True
    # Tracking the default branch means downloading branch content instead.
    if repository.ref == data.default_branch:
        return False
    # Only plugins and themes support release downloads, and only when
    # the repository actually publishes releases.
    if data.category not in ("plugin", "theme"):
        return False
    return bool(data.releases)
|
||||
|
||||
|
||||
def gather_files_to_download(repository):
    """Return a list of file objects to be downloaded.

    Resolution order:
    1. Release assets matching the target ref, when the repository
       qualifies (see should_try_releases).
    2. The single declared file for single-file repositories.
    3. Root/dist JS files for plugins.
    4. Otherwise every file under the remote content path.
    """
    files = []
    tree = repository.tree
    ref = f"{repository.ref}".replace("tags/", "")
    releaseobjects = repository.releases.objects
    category = repository.data.category
    remotelocation = repository.content.path.remote

    # 1) Release assets attached to the release matching the target ref.
    if should_try_releases(repository):
        for release in releaseobjects or []:
            if ref == release.tag_name:
                for asset in release.assets or []:
                    files.append(asset)
        if files:
            return files

    # 2) Single-file repositories: only the declared file name.
    if repository.content.single:
        for treefile in tree:
            if treefile.filename == repository.data.file_name:
                files.append(
                    FileInformation(
                        treefile.download_url, treefile.full_path, treefile.filename
                    )
                )
        return files

    # 3) Plugins: files from the repository root or the dist/ directory.
    if category == "plugin":
        for treefile in tree:
            if treefile.path in ["", "dist"]:
                if remotelocation == "dist" and not treefile.filename.startswith(
                    "dist"
                ):
                    continue
                if not remotelocation:
                    # No declared remote location: only root-level JS files.
                    if not treefile.filename.endswith(".js"):
                        continue
                    if treefile.path != "":
                        continue
                if not treefile.is_directory:
                    files.append(
                        FileInformation(
                            treefile.download_url, treefile.full_path, treefile.filename
                        )
                    )
        if files:
            return files

    # 4) Everything else: every file under the remote content path.
    if repository.data.content_in_root:
        if not repository.data.filename:
            if category == "theme":
                # Themes keep only the first YAML file found.
                tree = filter_content_return_one_of_type(
                    repository.tree, "", "yaml", "full_path"
                )

    for path in tree:
        if path.is_directory:
            continue
        if path.full_path.startswith(repository.content.path.remote):
            files.append(
                FileInformation(path.download_url, path.full_path, path.filename)
            )
    return files
|
||||
|
||||
|
||||
async def download_zip_files(repository, validate):
    """Download ZIP archive from repository release.

    Queues one download per asset of the release matching the target
    ref; any failure is recorded on *validate* instead of raising.
    """
    queue = QueueManager()
    assets = []
    try:
        # Locate the assets attached to the release for the target ref.
        for release in repository.releases.objects:
            repository.logger.info(
                f"ref: {repository.ref} --- tag: {release.tag_name}"
            )
            if release.tag_name == repository.ref.split("/")[1]:
                assets = release.assets

        if not assets:
            return validate

        for asset in assets:
            queue.add(async_download_zip_file(repository, asset, validate))

        await queue.execute()
    except (Exception, BaseException) as exception:  # pylint: disable=broad-except
        validate.errors.append(f"Download was not completed [{exception}]")

    return validate
|
||||
|
||||
|
||||
async def async_download_zip_file(repository, content, validate):
    """Download ZIP archive from repository release.

    Downloads one release asset, saves it to a temp file, extracts it
    into the repository's local content path, and removes the temp
    archive. Failures are recorded on *validate* rather than raised.
    """
    try:
        filecontent = await async_download_file(content.download_url)

        if filecontent is None:
            validate.errors.append(f"[{content.name}] was not downloaded.")
            return

        # Save the archive to a temp file, then extract it.
        result = await async_save_file(
            f"{tempfile.gettempdir()}/{repository.data.filename}", filecontent
        )
        with zipfile.ZipFile(
            f"{tempfile.gettempdir()}/{repository.data.filename}", "r"
        ) as zip_file:
            zip_file.extractall(repository.content.path.local)

        # Clean up the temporary archive.
        os.remove(f"{tempfile.gettempdir()}/{repository.data.filename}")

        if result:
            repository.logger.info(f"Download of {content.name} completed")
            return
        validate.errors.append(f"[{content.name}] was not downloaded.")
    except (Exception, BaseException) as exception:  # pylint: disable=broad-except
        validate.errors.append(f"Download was not completed [{exception}]")

    return validate
|
||||
|
||||
|
||||
async def download_content(repository):
    """Download the content of a directory.

    Gathers the files to fetch, queues concurrent downloads, and returns
    the repository's Validate object. Raises HacsException when there is
    nothing to download.
    """
    queue = QueueManager()
    contents = gather_files_to_download(repository)
    repository.logger.debug(repository.data.filename)
    if not contents:
        raise HacsException("No content to download")

    for content in contents:
        # When a specific file is declared in the repository root,
        # skip everything else.
        if repository.data.content_in_root and repository.data.filename:
            if content.name != repository.data.filename:
                continue
        queue.add(dowload_repository_content(repository, content))
    await queue.execute()
    return repository.validate
|
||||
|
||||
|
||||
@concurrent(10)
async def dowload_repository_content(repository, content):
    """Download content.

    Downloads one file and saves it under the repository's local content
    path, mirroring the remote layout. (The historical "dowload" typo in
    the name is kept — callers reference it by name.)
    """
    repository.logger.debug(f"Downloading {content.name}")

    filecontent = await async_download_file(content.download_url)

    if filecontent is None:
        repository.validate.errors.append(f"[{content.name}] was not downloaded.")
        return

    # Save the content of the file.
    if repository.content.single or content.path is None:
        local_directory = repository.content.path.local

    else:
        _content_path = content.path
        if not repository.data.content_in_root:
            # Strip the remote prefix so the local tree mirrors the
            # remote layout below the content path.
            _content_path = _content_path.replace(
                f"{repository.content.path.remote}", ""
            )

        # Drop the file name component to get the target directory.
        local_directory = f"{repository.content.path.local}/{_content_path}"
        local_directory = local_directory.split("/")
        del local_directory[-1]
        local_directory = "/".join(local_directory)

    # Check local directory
    pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)

    local_file_path = (f"{local_directory}/{content.name}").replace("//", "/")

    result = await async_save_file(local_file_path, filecontent)
    if result:
        repository.logger.info(f"Download of {content.name} completed")
        return
    repository.validate.errors.append(f"[{content.name}] was not downloaded.")
|
|
@ -0,0 +1,55 @@
|
|||
"""Filter functions."""
|
||||
|
||||
|
||||
def filter_content_return_one_of_type(
    content, namestartswith, filterfiltype, attr="name"
):
    """Filter *content*, keeping at most one file of *filterfiltype*.

    Items may be plain strings or objects whose *attr* attribute holds
    the comparable name. Every item whose name starts with
    *namestartswith* is kept, except that only the FIRST item whose name
    ends in ``.<filterfiltype>`` is included.
    """
    contents = []
    filetypefound = False
    suffix = f".{filterfiltype}"
    for item in content:
        # The original had two near-identical branches for str vs object
        # items; resolving the comparable name once removes the duplication.
        name = item if isinstance(item, str) else getattr(item, attr)
        if not name.startswith(namestartswith):
            continue
        if name.endswith(suffix):
            if not filetypefound:
                contents.append(item)
                filetypefound = True
            continue
        contents.append(item)
    return contents
|
||||
|
||||
|
||||
def find_first_of_filetype(content, filterfiltype, attr="name"):
    """Find the first of the file type.

    Items may be plain strings or objects whose *attr* attribute holds
    the name; returns the first matching name, or "" when none match.
    """
    suffix = f".{filterfiltype}"
    for candidate in content:
        name = candidate if isinstance(candidate, str) else getattr(candidate, attr)
        if name.endswith(suffix):
            return name
    return ""
|
||||
|
||||
|
||||
def get_first_directory_in_directory(content, dirname):
    """Return the first directory in dirname or None."""
    for path in content:
        # Strictly below dirname (not dirname itself) and a directory.
        below = path.full_path.startswith(dirname) and path.full_path != dirname
        if below and path.is_directory:
            return path.filename
    return None
|
|
@ -0,0 +1,35 @@
|
|||
"""Helper to get default repositories."""
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.information import get_repository
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
async def async_get_list_from_default(default: HacsCategory) -> List:
    """Get repositories from default list.

    Fetches the category file from the hacs/default repository and
    returns its parsed JSON list; returns an empty list (and logs the
    error) on any failure.
    """
    hacs = get_hacs()
    repositories = []

    try:
        repo = await get_repository(
            hacs.session,
            hacs.configuration.token,
            "hacs/default",
        )
        content = await repo.get_contents(default, repo.default_branch)
        repositories = json.loads(content.content)

    # The previous second handler for (AIOGitHubAPIException,
    # HacsException) had an identical body and is subsumed by this one.
    except (Exception, BaseException) as exception:  # pylint: disable=broad-except
        hacs.log.error(exception)

    hacs.log.debug("Got %s elements for %s", len(repositories), default)

    return repositories
|
|
@ -0,0 +1,225 @@
|
|||
"""Return repository information if any."""
|
||||
import json
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHub
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.template import render_template
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
def info_file(repository):
    """get info filename.

    Returns the first matching README variant when render_readme is
    set, otherwise the first matching INFO variant; "" when none exist.
    """
    if repository.data.render_readme:
        candidates = ["readme", "readme.md", "README", "README.md", "README.MD"]
    else:
        candidates = ["info", "info.md", "INFO", "INFO.md", "INFO.MD"]
    return next(
        (name for name in candidates if name in repository.treefiles), ""
    )
|
||||
|
||||
|
||||
async def get_info_md_content(repository):
    """Get the content of info.md

    Returns the rendered info/readme content, or "" when no file exists
    or it cannot be fetched. In GitHub-action mode a missing/unreadable
    file raises HacsException instead.
    """
    filename = info_file(repository)
    if not filename:
        return ""
    try:
        info = await repository.repository_object.get_contents(filename, repository.ref)
        if info is None:
            return ""
        # Neutralize inline SVG so it cannot be rendered by the frontend.
        info = info.content.replace("<svg", "<disabled").replace("</svg", "</disabled")
        return render_template(info, repository)
    except (
        ValueError,
        AIOGitHubAPIException,
        Exception,  # pylint: disable=broad-except
    ):
        if repository.hacs.system.action:
            raise HacsException("::error:: No info file found")
        return ""
|
||||
|
||||
|
||||
async def get_repository(session, token, repository_full_name):
    """Return a repository object or None.

    Raises HacsException (chained to the underlying error) when the
    repository cannot be fetched.
    """
    try:
        github = GitHub(token, session)
        repository = await github.get_repo(repository_full_name)
        return repository
    except (ValueError, AIOGitHubAPIException, Exception) as exception:
        # Chain the original exception so the root cause is preserved
        # in tracebacks (the previous bare raise discarded it as the
        # implicit "during handling" context only).
        raise HacsException(exception) from exception
|
||||
|
||||
|
||||
async def get_tree(repository, ref):
    """Return the repository tree.

    Wraps repository.get_tree, converting known fetch errors into
    HacsException.
    """
    try:
        return await repository.get_tree(ref)
    except (ValueError, AIOGitHubAPIException) as exception:
        raise HacsException(exception)
|
||||
|
||||
|
||||
async def get_releases(repository, prerelease=False, returnlimit=5):
    """Return the repository releases.

    Wraps repository.get_releases, converting known fetch errors into
    HacsException.
    """
    try:
        return await repository.get_releases(prerelease, returnlimit)
    except (ValueError, AIOGitHubAPIException) as exception:
        raise HacsException(exception)
|
||||
|
||||
|
||||
def get_frontend_version():
    """get the frontend version from the manifest.

    Returns the pinned hacs_frontend version string, or 0 when no such
    requirement is listed.
    """
    for requirement in read_hacs_manifest().get("requirements", []):
        if requirement.startswith("hacs_frontend"):
            # The pin looks like "hacs_frontend==<version>".
            return requirement.split("==")[1]
    return 0
|
||||
|
||||
|
||||
def read_hacs_manifest():
    """Reads the HACS manifest file and returns the contents."""
    hacs = get_hacs()
    manifest_path = f"{hacs.core.config_path}/custom_components/hacs/manifest.json"
    with open(manifest_path) as manifest:
        return json.loads(manifest.read())
|
||||
|
||||
|
||||
async def get_integration_manifest(repository):
    """Populate repository data from the integration's manifest.json.

    Reads manifest.json from the repository tree (root or the remote
    content path), copies the relevant keys onto ``repository.data`` and
    runs the extra validations used when running as a GitHub action.

    Raises:
        HacsException: If the manifest is missing, unreadable, fails an
            action validation, or lacks an expected key.
    """
    if repository.data.content_in_root:
        manifest_path = "manifest.json"
    else:
        manifest_path = f"{repository.content.path.remote}/manifest.json"
    if manifest_path not in [x.full_path for x in repository.tree]:
        raise HacsException(f"No file found '{manifest_path}'")
    try:
        manifest = await repository.repository_object.get_contents(
            manifest_path, repository.ref
        )
        manifest = json.loads(manifest.content)
    except Exception as exception:  # pylint: disable=broad-except
        # Broad by design (network/JSON/attribute errors all end the same
        # way), but no longer catches BaseException, and the cause is chained.
        raise HacsException(
            f"Could not read manifest.json [{exception}]"
        ) from exception

    try:
        repository.integration_manifest = manifest
        repository.data.authors = manifest["codeowners"]
        repository.data.domain = manifest["domain"]
        repository.data.manifest_name = manifest["name"]
        repository.data.config_flow = manifest.get("config_flow", False)

        # Extra validations only enforced when running as a GitHub action.
        if repository.hacs.system.action:
            if manifest.get("documentation") is None:
                raise HacsException("::error:: manifest.json is missing documentation")
            if manifest.get("homeassistant") is not None:
                raise HacsException(
                    "::error:: The homeassistant key in manifest.json is no longer valid"
                )

        # Set local path
        repository.content.path.local = repository.localpath

    except KeyError as exception:
        raise HacsException(
            f"Missing expected key {exception} in '{manifest_path}'"
        ) from exception
|
||||
|
||||
|
||||
def find_file_name(repository):
    """Get the filename to target."""
    # Dispatch to the category-specific filename resolver.
    handlers = {
        "plugin": get_file_name_plugin,
        "integration": get_file_name_integration,
        "theme": get_file_name_theme,
        "appdaemon": get_file_name_appdaemon,
        "python_script": get_file_name_python_script,
    }
    handler = handlers.get(repository.data.category)
    if handler is not None:
        handler(repository)

    if repository.hacs.system.action:
        repository.logger.info(f"filename {repository.data.file_name}")
        repository.logger.info(f"location {repository.content.path.remote}")
|
||||
|
||||
|
||||
def get_file_name_plugin(repository):
    """Get the filename to target for a plugin repository.

    Searches releases and the repository tree ("release", "dist", root)
    for an accepted plugin filename and records the match on
    ``repository.data.file_name`` / ``repository.content.path.remote``.
    """
    tree = repository.tree
    releases = repository.releases.objects

    if repository.data.content_in_root:
        possible_locations = [""]
    else:
        possible_locations = ["release", "dist", ""]

    # Handler for plug requirement 3
    if repository.data.filename:
        valid_filenames = [repository.data.filename]
    else:
        valid_filenames = [
            f"{repository.data.name.replace('lovelace-', '')}.js",
            f"{repository.data.name}.js",
            f"{repository.data.name}.umd.js",
            f"{repository.data.name}-bundle.js",
        ]

    for location in possible_locations:
        if location == "release":
            if not releases:
                continue
            release = releases[0]
            if not release.assets:
                continue
            asset = release.assets[0]
            for filename in valid_filenames:
                if filename == asset.name:
                    repository.data.file_name = filename
                    repository.content.path.remote = "release"
                    break

        else:
            for filename in valid_filenames:
                # Bug fix: compare against the candidate filename; the
                # previous code compared against a literal placeholder
                # string, so tree lookups could never match.
                if f"{location+'/' if location else ''}{filename}" in [
                    x.full_path for x in tree
                ]:
                    repository.data.file_name = filename.split("/")[-1]
                    repository.content.path.remote = location
                    break
|
||||
|
||||
|
||||
def get_file_name_integration(repository):
    """Get the filename to target."""
    # Intentionally a no-op: for integrations nothing is stored here by
    # this module (see get_integration_manifest for the manifest handling).
|
||||
|
||||
|
||||
def get_file_name_theme(repository):
    """Get the filename to target for a theme repository.

    Records the last .yaml file found under the remote content path.
    """
    remote = repository.content.path.remote
    for entry in repository.tree:
        path = entry.full_path
        if path.startswith(remote) and path.endswith(".yaml"):
            repository.data.file_name = entry.filename
|
||||
|
||||
|
||||
def get_file_name_appdaemon(repository):
    """Get the filename to target."""
    # Intentionally a no-op: AppDaemon apps are installed as whole
    # directories, so no single target filename is recorded here.
|
||||
|
||||
|
||||
def get_file_name_python_script(repository):
    """Get the filename to target for a python_script repository.

    Records the last .py file found under the remote content path.
    """
    remote = repository.content.path.remote
    for entry in repository.tree:
        path = entry.full_path
        if path.startswith(remote) and path.endswith(".py"):
            repository.data.file_name = entry.filename
|
|
@ -0,0 +1,20 @@
|
|||
"""Helper to check if path is safe to remove."""
|
||||
from pathlib import Path
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
def is_safe_to_remove(path: str) -> bool:
    """Helper to check if path is safe to remove.

    A path is unsafe when it is one of the HACS-managed root directories.
    """
    hacs = get_hacs()
    root = hacs.core.config_path
    protected = {
        Path(f"{root}/{hacs.configuration.appdaemon_path}"),
        Path(f"{root}/{hacs.configuration.netdaemon_path}"),
        Path(f"{root}/{hacs.configuration.plugin_path}"),
        Path(f"{root}/{hacs.configuration.python_script_path}"),
        Path(f"{root}/{hacs.configuration.theme_path}"),
        Path(f"{root}/custom_components/"),
    }
    return Path(path) not in protected
|
|
@ -0,0 +1,19 @@
|
|||
"""Custom logger for HACS."""
|
||||
# pylint: disable=invalid-name
|
||||
import logging
|
||||
import os
|
||||
|
||||
from ...const import PACKAGE_NAME
|
||||
|
||||
# Single shared logger instance for the whole HACS package.
_HACSLogger: logging.Logger = logging.getLogger(PACKAGE_NAME)

# When running inside a GitHub Action, use the workflow-command format so
# log levels show up as annotations in the action output.
if "GITHUB_ACTION" in os.environ:
    logging.basicConfig(
        format="::%(levelname)s:: %(message)s",
        level="DEBUG",
    )


def getLogger(_name: str = None) -> logging.Logger:
    """Return a Logger instance."""
    # The name argument is ignored; every caller shares _HACSLogger.
    return _HACSLogger
|
|
@ -0,0 +1,42 @@
|
|||
"""Helper functions: misc"""
|
||||
import re
|
||||
from functools import lru_cache
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
|
||||
RE_REPOSITORY = re.compile(
|
||||
r"(?:(?:.*github.com.)|^)([A-Za-z0-9-]+\/[\w.-]+?)(?:(?:\.git)?|(?:[^\w.-].*)?)$"
|
||||
)
|
||||
|
||||
|
||||
def get_repository_name(repository) -> str:
    """Return the name of the repository for use in the frontend.

    Preference order: hacs.json name, integration manifest name, then a
    title-cased form of the repository name.
    """
    manifest_name = repository.repository_manifest.name
    if manifest_name is not None:
        return manifest_name

    if (
        repository.data.category == "integration"
        and repository.integration_manifest
        and "name" in repository.integration_manifest
    ):
        return repository.integration_manifest["name"]

    stem = repository.data.full_name.split("/")[-1]
    return stem.replace("-", " ").replace("_", " ").title()
|
||||
|
||||
|
||||
# NOTE: name kept as-is ("then" sic) — it is part of the public API.
@lru_cache(maxsize=1024)
def version_left_higher_then_right(left: str, right: str) -> bool:
    """Return a bool if source is newer than target, will also be true if identical."""
    # Cached because the same version pairs are compared repeatedly.
    return AwesomeVersion(left) >= AwesomeVersion(right)
|
||||
|
||||
|
||||
def extract_repository_from_url(url: str) -> "str | None":
    """Extract the owner/repo part from a URL.

    Returns:
        The lower-cased ``owner/repo`` string, or None when *url* does
        not match a GitHub repository reference.
    """
    # Bug fix: the previous annotation ``str or None`` evaluated to just
    # ``str`` at definition time; the string annotation expresses the
    # optional return without requiring a newer Python.
    match = re.match(RE_REPOSITORY, url)
    if not match:
        return None
    return match.group(1).lower()
|
|
@ -0,0 +1,13 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
import os
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
def path_exsist(path) -> bool:
    """Return True if *path* exists on the filesystem (name sic, public API)."""
    return os.path.exists(path)
|
||||
|
||||
|
||||
async def async_path_exsist(path) -> bool:
    """Async wrapper for path_exsist, run in the executor to avoid blocking I/O."""
    hass = get_hacs().hass
    return await hass.async_add_executor_job(path_exsist, path)
|
|
@ -0,0 +1,70 @@
|
|||
"""Register a repository."""
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import (
|
||||
HacsException,
|
||||
HacsExpectedException,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...repositories import RERPOSITORY_CLASSES
|
||||
|
||||
|
||||
# @concurrent(15, 5)
async def register_repository(full_name, category, check=True, ref=None):
    """Register a repository.

    Args:
        full_name: "owner/repo" of the repository to register.
        category: HACS category key (must be in RERPOSITORY_CLASSES).
        check: Run registration/validation before tracking when True.
        ref: Optional ref/tag to register against.

    Raises:
        HacsExpectedException: When the repository is skip-listed.
        HacsException: On invalid category or failed validation.
    """
    hacs = get_hacs()

    # Skip-listed repositories are rejected up front; HACS itself is exempt.
    if full_name in hacs.common.skip:
        if full_name != "hacs/integration":
            raise HacsExpectedException(f"Skipping {full_name}")

    if category not in RERPOSITORY_CLASSES:
        raise HacsException(f"{category} is not a valid repository category.")

    # Instantiate the category-specific repository class.
    repository = RERPOSITORY_CLASSES[category](full_name)
    if check:
        try:
            await repository.async_registration(ref)
            if hacs.status.new:
                # During a fresh HACS install nothing counts as "new".
                repository.data.new = False
            if repository.validate.errors:
                hacs.common.skip.append(repository.data.full_name)
                if not hacs.status.startup:
                    hacs.log.error("Validation for %s failed.", full_name)
                if hacs.system.action:
                    raise HacsException(f"::error:: Validation for {full_name} failed.")
                # NOTE(review): returns the error list on failure but None
                # on success — confirm callers rely on this before changing.
                return repository.validate.errors
            if hacs.system.action:
                repository.logger.info("%s Validation completed", repository)
            else:
                repository.logger.info("%s Registration completed", repository)
        except AIOGitHubAPIException as exception:
            hacs.common.skip.append(repository.data.full_name)
            raise HacsException(
                f"Validation for {full_name} failed with {exception}."
            ) from None

    # A repository with the same GitHub id may already be tracked; id "0"
    # means the data was never fetched, so it can never match.
    exists = (
        False
        if str(repository.data.id) == "0"
        else [x for x in hacs.repositories if str(x.data.id) == str(repository.data.id)]
    )

    if exists:
        # Replace the previously tracked instance with the fresh one.
        if exists[0] in hacs.repositories:
            hacs.repositories.remove(exists[0])

    else:
        if hacs.hass is not None and (
            (check and repository.data.new) or hacs.status.new
        ):
            # Announce newly discovered repositories on the event bus.
            hacs.hass.bus.async_fire(
                "hacs/repository",
                {
                    "action": "registration",
                    "repository": repository.data.full_name,
                    "repository_id": repository.data.id,
                },
            )
    hacs.repositories.append(repository)
|
|
@ -0,0 +1,32 @@
|
|||
"""Helper to calculate the remaining calls to github."""
|
||||
import math
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
async def remaining(github):
    """Helper to calculate the remaining calls to github.

    Returns:
        int | None: Remaining API calls, 0 when the key is missing,
        None when the rate-limit lookup failed.
    """
    try:
        ratelimits = await github.get_rate_limit()
    except Exception as exception:  # pylint: disable=broad-except
        # Broad by design (any client failure means "unknown"), but no
        # longer catches BaseException — that would swallow
        # KeyboardInterrupt and task cancellation.
        _LOGGER.error(exception)
        return None
    if ratelimits.get("remaining") is not None:
        return int(ratelimits["remaining"])
    return 0
|
||||
|
||||
|
||||
async def get_fetch_updates_for(github):
    """Helper to calculate the number of repositories we can fetch data for.

    Keeps a safety margin of API calls and budgets a fixed cost per
    repository. Returns None when the remaining quota is unknown.
    """
    margin = 1000
    per_repository = 15
    limit = await remaining(github)

    if limit is None:
        return None

    budget = limit - margin
    if budget <= per_repository:
        return 0
    return math.floor(budget / per_repository)
|
|
@ -0,0 +1,52 @@
|
|||
"""Download."""
|
||||
import gzip
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import aiofiles
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
async def async_save_file(location, content):
    """Save a file to disk, gzip .js/.css copies, and clean up old theme files.

    Args:
        location: Destination path.
        content: Text (written utf-8) or bytes payload.

    Returns:
        bool: True if the file exists after writing, False on failure.
    """
    _LOGGER.debug("Saving %s", location)
    mode = "w"
    encoding = "utf-8"
    errors = "ignore"

    if not isinstance(content, str):
        # Binary payloads are written as-is.
        mode = "wb"
        encoding = None
        errors = None

    try:
        async with aiofiles.open(
            location, mode=mode, encoding=encoding, errors=errors
        ) as outfile:
            await outfile.write(content)
        # Bug fix: removed the redundant outfile.close() — the async
        # context manager already closes the file, and calling close()
        # on an aiofiles handle returns an un-awaited coroutine.

        # Create gz for .js files
        if os.path.isfile(location):
            if location.endswith(".js") or location.endswith(".css"):
                with open(location, "rb") as f_in:
                    with gzip.open(location + ".gz", "wb") as f_out:
                        shutil.copyfileobj(f_in, f_out)

        # Remove with 2.0
        if "themes" in location and location.endswith(".yaml"):
            filename = location.split("/")[-1]
            base = location.split("/themes/")[0]
            # Bug fix: use the computed filename; the previous code built
            # the path from a literal placeholder string.
            combined = f"{base}/themes/{filename}"
            if os.path.exists(combined):
                _LOGGER.info("Removing old theme file %s", combined)
                os.remove(combined)

    except Exception as error:  # pylint: disable=broad-except
        # Broad by design (best-effort save), but no longer catches
        # BaseException (KeyboardInterrupt / cancellation).
        _LOGGER.error("Could not write data to %s - %s", location, error)
        return False

    return os.path.exists(location)
|
|
@ -0,0 +1,34 @@
|
|||
"""Storage handers."""
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from homeassistant.helpers.json import JSONEncoder
|
||||
|
||||
from custom_components.hacs.const import VERSION_STORAGE
|
||||
|
||||
|
||||
def get_store_for_key(hass, key):
    """Create a Store object for the key."""
    # Deferred import to avoid import-time cost (see module pylint disable).
    from homeassistant.helpers.storage import Store

    # Bare keys are namespaced under "hacs."; qualified keys pass through.
    full_key = key if "/" in key else f"hacs.{key}"
    return Store(hass, VERSION_STORAGE, full_key, encoder=JSONEncoder)
|
||||
|
||||
|
||||
async def async_load_from_store(hass, key):
    """Load the retained data from store and return de-serialized data."""
    restored = await get_store_for_key(hass, key).async_load()
    # A missing store yields an empty dict rather than None.
    return {} if restored is None else restored
|
||||
|
||||
|
||||
async def async_save_to_store(hass, key, data):
    """Generate dynamic data to store and save it to the filesystem."""
    # Serialization is handled by the Store's JSONEncoder (get_store_for_key).
    await get_store_for_key(hass, key).async_save(data)
|
||||
|
||||
|
||||
async def async_remove_store(hass, key):
    """Remove a store element that should no longer be used"""
    # Only fully qualified keys ("domain/key") may be removed; bare keys
    # are silently ignored.
    if "/" in key:
        await get_store_for_key(hass, key).async_remove()
|
|
@ -0,0 +1,32 @@
|
|||
"""Custom template support."""
|
||||
# pylint: disable=broad-except
|
||||
from jinja2 import Template
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
def render_template(content, context):
    """Render Jinja templates in content with the repository as context.

    Returns the rendered string, or the original content unchanged when
    rendering fails for any reason.
    """
    # Fix None issues
    if context.releases.last_release_object is not None:
        prerelease = context.releases.last_release_object.prerelease
    else:
        prerelease = False

    # Render the template
    try:
        render = Template(content)
        render = render.render(
            installed=context.data.installed,
            pending_update=context.pending_upgrade,
            prerelease=prerelease,
            selected_tag=context.data.selected_tag,
            version_available=context.releases.last_release,
            version_installed=context.display_installed_version,
        )
        return render
    except Exception as exception:  # pylint: disable=broad-except
        # Broad by design — a bad template must never break the caller —
        # but no longer catches BaseException (KeyboardInterrupt etc.).
        _LOGGER.debug(exception)
        return content
|
|
@ -0,0 +1,101 @@
|
|||
"""Helper to do common validation for repositories."""
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import (
|
||||
HacsException,
|
||||
HacsRepositoryArchivedException,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.information import (
|
||||
get_releases,
|
||||
get_repository,
|
||||
get_tree,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.version_to_install import (
|
||||
version_to_install,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs, is_removed
|
||||
|
||||
|
||||
async def common_validate(repository, ignore_issues=False):
    """Common validation steps of the repository.

    Args:
        repository: The repository object being validated.
        ignore_issues: Forwarded to common_update_data; when True,
            problems are logged but do not raise.
    """
    repository.validate.errors = []

    # Make sure the repository exist.
    repository.logger.debug("%s Checking repository.", repository)
    await common_update_data(repository, ignore_issues)

    # Step 6: Get the content of hacs.json
    await repository.get_repository_manifest_content()
|
||||
|
||||
|
||||
async def common_update_data(repository, ignore_issues=False):
    """Common update data.

    Refreshes the GitHub repository object, release list, install ref and
    file tree on *repository*.

    Raises:
        HacsException: Missing repository/tree or blacklisted (unless
            ignore_issues).
        HacsRepositoryArchivedException: Archived repository.
    """
    hacs = get_hacs()
    releases = []
    try:
        repository_object = await get_repository(
            hacs.session, hacs.configuration.token, repository.data.full_name
        )
        repository.repository_object = repository_object
        repository.data.update_data(repository_object.attributes)
    except (AIOGitHubAPIException, HacsException) as exception:
        if not hacs.status.startup:
            repository.logger.error("%s %s", repository, exception)
        if not ignore_issues:
            repository.validate.errors.append("Repository does not exist.")
            raise HacsException(exception) from None

    # Make sure the repository is not archived.
    if repository.data.archived and not ignore_issues:
        repository.validate.errors.append("Repository is archived.")
        raise HacsRepositoryArchivedException("Repository is archived.")

    # Make sure the repository is not in the blacklist.
    if is_removed(repository.data.full_name) and not ignore_issues:
        repository.validate.errors.append("Repository is in the blacklist.")
        raise HacsException("Repository is in the blacklist.")

    # Get releases.
    try:
        releases = await get_releases(
            repository.repository_object,
            repository.data.show_beta,
            hacs.configuration.release_limit,
        )
        if releases:
            repository.data.releases = True
            # Drafts are never installable.
            repository.releases.objects = [x for x in releases if not x.draft]
            repository.data.published_tags = [
                x.tag_name for x in repository.releases.objects
            ]
            # Releases arrive newest-first; first tag is the latest version.
            repository.data.last_version = next(iter(repository.data.published_tags))

    except (AIOGitHubAPIException, HacsException):
        # No releases is a normal condition, not an error.
        repository.data.releases = False

    if not repository.force_branch:
        repository.ref = version_to_install(repository)
        if repository.data.releases:
            # Record the download count of the asset for the chosen ref.
            for release in repository.releases.objects or []:
                if release.tag_name == repository.ref:
                    assets = release.assets
                    if assets:
                        downloads = next(iter(assets)).attributes.get("download_count")
                        repository.data.downloads = downloads

    repository.logger.debug(
        "%s Running checks against %s", repository, repository.ref.replace("tags/", "")
    )

    try:
        repository.tree = await get_tree(repository.repository_object, repository.ref)
        if not repository.tree:
            raise HacsException("No files in tree")
        repository.treefiles = []
        for treefile in repository.tree:
            repository.treefiles.append(treefile.full_path)
    except (AIOGitHubAPIException, HacsException) as exception:
        if not hacs.status.startup:
            repository.logger.error("%s %s", repository, exception)
        if not ignore_issues:
            raise HacsException(exception) from None
|
|
@ -0,0 +1,20 @@
|
|||
"""Install helper for repositories."""
|
||||
|
||||
|
||||
def version_to_install(repository):
    """Determine which version to install.

    Preference order: published releases (honoring a selected tag), then
    the selected branch/tag, then the default branch ("main" fallback).
    """
    data = repository.data
    if data.last_version is not None:
        if data.selected_tag is None:
            return data.last_version
        if data.selected_tag == data.last_version:
            # Selecting the latest release clears the explicit selection.
            data.selected_tag = None
            return data.last_version
        return data.selected_tag
    if data.selected_tag is not None:
        if data.selected_tag == data.default_branch:
            return data.default_branch
        if data.selected_tag in data.published_tags:
            return data.selected_tag
    if data.default_branch is None:
        return "main"
    return data.default_branch
|
|
@ -0,0 +1,30 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from custom_components.hacs.helpers.methods.installation import (
|
||||
RepositoryMethodInstall,
|
||||
RepositoryMethodPostInstall,
|
||||
RepositoryMethodPreInstall,
|
||||
)
|
||||
from custom_components.hacs.helpers.methods.registration import (
|
||||
RepositoryMethodPostRegistration,
|
||||
RepositoryMethodPreRegistration,
|
||||
RepositoryMethodRegistration,
|
||||
)
|
||||
from custom_components.hacs.helpers.methods.reinstall_if_needed import (
|
||||
RepositoryMethodReinstallIfNeeded,
|
||||
)
|
||||
|
||||
|
||||
# Aggregates the install/registration mixins so repository classes only
# need to inherit this one helper.
class RepositoryHelperMethods(
    RepositoryMethodReinstallIfNeeded,
    RepositoryMethodInstall,
    RepositoryMethodPostInstall,
    RepositoryMethodPreInstall,
    RepositoryMethodPreRegistration,
    RepositoryMethodRegistration,
    RepositoryMethodPostRegistration,
):
    """Collection of repository methods that are nested to all repositories."""


# NOTE(review): intentionally empty placeholder for HACS-level helper
# methods — confirm it is still needed before removing.
class HacsHelperMethods:
    """Helper class for HACS methods"""
|
@ -0,0 +1,117 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
import os
|
||||
import tempfile
|
||||
from abc import ABC
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.download import download_content
|
||||
from custom_components.hacs.helpers.functions.version_to_install import (
|
||||
version_to_install,
|
||||
)
|
||||
from custom_components.hacs.operational.backup import Backup, BackupNetDaemon
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
class RepositoryMethodPreInstall(ABC):
    """Mixin providing the pre-installation hook."""

    async def async_pre_install(self) -> None:
        """Override point: runs before installation; default is a no-op."""
        pass

    async def _async_pre_install(self) -> None:
        """Run and log the pre-installation steps."""
        self.logger.info("Running pre installation steps")
        await self.async_pre_install()
        self.logger.info("Pre installation steps completed")
|
||||
|
||||
|
||||
class RepositoryMethodInstall(ABC):
    """Mixin providing the installation entry point."""

    async def async_install(self) -> None:
        """Run pre-install, common install, and post-install steps in order."""
        await self._async_pre_install()
        self.logger.info("Running installation steps")
        await async_install_repository(self)
        self.logger.info("Installation steps completed")
        await self._async_post_install()
|
||||
|
||||
|
||||
class RepositoryMethodPostInstall(ABC):
    """Mixin providing the post-installation hook."""

    async def async_post_installation(self) -> None:
        """Override point: runs after installation; default is a no-op."""
        pass

    async def _async_post_install(self) -> None:
        """Run post-install steps and announce the install on the event bus."""
        self.logger.info("Running post installation steps")
        await self.async_post_installation()
        self.data.new = False
        # NOTE(review): the fixed id 1337 looks like a placeholder event
        # id — confirm consumers do not rely on a real repository id here.
        self.hacs.hass.bus.async_fire(
            "hacs/repository",
            {"id": 1337, "action": "install", "repository": self.data.full_name},
        )
        self.logger.info("Post installation steps completed")
|
||||
|
||||
|
||||
async def async_install_repository(repository):
    """Common installation steps of the repository.

    Resolves the ref to install, backs up existing content and persistent
    directories, downloads the new content, restores the backup on
    validation failure, and records the installed state.

    Raises:
        HacsException: Missing local path or incompatible HA version.
    """
    hacs = get_hacs()
    persistent_directory = None
    await repository.update_repository()
    if repository.content.path.local is None:
        raise HacsException("repository.content.path.local is None")
    repository.validate.errors = []

    if not repository.can_install:
        raise HacsException(
            "The version of Home Assistant is not compatible with this version"
        )

    # Releases are fetched as "tags/<version>"; branches by plain name.
    version = version_to_install(repository)
    if version == repository.data.default_branch:
        repository.ref = version
    else:
        repository.ref = f"tags/{version}"

    if repository.data.installed and repository.data.category == "netdaemon":
        # NetDaemon keeps compiled state next to the source; back it up.
        persistent_directory = await hacs.hass.async_add_executor_job(
            BackupNetDaemon, repository
        )
        await hacs.hass.async_add_executor_job(persistent_directory.create)

    elif repository.data.persistent_directory:
        # Preserve the repository's declared persistent directory across
        # the re-install.
        if os.path.exists(
            f"{repository.content.path.local}/{repository.data.persistent_directory}"
        ):
            persistent_directory = Backup(
                f"{repository.content.path.local}/{repository.data.persistent_directory}",
                tempfile.gettempdir() + "/hacs_persistent_directory/",
            )
            await hacs.hass.async_add_executor_job(persistent_directory.create)

    # Back up the currently installed content so it can be restored if
    # the download fails validation.
    if repository.data.installed and not repository.content.single:
        backup = Backup(repository.content.path.local)
        await hacs.hass.async_add_executor_job(backup.create)

    if repository.data.zip_release and version != repository.data.default_branch:
        await repository.download_zip_files(repository)
    else:
        await download_content(repository)

    if repository.validate.errors:
        for error in repository.validate.errors:
            repository.logger.error(error)
        # Roll back to the pre-install content.
        if repository.data.installed and not repository.content.single:
            await hacs.hass.async_add_executor_job(backup.restore)

    if repository.data.installed and not repository.content.single:
        await hacs.hass.async_add_executor_job(backup.cleanup)

    if persistent_directory is not None:
        await hacs.hass.async_add_executor_job(persistent_directory.restore)
        await hacs.hass.async_add_executor_job(persistent_directory.cleanup)

    if repository.validate.success:
        if repository.data.full_name not in repository.hacs.common.installed:
            # NOTE(review): only "hacs/integration" is appended here —
            # confirm other repositories are tracked elsewhere.
            if repository.data.full_name == "hacs/integration":
                repository.hacs.common.installed.append(repository.data.full_name)
        repository.data.installed = True
        repository.data.installed_commit = repository.data.last_commit

        # Branch installs have no version; None signals "track the branch".
        if version == repository.data.default_branch:
            repository.data.installed_version = None
        else:
            repository.data.installed_version = version
|
|
@ -0,0 +1,43 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member, attribute-defined-outside-init
|
||||
from abc import ABC
|
||||
|
||||
from custom_components.hacs.validate import async_run_repository_checks
|
||||
|
||||
|
||||
class RepositoryMethodPreRegistration(ABC):
    """Mixin providing the pre-registration hook."""

    async def async_pre_registration(self):
        """Override point: runs before registration; default is a no-op."""
        pass


class RepositoryMethodRegistration(ABC):
    """Mixin providing the registration entry point."""

    async def registration(self, ref=None) -> None:
        """Deprecated alias for async_registration."""
        self.logger.warning(
            "'registration' is deprecated, use 'async_registration' instead"
        )
        await self.async_registration(ref)

    async def async_registration(self, ref=None) -> None:
        """Run the full registration flow for this repository."""
        # Run local pre registration steps.
        await self.async_pre_registration()

        if ref is not None:
            # Pin registration to a specific ref/tag.
            self.data.selected_tag = ref
            self.ref = ref
            self.force_branch = True

        # NOTE(review): returns False on validation failure but None on
        # success — callers should not rely on the return value.
        if not await self.validate_repository():
            return False

        # Run common registration steps.
        await self.common_registration()

        # Set correct local path
        self.content.path.local = self.localpath

        # Run local post registration steps.
        await self.async_post_registration()


class RepositoryMethodPostRegistration(ABC):
    """Mixin providing the post-registration hook."""

    async def async_post_registration(self):
        """Run the repository validation checks after registration."""
        await async_run_repository_checks(self)
|
|
@ -0,0 +1,12 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from abc import ABC
|
||||
|
||||
from custom_components.hacs.helpers.functions.path_exsist import async_path_exsist
|
||||
|
||||
|
||||
class RepositoryMethodReinstallIfNeeded(ABC):
    """Mixin that detects an installed repository missing from the filesystem."""

    async def async_reinstall_if_needed(self) -> None:
        """Log when installed content is missing from the local filesystem."""
        if self.data.installed:
            if not await async_path_exsist(self.content.path.local):
                self.logger.error("Missing from local FS, should be reinstalled.")
                # Automatic re-install is intentionally disabled for now.
                # await self.async_install()
|
|
@ -0,0 +1,16 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from custom_components.hacs.helpers.properties.can_be_installed import (
|
||||
RepositoryPropertyCanBeInstalled,
|
||||
)
|
||||
from custom_components.hacs.helpers.properties.custom import RepositoryPropertyCustom
|
||||
from custom_components.hacs.helpers.properties.pending_update import (
|
||||
RepositoryPropertyPendingUpdate,
|
||||
)
|
||||
|
||||
|
||||
# Aggregates the property mixins so repository classes only need to
# inherit this one helper.
class RepositoryHelperProperties(
    RepositoryPropertyPendingUpdate,
    RepositoryPropertyCustom,
    RepositoryPropertyCanBeInstalled,
):
    pass
|
|
@ -0,0 +1,21 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from abc import ABC
|
||||
|
||||
from custom_components.hacs.helpers.functions.misc import version_left_higher_then_right
|
||||
|
||||
|
||||
class RepositoryPropertyCanBeInstalled(ABC):
    """Mixin exposing whether this repository can be installed here."""

    @property
    def can_be_installed(self) -> bool:
        """Return True unless a minimum Home Assistant version blocks install."""
        if self.data.homeassistant is None or not self.data.releases:
            # No constraint declared, or no releases to enforce it against.
            return True
        return version_left_higher_then_right(
            self.hacs.system.ha_version, self.data.homeassistant
        )

    @property
    def can_install(self):
        """kept for legacy compatibility"""
        return self.can_be_installed
|
|
@ -0,0 +1,13 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from abc import ABC
|
||||
|
||||
|
||||
class RepositoryPropertyCustom(ABC):
    """Mixin exposing whether this repository is a custom (non-default) one."""

    @property
    def custom(self):
        """Return flag if the repository is custom."""
        # Repositories in the default store, and HACS itself, are not custom.
        if str(self.data.id) in self.hacs.common.default:
            return False
        return self.data.full_name != "hacs/integration"
|
|
@ -0,0 +1,23 @@
|
|||
# pylint: disable=missing-class-docstring,missing-module-docstring,missing-function-docstring,no-member
|
||||
from abc import ABC
|
||||
|
||||
|
||||
class RepositoryPropertyPendingUpdate(ABC):
    """Mixin exposing whether an update is pending for this repository."""

    @property
    def pending_update(self) -> bool:
        """Return True when a newer version/commit is available."""
        if not self.can_install:
            return False
        # Installed while tracking the default branch: compare commits.
        if (
            self.data.installed
            and self.data.selected_tag is not None
            and self.data.selected_tag == self.data.default_branch
        ):
            return self.data.installed_commit != self.data.last_commit
        # Otherwise compare the displayed versions.
        return self.display_installed_version != self.display_available_version

    @property
    def pending_upgrade(self) -> bool:
        """kept for legacy compatibility"""
        return self.pending_update
|
|
@ -0,0 +1,7 @@
|
|||
window.customIconsets = window.customIconsets || {};
|
||||
window.customIconsets["hacs"] = async () => {
|
||||
return {
|
||||
path:
|
||||
"m 20.064849,22.306912 c -0.0319,0.369835 -0.280561,0.707789 -0.656773,0.918212 -0.280572,0.153036 -0.605773,0.229553 -0.950094,0.229553 -0.0765,0 -0.146661,-0.0064 -0.216801,-0.01275 -0.605774,-0.05739 -1.135016,-0.344329 -1.402827,-0.7588 l 0.784304,-0.516495 c 0.0893,0.146659 0.344331,0.312448 0.707793,0.34433 0.235931,0.02551 0.471852,-0.01913 0.637643,-0.108401 0.101998,-0.05101 0.172171,-0.127529 0.17854,-0.191295 0.0065,-0.08289 -0.0255,-0.369835 -0.733293,-0.439975 -1.013854,-0.09565 -1.645127,-0.688661 -1.568606,-1.460214 0.0319,-0.382589 0.280561,-0.714165 0.663153,-0.930965 0.331571,-0.172165 0.752423,-0.25506 1.166895,-0.210424 0.599382,0.05739 1.128635,0.344329 1.402816,0.7588 l -0.784304,0.510118 c -0.0893,-0.140282 -0.344331,-0.299694 -0.707782,-0.331576 -0.235932,-0.02551 -0.471863,0.01913 -0.637654,0.10202 -0.0956,0.05739 -0.165791,0.133906 -0.17216,0.191295 -0.0255,0.293317 0.465482,0.420847 0.726913,0.439976 v 0.0064 c 1.020234,0.09565 1.638757,0.66953 1.562237,1.460213 z m -7.466854,-0.988354 c 0,-1.192401 0.962855,-2.155249 2.15525,-2.155249 0.599393,0 1.179645,0.25506 1.594117,0.707789 l -0.695033,0.624895 c -0.235931,-0.25506 -0.561133,-0.401718 -0.899084,-0.401718 -0.675903,0 -1.217906,0.542 -1.217906,1.217906 0,0.66953 0.542003,1.217908 1.217906,1.217908 0.337951,0 0.663153,-0.140283 0.899084,-0.401718 l 0.695033,0.631271 c -0.414472,0.452729 -0.988355,0.707788 -1.594117,0.707788 -1.192395,0 -2.15525,-0.969224 -2.15525,-2.148872 z M 8.6573365,23.461054 10.353474,19.14418 h 0.624893 l 1.568618,4.316874 H 11.52037 L 11.265308,22.734136 H 9.964513 l -0.274192,0.726918 z m 1.6833885,-1.68339 h 0.580263 L 10.646796,21.012487 Z M 8.1089536,19.156932 v 4.297745 H 7.1461095 v -1.645131 h -1.606867 v 1.645131 H 4.5763876 v -4.297745 h 0.9628549 v 1.696143 h 1.606867 V 19.156932 Z M 20.115859,4.2997436 C 20.090359,4.159461 19.969198,4.0574375 19.822548,4.0574375 H 14.141102 10.506516 4.8250686 c -0.14665,0 -0.2678112,0.1020202 -0.2933108,0.2423061 L 
3.690064,8.8461703 c -0.00651,0.01913 -0.00651,0.03826 -0.00651,0.057391 v 1.5239797 c 0,0.165789 0.133911,0.299694 0.2996911,0.299694 H 4.5762579 20.0711 20.664112 c 0.165781,0 0.299691,-0.133905 0.299691,-0.299694 V 8.8971848 c 0,-0.01913 0,-0.03826 -0.0065,-0.05739 z M 4.5763876,17.358767 c 0,0.184917 0.1466608,0.331577 0.3315819,0.331577 h 5.5985465 3.634586 0.924594 c 0.184911,0 0.331571,-0.14666 0.331571,-0.331577 v -4.744098 c 0,-0.184918 0.146661,-0.331577 0.331582,-0.331577 h 2.894913 c 0.184921,0 0.331582,0.146659 0.331582,0.331577 v 4.744098 c 0,0.184917 0.146661,0.331577 0.331571,0.331577 h 0.446363 c 0.18491,0 0.331571,-0.14666 0.331571,-0.331577 v -5.636804 c 0,-0.184918 -0.146661,-0.331577 -0.331571,-0.331577 H 4.9079695 c -0.1849211,0 -0.3315819,0.146659 -0.3315819,0.331577 z m 1.6578879,-4.852498 h 5.6495565 c 0.15303,0 0.280561,0.12753 0.280561,0.280564 v 3.513438 c 0,0.153036 -0.127531,0.280566 -0.280561,0.280566 H 6.2342755 c -0.1530412,0 -0.2805719,-0.12753 -0.2805719,-0.280566 v -3.513438 c 0,-0.159411 0.1275307,-0.280564 0.2805719,-0.280564 z M 19.790657,3.3879075 H 4.8569594 c -0.1530412,0 -0.2805718,-0.1275296 -0.2805718,-0.2805642 V 1.3665653 C 4.5763876,1.2135296 4.7039182,1.086 4.8569594,1.086 H 19.790657 c 0.153041,0 0.280572,0.1275296 0.280572,0.2805653 v 1.740778 c 0,0.1530346 -0.127531,0.2805642 -0.280572,0.2805642 z",
|
||||
};
|
||||
};
|
|
@ -0,0 +1,25 @@
|
|||
{
|
||||
"codeowners": [
|
||||
"@ludeeus"
|
||||
],
|
||||
"config_flow": true,
|
||||
"dependencies": [
|
||||
"http",
|
||||
"websocket_api",
|
||||
"frontend",
|
||||
"persistent_notification",
|
||||
"lovelace"
|
||||
],
|
||||
"documentation": "https://hacs.xyz/docs/configuration/start",
|
||||
"domain": "hacs",
|
||||
"issue_tracker": "https://github.com/hacs/integration/issues",
|
||||
"name": "HACS",
|
||||
"requirements": [
|
||||
"aiofiles>=0.6.0",
|
||||
"aiogithubapi>=2.0.0<3.0.0",
|
||||
"awesomeversion>=20.12.5",
|
||||
"backoff>=1.10.0",
|
||||
"hacs_frontend==20210103144316",
|
||||
"queueman==0.5"
|
||||
]
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
"""Hacs models."""
|
|
@ -0,0 +1,15 @@
|
|||
"""HACS Core info."""
|
||||
from pathlib import Path
|
||||
|
||||
import attr
|
||||
|
||||
from ..enums import LovelaceMode
|
||||
|
||||
|
||||
@attr.s
class HacsCore:
    """HACS Core info."""

    # Home Assistant configuration directory (assigned during setup).
    # NOTE(review): attr.ib()'s first positional argument is the *default*
    # value, so `Path`/`str` below act as defaults, not types — confirm intent.
    config_path = attr.ib(Path)
    # Running Home Assistant version string.
    ha_version = attr.ib(str)
    # Lovelace dashboard mode; defaults to "storage".
    lovelace_mode = LovelaceMode("storage")
|
|
@ -0,0 +1,10 @@
|
|||
"""HacsFrontend."""
|
||||
|
||||
|
||||
class HacsFrontend:
    """Version bookkeeping for the HACS frontend (hacs_frontend package)."""

    # Version of the frontend currently being served
    # (assigned FE_VERSION during frontend setup).
    version_running: str = None
    # Latest available frontend version.
    # NOTE(review): assignment site not visible here — confirm semantics.
    version_available: str = None
    # Version the integration expects (from get_frontend_version at setup).
    version_expected: str = None
    # True when a frontend update is pending.
    update_pending: bool = False
|
|
@ -0,0 +1,16 @@
|
|||
"""HACS System info."""
|
||||
import attr
|
||||
|
||||
from ..const import INTEGRATION_VERSION
|
||||
from ..enums import HacsStage
|
||||
|
||||
|
||||
@attr.s
class HacsSystem:
    """HACS System info."""

    # True while HACS is disabled (setup failure, removal, API ratelimit).
    disabled: bool = False
    # True once setup has started the integration.
    running: bool = False
    # HACS integration version string.
    version: str = INTEGRATION_VERSION
    # Current startup stage.
    # NOTE(review): attr.ib()'s first positional argument is the default,
    # so the HacsStage class itself is the default here — confirm intent.
    stage: HacsStage = attr.ib(HacsStage)
    # True when running validation as a GitHub action
    # (enables ::error:: style output elsewhere).
    action: bool = False
|
|
@ -0,0 +1,124 @@
|
|||
"""Backup."""
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from time import sleep
|
||||
|
||||
from custom_components.hacs.helpers.functions.is_safe_to_remove import is_safe_to_remove
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
BACKUP_PATH = tempfile.gettempdir() + "/hacs_backup/"
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
class Backup:
    """Stage a local file or directory in the system temp dir.

    Used to stash an existing install before replacing it so a failed
    download/extract can be rolled back with restore().
    """

    def __init__(self, local_path, backup_path=BACKUP_PATH):
        """Initialize.

        local_path: file or directory to back up.
        backup_path: staging directory (defaults to <tmp>/hacs_backup/).
        """
        self.local_path = local_path
        self.backup_path = backup_path
        # Destination inside the staging dir, named after the last path segment.
        self.backup_path_full = f"{self.backup_path}{self.local_path.split('/')[-1]}"

    def create(self):
        """Create a backup in /tmp by *moving* local_path into the staging dir."""
        if not os.path.exists(self.local_path):
            return
        if not is_safe_to_remove(self.local_path):
            return
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            # rmtree can return before the filesystem reflects the removal;
            # spin until the directory is really gone.
            while os.path.exists(self.backup_path):
                sleep(0.1)
        os.makedirs(self.backup_path, exist_ok=True)

        try:
            if os.path.isfile(self.local_path):
                shutil.copyfile(self.local_path, self.backup_path_full)
                os.remove(self.local_path)
            else:
                shutil.copytree(self.local_path, self.backup_path_full)
                shutil.rmtree(self.local_path)
                while os.path.exists(self.local_path):
                    sleep(0.1)
            _LOGGER.debug(
                "Backup for %s, created in %s",
                self.local_path,
                self.backup_path_full,
            )
        except BaseException as exception:  # pylint: disable=broad-except
            # Backup is best-effort and must never break an install, but the
            # failure should be visible instead of silently swallowed.
            _LOGGER.warning(
                "Could not create backup for %s: %s", self.local_path, exception
            )

    def restore(self):
        """Restore from backup."""
        if not os.path.exists(self.backup_path_full):
            return

        if os.path.isfile(self.backup_path_full):
            if os.path.exists(self.local_path):
                os.remove(self.local_path)
            shutil.copyfile(self.backup_path_full, self.local_path)
        else:
            if os.path.exists(self.local_path):
                shutil.rmtree(self.local_path)
                while os.path.exists(self.local_path):
                    sleep(0.1)
            shutil.copytree(self.backup_path_full, self.local_path)
        _LOGGER.debug(
            "Restored %s, from backup %s", self.local_path, self.backup_path_full
        )

    def cleanup(self):
        """Cleanup backup files."""
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            while os.path.exists(self.backup_path):
                sleep(0.1)
            _LOGGER.debug("Backup dir %s cleared", self.backup_path)
|
||||
|
||||
|
||||
class BackupNetDaemon:
    """Backup of a NetDaemon repository's .yaml configuration files.

    Only .yaml files are stashed/restored; the rest of the repository
    content is expected to be re-downloaded on update.
    """

    def __init__(self, repository):
        """Initialize with the repository whose config should be preserved."""
        self.repository = repository
        # Per-repository staging dir under the system temp dir.
        self.backup_path = (
            tempfile.gettempdir() + "/hacs_persistent_netdaemon/" + repository.data.name
        )

    def create(self):
        """Create a backup in /tmp"""
        if not is_safe_to_remove(self.repository.content.path.local):
            return
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            while os.path.exists(self.backup_path):
                sleep(0.1)
        os.makedirs(self.backup_path, exist_ok=True)

        # Copy each .yaml config file into the staging dir.
        # (Fixed: the source had a corrupted "(unknown)" placeholder here
        # instead of the loop's `filename`.)
        for filename in os.listdir(self.repository.content.path.local):
            if filename.endswith(".yaml"):
                source_file_name = f"{self.repository.content.path.local}/{filename}"
                target_file_name = f"{self.backup_path}/{filename}"
                shutil.copyfile(source_file_name, target_file_name)

    def restore(self):
        """Restore the backed-up .yaml files into the repository content path."""
        if os.path.exists(self.backup_path):
            for filename in os.listdir(self.backup_path):
                if filename.endswith(".yaml"):
                    source_file_name = f"{self.backup_path}/{filename}"
                    target_file_name = (
                        f"{self.repository.content.path.local}/{filename}"
                    )
                    shutil.copyfile(source_file_name, target_file_name)

    def cleanup(self):
        """Remove the staging directory."""
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            while os.path.exists(self.backup_path):
                sleep(0.1)
            _LOGGER.debug("Backup dir %s cleared", self.backup_path)
|
|
@ -0,0 +1,56 @@
|
|||
# pylint: disable=missing-docstring,invalid-name
|
||||
import asyncio
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import (
|
||||
HacsException,
|
||||
HacsRepositoryArchivedException,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.helpers.functions.register_repository import (
|
||||
register_repository,
|
||||
)
|
||||
|
||||
max_concurrent_tasks = asyncio.Semaphore(15)
|
||||
sleeper = 5
|
||||
|
||||
_LOGGER = getLogger()
|
||||
|
||||
|
||||
class HacsTaskFactory:
    """Runs repository tasks under a shared concurrency limit.

    Each safe_* method acquires the module-level semaphore, runs the
    operation, logs (rather than propagates) known API errors, and then
    sleeps to stay under GitHub's rate limits.
    """

    def __init__(self):
        """Initialize with no tasks queued and nothing running."""
        self.tasks = []
        self.running = False

    async def safe_common_update(self, repository):
        """Run a common update for *repository*, logging known failures."""
        async with max_concurrent_tasks:
            try:
                await repository.common_update()
            except (AIOGitHubAPIException, HacsException) as err:
                _LOGGER.error("%s - %s", repository.data.full_name, err)

            # Due to GitHub ratelimits we need to sleep a bit
            await asyncio.sleep(sleeper)

    async def safe_update(self, repository):
        """Run a full update for *repository*, logging known failures."""
        async with max_concurrent_tasks:
            try:
                await repository.update_repository()
            except HacsRepositoryArchivedException as err:
                # Archived repositories are expected; warn instead of error.
                _LOGGER.warning("%s - %s", repository.data.full_name, err)
            except (AIOGitHubAPIException, HacsException) as err:
                _LOGGER.error("%s - %s", repository.data.full_name, err)

            # Due to GitHub ratelimits we need to sleep a bit
            await asyncio.sleep(sleeper)

    async def safe_register(self, repo, category):
        """Register *repo* under *category*, logging known failures."""
        async with max_concurrent_tasks:
            try:
                await register_repository(repo, category)
            except (AIOGitHubAPIException, HacsException) as err:
                _LOGGER.error("%s - %s", repo, err)

            # Due to GitHub ratelimits we need to sleep a bit
            await asyncio.sleep(sleeper)
|
|
@ -0,0 +1,10 @@
|
|||
"""Reload HACS"""
|
||||
|
||||
|
||||
async def async_reload_entry(hass, config_entry):
    """Reload HACS by tearing down and re-running setup for the entry."""
    # Imported locally — presumably to avoid circular imports at module
    # load time; confirm before moving to the top of the file.
    from custom_components.hacs.operational.remove import async_remove_entry
    from custom_components.hacs.operational.setup import async_setup_entry

    await async_remove_entry(hass, config_entry)
    await async_setup_entry(hass, config_entry)
|
|
@ -0,0 +1,24 @@
|
|||
"""Remove HACS."""
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
async def async_remove_entry(hass, config_entry):
    """Handle removal of an entry."""
    hacs = get_hacs()
    hacs.log.info("Disabling HACS")
    hacs.log.info("Removing recurring tasks")
    # Each entry is called to stop it — presumably cancel callables
    # returned by async_call_later; confirm at the registration site.
    # NOTE(review): the attribute name "recuring_tasks" is misspelled
    # upstream; renaming here would break its writers.
    for task in hacs.recuring_tasks:
        task()
    # Only unload the sensor platform if the entry actually finished loading.
    if config_entry.state == "loaded":
        hacs.log.info("Removing sensor")
        try:
            await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
        except ValueError:
            pass
    hacs.log.info("Removing sidepanel")
    try:
        hass.components.frontend.async_remove_panel("hacs")
    except AttributeError:
        # Panel was never registered / frontend unavailable; nothing to do.
        pass
    hacs.system.disabled = True
    hacs.log.info("HACS is now disabled")
|
|
@ -0,0 +1 @@
|
|||
"""Runtime..."""
|
|
@ -0,0 +1,198 @@
|
|||
"""Setup HACS."""
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHub
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
|
||||
from custom_components.hacs.const import DOMAIN, INTEGRATION_VERSION, STARTUP
|
||||
from custom_components.hacs.enums import HacsStage
|
||||
from custom_components.hacs.hacsbase.configuration import Configuration
|
||||
from custom_components.hacs.hacsbase.data import HacsData
|
||||
from custom_components.hacs.helpers.functions.constrains import check_constrains
|
||||
from custom_components.hacs.helpers.functions.remaining_github_calls import (
|
||||
get_fetch_updates_for,
|
||||
)
|
||||
from custom_components.hacs.operational.reload import async_reload_entry
|
||||
from custom_components.hacs.operational.remove import async_remove_entry
|
||||
from custom_components.hacs.operational.setup_actions.clear_storage import (
|
||||
async_clear_storage,
|
||||
)
|
||||
from custom_components.hacs.operational.setup_actions.frontend import (
|
||||
async_setup_frontend,
|
||||
)
|
||||
from custom_components.hacs.operational.setup_actions.load_hacs_repository import (
|
||||
async_load_hacs_repository,
|
||||
)
|
||||
from custom_components.hacs.operational.setup_actions.sensor import async_add_sensor
|
||||
from custom_components.hacs.operational.setup_actions.websocket_api import (
|
||||
async_setup_hacs_websockt_api,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
try:
|
||||
from homeassistant.components.lovelace import system_health_info
|
||||
except ImportError:
|
||||
from homeassistant.components.lovelace.system_health import system_health_info
|
||||
|
||||
|
||||
async def _async_common_setup(hass):
    """Common setup stages shared by the YAML and UI setup paths."""
    hacs = get_hacs()
    hacs.hass = hass
    hacs.system.running = True
    # Dedicated aiohttp session for HACS' own HTTP requests.
    hacs.session = async_create_clientsession(hass)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry):
    """Set up this integration using UI."""
    from homeassistant import config_entries

    hacs = get_hacs()
    # Already set up (e.g. via YAML) — refuse a second setup.
    if hass.data.get(DOMAIN) is not None:
        return False
    # Entries imported from YAML are handled by async_setup instead;
    # drop the imported entry.
    if config_entry.source == config_entries.SOURCE_IMPORT:
        hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
        return False

    await _async_common_setup(hass)

    hacs.configuration = Configuration.from_dict(
        config_entry.data, config_entry.options
    )
    hacs.configuration.config_type = "flow"
    hacs.configuration.config_entry = config_entry

    # Raises ConfigEntryNotReady on failure so HA retries later.
    return await async_startup_wrapper_for_config_entry()
|
||||
|
||||
|
||||
async def async_setup(hass, config):
    """Set up this integration using yaml."""
    hacs = get_hacs()
    if DOMAIN not in config:
        return True
    # A UI (config flow) setup takes precedence over YAML.
    if hacs.configuration and hacs.configuration.config_type == "flow":
        return True

    await _async_common_setup(hass)

    hacs.configuration = Configuration.from_dict(config[DOMAIN])
    hacs.configuration.config_type = "yaml"
    # The wrapper retries on its own; always report success to HA.
    await async_startup_wrapper_for_yaml()
    return True
|
||||
|
||||
|
||||
async def async_startup_wrapper_for_config_entry():
    """Startup wrapper for ui config.

    Raises ConfigEntryNotReady on failure so Home Assistant retries setup.
    """
    hacs = get_hacs()
    # Reload HACS whenever the entry's options are updated.
    hacs.configuration.config_entry.add_update_listener(async_reload_entry)
    try:
        startup_result = await async_hacs_startup()
    except AIOGitHubAPIException:
        # GitHub API failure during startup counts as a failed startup.
        startup_result = False
    if not startup_result:
        hacs.system.disabled = True
        raise ConfigEntryNotReady
    hacs.system.disabled = False
    return startup_result
|
||||
|
||||
|
||||
async def async_startup_wrapper_for_yaml(_=None):
    """Startup wrapper for yaml config.

    On failure, disables HACS and reschedules itself in 15 minutes.
    The unused `_` parameter absorbs the argument async_call_later
    passes to its callback.
    """
    hacs = get_hacs()
    try:
        startup_result = await async_hacs_startup()
    except AIOGitHubAPIException:
        startup_result = False
    if not startup_result:
        hacs.system.disabled = True
        hacs.log.info("Could not setup HACS, trying again in 15 min")
        # 900 s = 15 minutes.
        async_call_later(hacs.hass, 900, async_startup_wrapper_for_yaml)
        return
    hacs.system.disabled = False
|
||||
|
||||
|
||||
async def async_hacs_startup():
    """HACS startup tasks.

    Runs the full startup sequence (websocket API, frontend, storage,
    GitHub client, HACS repository, data restore, sensor). Returns True
    on success, False on any failure; on failure via config flow the
    entry is removed again.
    """
    hacs = get_hacs()
    hacs.hass.data[DOMAIN] = hacs

    try:
        lovelace_info = await system_health_info(hacs.hass)
    except TypeError:
        # If this happens, the users YAML is not valid, we assume YAML mode
        lovelace_info = {"mode": "yaml"}
    hacs.log.debug(f"Configuration type: {hacs.configuration.config_type}")
    hacs.version = INTEGRATION_VERSION
    hacs.log.info(STARTUP)
    hacs.core.config_path = hacs.hass.config.path()
    hacs.system.ha_version = HAVERSION

    # Setup websocket API
    await async_setup_hacs_websockt_api()

    # Set up frontend
    await async_setup_frontend()

    # Clear old storage files
    await async_clear_storage()

    hacs.system.lovelace_mode = lovelace_info.get("mode", "yaml")
    hacs.system.disabled = False
    hacs.github = GitHub(
        hacs.configuration.token, async_create_clientsession(hacs.hass)
    )
    hacs.data = HacsData()

    # Verify the token / remaining API quota before doing any GitHub work.
    can_update = await get_fetch_updates_for(hacs.github)
    if can_update is None:
        hacs.log.critical("Your GitHub token is not valid")
        return False

    if can_update != 0:
        hacs.log.debug(f"Can update {can_update} repositories")
    else:
        hacs.log.info(
            "HACS is ratelimited, repository updates will resume when the limit is cleared, this can take up to 1 hour"
        )
        return False

    # Check HACS Constrains
    if not await hacs.hass.async_add_executor_job(check_constrains):
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        return False

    # Load HACS
    if not await async_load_hacs_repository():
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        return False

    # Restore from storefiles
    if not await hacs.data.restore():
        hacs_repo = hacs.get_by_name("hacs/integration")
        hacs_repo.pending_restart = True
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        return False

    # Setup startup tasks
    if hacs.status.new or hacs.configuration.config_type == "flow":
        # New install / UI setup: HA may already be running, so schedule soon.
        async_call_later(hacs.hass, 5, hacs.startup_tasks)
    else:
        hacs.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, hacs.startup_tasks)

    # Set up sensor
    await async_add_sensor()

    # Mischief managed!
    await hacs.async_set_stage(HacsStage.WAITING)
    hacs.log.info(
        "Setup complete, waiting for Home Assistant before startup tasks starts"
    )
    return True
|
|
@ -0,0 +1,43 @@
|
|||
"""Starting setup task: extra stores."""
|
||||
from custom_components.hacs.const import ELEMENT_TYPES
|
||||
|
||||
from ...enums import HacsCategory, HacsSetupTask
|
||||
from ...share import get_hacs
|
||||
|
||||
|
||||
def _setup_extra_stores():
    """Set up extra stores in HACS if enabled in Home Assistant."""
    hacs = get_hacs()
    hacs.log.debug("Starting setup task: Extra stores")
    # Start from scratch, then enable the default element types.
    hacs.common.categories = set()
    for category in ELEMENT_TYPES:
        enable_category(hacs, HacsCategory(category))

    # python_script support only when the HA component is loaded.
    if HacsCategory.PYTHON_SCRIPT in hacs.hass.config.components:
        if HacsCategory.PYTHON_SCRIPT not in hacs.common.categories:
            enable_category(hacs, HacsCategory.PYTHON_SCRIPT)

    # Themes only when the frontend exposes its reload_themes service.
    # NOTE(review): hass.services._services is a private HA attribute — may
    # break on HA upgrades; confirm there is no public equivalent.
    if (
        hacs.hass.services._services.get("frontend", {}).get("reload_themes")
        is not None
    ):
        if HacsCategory.THEME not in hacs.common.categories:
            enable_category(hacs, HacsCategory.THEME)

    if hacs.configuration.appdaemon:
        enable_category(hacs, HacsCategory.APPDAEMON)
    if hacs.configuration.netdaemon:
        enable_category(hacs, HacsCategory.NETDAEMON)
|
||||
|
||||
|
||||
async def async_setup_extra_stores():
    """Async wrapper for setup_extra_stores"""
    hacs = get_hacs()
    hacs.log.info("setup task %s", HacsSetupTask.CATEGORIES)
    # The work is synchronous; run it in the executor to keep the loop free.
    await hacs.hass.async_add_executor_job(_setup_extra_stores)
|
||||
|
||||
|
||||
def enable_category(hacs, category: HacsCategory):
    """Register *category* as an enabled HACS category."""
    # Log first so the debug trail records the attempt.
    hacs.log.debug("Enable category: %s", category)
    registry = hacs.common.categories
    registry.add(category)
|
|
@ -0,0 +1,24 @@
|
|||
"""Starting setup task: clear storage."""
|
||||
import os
|
||||
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...enums import HacsSetupTask
|
||||
|
||||
|
||||
async def async_clear_storage():
    """Async wrapper for clear_storage"""
    hacs = get_hacs()
    # NOTE(review): this logs HacsSetupTask.CATEGORIES although this is the
    # clear-storage task — looks like a copy/paste; confirm whether a
    # dedicated enum member exists.
    hacs.log.info("Setup task %s", HacsSetupTask.CATEGORIES)
    # File removal is blocking; run it in the executor.
    await hacs.hass.async_add_executor_job(_clear_storage)
|
||||
|
||||
|
||||
def _clear_storage():
    """Remove leftover .storage files from older HACS versions."""
    hacs = get_hacs()
    # Files (relative to .storage/) that current versions no longer use.
    for s_f in ("hacs",):
        path = f"{hacs.core.config_path}/.storage/{s_f}"
        if not os.path.isfile(path):
            continue
        hacs.log.info(f"Cleaning up old storage file {path}")
        os.remove(path)
|
|
@ -0,0 +1,110 @@
|
|||
from hacs_frontend.version import VERSION as FE_VERSION
|
||||
from hacs_frontend import locate_dir
|
||||
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
from custom_components.hacs.webresponses.frontend import HacsFrontendDev
|
||||
from custom_components.hacs.helpers.functions.information import get_frontend_version
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...enums import HacsSetupTask
|
||||
|
||||
|
||||
URL_BASE = "/hacsfiles"
|
||||
|
||||
|
||||
async def async_setup_frontend():
    """Configure the HACS frontend elements.

    Registers static paths (themes, frontend bundle, iconset, downloaded
    plugin files), records frontend versions, and adds the HACS sidepanel.
    """
    hacs = get_hacs()
    hacs.log.info("Setup task %s", HacsSetupTask.FRONTEND)
    hass = hacs.hass

    # Register themes
    hass.http.register_static_path(f"{URL_BASE}/themes", hass.config.path("themes"))

    # Register frontend
    if hacs.configuration.frontend_repo_url:
        getLogger().warning(
            "Frontend development mode enabled. Do not run in production."
        )
        hass.http.register_view(HacsFrontendDev())
    else:
        # Serve the bundled hacs_frontend package from disk.
        hass.http.register_static_path(f"{URL_BASE}/frontend", locate_dir())

    # Custom iconset
    hass.http.register_static_path(
        f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
    )
    if "frontend_extra_module_url" not in hass.data:
        hass.data["frontend_extra_module_url"] = set()
    hass.data["frontend_extra_module_url"].add("/hacsfiles/iconset.js")

    # Register www/community for all other files
    hass.http.register_static_path(
        URL_BASE, hass.config.path("www/community"), cache_headers=False
    )

    # Record running vs expected frontend versions for update detection.
    hacs.frontend.version_running = FE_VERSION
    hacs.frontend.version_expected = await hass.async_add_executor_job(
        get_frontend_version
    )

    # Add to sidepanel (skip if a "hacs" panel is already registered).
    if "hacs" not in hass.data.get("frontend_panels", {}):
        hass.components.frontend.async_register_built_in_panel(
            component_name="custom",
            sidebar_title=hacs.configuration.sidepanel_title,
            sidebar_icon=hacs.configuration.sidepanel_icon,
            frontend_url_path="hacs",
            config={
                "_panel_custom": {
                    "name": "hacs-frontend",
                    "embed_iframe": True,
                    "trust_external": False,
                    "js_url": "/hacsfiles/frontend/entrypoint.js",
                }
            },
            require_admin=True,
        )
|
||||
|
||||
|
||||
async def async_serve_frontend(requested_file):
    """Serve a frontend file (production bundle or development sources).

    NOTE(review): `web` (aiohttp), `_LOGGER` and `async_path_exsist` are not
    defined by this module's imports as shown — confirm where they come from
    (`async_path_exsist` also looks misspelled).
    """
    hacs = get_hacs()
    # Only the final path segment is used to locate the file.
    requested = requested_file.split("/")[-1]
    servefile = None
    dev = False

    if hacs.configuration.frontend_repo_url or hacs.configuration.frontend_repo:
        dev = True

    if hacs.configuration.frontend_repo_url:
        _LOGGER.debug("Serving REMOTE DEVELOPMENT frontend")
        try:
            request = await hacs.session.get(
                f"{hacs.configuration.frontend_repo_url}/{requested}"
            )
            if request.status == 200:
                result = await request.read()
                response = web.Response(body=result)
                response.headers["Content-Type"] = "application/javascript"

                return response
        except (Exception, BaseException) as exception:
            # Best-effort: fall through to the 404 path below on failure.
            _LOGGER.error(exception)

    elif hacs.configuration.frontend_repo:
        _LOGGER.debug("Serving LOCAL DEVELOPMENT frontend")
        servefile = f"{hacs.configuration.frontend_repo}/hacs_frontend/{requested}"
    else:
        # Production: serve from the bundled hacs_frontend package.
        servefile = f"{locate_dir()}/{requested}"

    if servefile is None or not await async_path_exsist(servefile):
        return web.Response(status=404)

    response = web.FileResponse(servefile)
    response.headers["Content-Type"] = "application/javascript"

    if dev:
        # Never cache development assets.
        response.headers["Cache-Control"] = "no-store, max-age=0"
        response.headers["Pragma"] = "no-store"
    return response
|
|
@ -0,0 +1,38 @@
|
|||
"""Starting setup task: load HACS repository."""
|
||||
from custom_components.hacs.const import INTEGRATION_VERSION
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.functions.information import get_repository
|
||||
from custom_components.hacs.helpers.functions.register_repository import (
|
||||
register_repository,
|
||||
)
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...enums import HacsSetupTask
|
||||
|
||||
|
||||
async def async_load_hacs_repository():
    """Load the HACS repository (hacs/integration) into the registry.

    Returns True on success, False when GitHub is unreachable/ratelimited
    or the repository cannot be registered.
    """
    hacs = get_hacs()
    hacs.log.info("Setup task %s", HacsSetupTask.HACS_REPO)

    try:
        repository = hacs.get_by_name("hacs/integration")
        if repository is None:
            # First run: register it, then look it up again.
            await register_repository("hacs/integration", "integration")
            repository = hacs.get_by_name("hacs/integration")
        if repository is None:
            raise HacsException("Unknown error")
        # Mark HACS itself as installed at the current version.
        repository.data.installed = True
        repository.data.installed_version = INTEGRATION_VERSION
        repository.data.new = False
        hacs.repo = repository.repository_object
        # hacs/default holds the default repository lists.
        hacs.data_repo = await get_repository(
            hacs.session, hacs.configuration.token, "hacs/default"
        )
    except HacsException as exception:
        if "403" in f"{exception}":
            hacs.log.critical("GitHub API is ratelimited, or the token is wrong.")
        else:
            hacs.log.critical(f"[{exception}] - Could not load HACS!")
        return False
    return True
|
|
@ -0,0 +1,25 @@
|
|||
""""Starting setup task: Sensor"."""
|
||||
from homeassistant.helpers import discovery
|
||||
|
||||
from custom_components.hacs.const import DOMAIN
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...enums import HacsSetupTask
|
||||
|
||||
|
||||
async def async_add_sensor():
    """Async wrapper for add sensor"""
    hacs = get_hacs()
    hacs.log.info("Setup task %s", HacsSetupTask.SENSOR)
    if hacs.configuration.config_type == "yaml":
        # YAML setup: load the sensor platform via discovery.
        hacs.hass.async_create_task(
            discovery.async_load_platform(
                hacs.hass, "sensor", DOMAIN, {}, hacs.configuration.config
            )
        )
    else:
        # UI setup: forward the config entry to the sensor platform.
        hacs.hass.async_add_job(
            hacs.hass.config_entries.async_forward_entry_setup(
                hacs.configuration.config_entry, "sensor"
            )
        )
|
|
@ -0,0 +1,36 @@
|
|||
"""Register WS API endpoints for HACS."""
|
||||
from homeassistant.components import websocket_api
|
||||
|
||||
from custom_components.hacs.api.acknowledge_critical_repository import (
|
||||
acknowledge_critical_repository,
|
||||
)
|
||||
from custom_components.hacs.api.check_local_path import check_local_path
|
||||
from custom_components.hacs.api.get_critical_repositories import (
|
||||
get_critical_repositories,
|
||||
)
|
||||
from custom_components.hacs.api.hacs_config import hacs_config
|
||||
from custom_components.hacs.api.hacs_removed import hacs_removed
|
||||
from custom_components.hacs.api.hacs_repositories import hacs_repositories
|
||||
from custom_components.hacs.api.hacs_repository import hacs_repository
|
||||
from custom_components.hacs.api.hacs_repository_data import hacs_repository_data
|
||||
from custom_components.hacs.api.hacs_settings import hacs_settings
|
||||
from custom_components.hacs.api.hacs_status import hacs_status
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
from ...enums import HacsSetupTask
|
||||
|
||||
|
||||
async def async_setup_hacs_websockt_api():
    """Set up WS API handlers."""
    # (Function name typo "websockt" kept — callers reference it as-is.)
    hacs = get_hacs()
    hacs.log.info("Setup task %s", HacsSetupTask.WEBSOCKET)
    # Register every HACS websocket command handler with Home Assistant.
    for handler in (
        hacs_settings,
        hacs_config,
        hacs_repositories,
        hacs_repository,
        hacs_repository_data,
        check_local_path,
        hacs_status,
        hacs_removed,
        acknowledge_critical_repository,
        get_critical_repositories,
    ):
        websocket_api.async_register_command(hacs.hass, handler)
|
|
@ -0,0 +1,16 @@
|
|||
"""Initialize repositories."""
|
||||
from custom_components.hacs.repositories.appdaemon import HacsAppdaemon
|
||||
from custom_components.hacs.repositories.integration import HacsIntegration
|
||||
from custom_components.hacs.repositories.netdaemon import HacsNetdaemon
|
||||
from custom_components.hacs.repositories.plugin import HacsPlugin
|
||||
from custom_components.hacs.repositories.python_script import HacsPythonScript
|
||||
from custom_components.hacs.repositories.theme import HacsTheme
|
||||
|
||||
# Map of category name -> repository class used when registering repositories.
# NOTE(review): "RERPOSITORY" is misspelled; renaming would break importers,
# so the name is kept as-is.
RERPOSITORY_CLASSES = {
    "theme": HacsTheme,
    "integration": HacsIntegration,
    "python_script": HacsPythonScript,
    "appdaemon": HacsAppdaemon,
    "netdaemon": HacsNetdaemon,
    "plugin": HacsPlugin,
}
|
|
@ -0,0 +1,72 @@
|
|||
"""Class for appdaemon apps in HACS."""
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
|
||||
|
||||
class HacsAppdaemon(HacsRepository):
    """Appdaemon apps in HACS."""

    def __init__(self, full_name):
        """Initialize for the GitHub repository *full_name* (owner/repo)."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.APPDAEMON
        self.content.path.local = self.localpath
        # AppDaemon repositories keep their apps under apps/<app>.
        self.content.path.remote = "apps"

    @property
    def localpath(self):
        """Return localpath."""
        return f"{self.hacs.core.config_path}/appdaemon/apps/{self.data.name}"

    async def validate_repository(self):
        """Validate."""
        await self.common_validate()

        # Custom step 1: Validate content.
        try:
            addir = await self.repository_object.get_contents("apps", self.ref)
        except AIOGitHubAPIException:
            raise HacsException(
                f"Repostitory structure for {self.ref.replace('tags/','')} is not compliant"
            ) from None

        if not isinstance(addir, list):
            self.validate.errors.append("Repostitory structure not compliant")

        # NOTE(review): execution continues with addir[0] even after the
        # not-a-list error above — confirm this cannot raise here.
        self.content.path.remote = addir[0].path
        self.content.objects = await self.repository_object.get_contents(
            self.content.path.remote, self.ref
        )

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def update_repository(self, ignore_issues=False):
        """Update."""
        await self.common_update(ignore_issues)

        # Get appdaemon objects.
        if self.repository_manifest:
            if self.data.content_in_root:
                self.content.path.remote = ""

        if self.content.path.remote == "apps":
            # Descend into the single app directory under apps/.
            addir = await self.repository_object.get_contents(
                self.content.path.remote, self.ref
            )
            self.content.path.remote = addir[0].path
        self.content.objects = await self.repository_object.get_contents(
            self.content.path.remote, self.ref
        )

        # Set local path
        self.content.path.local = self.localpath
|
|
@ -0,0 +1,97 @@
|
|||
"""Class for integrations in HACS."""
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
from custom_components.hacs.helpers.functions.filters import (
|
||||
get_first_directory_in_directory,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.information import (
|
||||
get_integration_manifest,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
|
||||
class HacsIntegration(HacsRepository):
    """Integrations in HACS.

    Handles validation, update and post-install steps for custom
    integrations installed under config/custom_components/.
    """

    def __init__(self, full_name):
        """Initialize with the GitHub ``owner/repo`` full name."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.INTEGRATION
        self.content.path.remote = "custom_components"
        self.content.path.local = self.localpath

    @property
    def localpath(self):
        """Return the local installation path for this integration."""
        return f"{self.hacs.core.config_path}/custom_components/{self.data.domain}"

    async def async_post_installation(self):
        """Run post installation steps.

        Reloads the custom_components cache for config-flow integrations
        (except HACS itself) and flags a pending restart otherwise.
        """
        if self.data.config_flow:
            if self.data.full_name != "hacs/integration":
                await self.reload_custom_components()
            if self.data.first_install:
                self.pending_restart = False
                return
        self.pending_restart = True

    async def validate_repository(self):
        """Validate the repository structure and manifest.

        Returns:
            bool: True when validation produced no blocking errors.

        Raises:
            HacsException: when the repository structure is not compliant.
        """
        await self.common_validate()

        # Custom step 1: Validate content.
        if self.data.content_in_root:
            self.content.path.remote = ""

        if self.content.path.remote == "custom_components":
            name = get_first_directory_in_directory(self.tree, "custom_components")
            if name is None:
                # Fixed typo: "Repostitory" -> "Repository" (matches python_script.py).
                raise HacsException(
                    f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
                )
            self.content.path.remote = f"custom_components/{name}"

        try:
            await get_integration_manifest(self)
        except HacsException as exception:
            if self.hacs.system.action:
                raise HacsException(f"::error:: {exception}") from exception
            self.logger.error("%s %s", self, exception)

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def update_repository(self, ignore_issues=False):
        """Update repository data and refresh the integration manifest."""
        await self.common_update(ignore_issues)

        if self.data.content_in_root:
            self.content.path.remote = ""

        if self.content.path.remote == "custom_components":
            name = get_first_directory_in_directory(self.tree, "custom_components")
            self.content.path.remote = f"custom_components/{name}"

        try:
            await get_integration_manifest(self)
        except HacsException as exception:
            self.logger.error("%s %s", self, exception)

        # Set local path
        self.content.path.local = self.localpath

    async def reload_custom_components(self):
        """Reload custom_components (and config flows) in HA."""
        self.logger.info("Reloading custom_component cache")
        # Dropping the cache forces Home Assistant to rescan custom_components.
        del self.hacs.hass.data["custom_components"]
        await async_get_custom_components(self.hacs.hass)
        self.logger.info("Custom_component cache reloaded")
|
|
@ -0,0 +1,87 @@
|
|||
"""Class for netdaemon apps in HACS."""
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
from custom_components.hacs.helpers.functions.filters import (
|
||||
get_first_directory_in_directory,
|
||||
)
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
|
||||
class HacsNetdaemon(HacsRepository):
    """Netdaemon apps in HACS.

    NetDaemon apps are C# (.cs) apps installed under config/netdaemon/apps/.
    """

    def __init__(self, full_name):
        """Initialize with the GitHub ``owner/repo`` full name."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.NETDAEMON
        self.content.path.local = self.localpath
        self.content.path.remote = "apps"

    @property
    def localpath(self):
        """Return the local installation path for this app."""
        return f"{self.hacs.core.config_path}/netdaemon/apps/{self.data.name}"

    async def validate_repository(self):
        """Validate the repository structure.

        Returns:
            bool: True when validation produced no blocking errors.

        Raises:
            HacsException: when no .cs file is found under the content path.
        """
        await self.common_validate()

        # Custom step 1: Validate content.
        if self.repository_manifest:
            if self.data.content_in_root:
                self.content.path.remote = ""

        if self.content.path.remote == "apps":
            self.data.domain = get_first_directory_in_directory(
                self.tree, self.content.path.remote
            )
            self.content.path.remote = f"apps/{self.data.name}"

        # Require at least one C# source file under the content path.
        compliant = False
        for treefile in self.treefiles:
            if treefile.startswith(f"{self.content.path.remote}") and treefile.endswith(
                ".cs"
            ):
                compliant = True
                break
        if not compliant:
            # Fixed typo: "Repostitory" -> "Repository".
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def update_repository(self, ignore_issues=False):
        """Update repository data and refresh the content paths."""
        await self.common_update(ignore_issues)

        # Get appdaemon objects.
        if self.repository_manifest:
            if self.data.content_in_root:
                self.content.path.remote = ""

        if self.content.path.remote == "apps":
            self.data.domain = get_first_directory_in_directory(
                self.tree, self.content.path.remote
            )
            self.content.path.remote = f"apps/{self.data.name}"

        # Set local path
        self.content.path.local = self.localpath

    async def async_post_installation(self):
        """Run post installation steps.

        Best-effort restart of the NetDaemon add-on; failures are ignored
        (e.g. when not running on a supervised install).
        """
        try:
            await self.hacs.hass.services.async_call(
                "hassio", "addon_restart", {"addon": "c6a2317c_netdaemon"}
            )
        except (Exception, BaseException):  # pylint: disable=broad-except
            pass
|
|
@ -0,0 +1,77 @@
|
|||
"""Class for plugins in HACS."""
|
||||
import json
|
||||
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
from custom_components.hacs.helpers.functions.information import find_file_name
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
|
||||
class HacsPlugin(HacsRepository):
    """Plugins (Lovelace frontend resources) in HACS."""

    def __init__(self, full_name):
        """Initialize with the GitHub ``owner/repo`` full name."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.file_name = None
        self.data.category = "plugin"
        self.information.javascript_type = None
        self.content.path.local = self.localpath

    @property
    def localpath(self):
        """Return the local installation path for this plugin."""
        return f"{self.hacs.core.config_path}/www/community/{self.data.full_name.split('/')[-1]}"

    async def validate_repository(self):
        """Validate the repository structure.

        Returns:
            bool: True when validation produced no blocking errors.

        Raises:
            HacsException: when no plugin file could be located.
        """
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        find_file_name(self)

        if self.content.path.remote is None:
            # Fixed typo: "Repostitory" -> "Repository".
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.content.path.remote == "release":
            self.content.single = True

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def update_repository(self, ignore_issues=False):
        """Update repository data and re-resolve the plugin file."""
        await self.common_update(ignore_issues)

        # Get plugin objects.
        find_file_name(self)

        if self.content.path.remote is None:
            # During update this is recorded, not raised (unlike validation).
            self.validate.errors.append(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.content.path.remote == "release":
            self.content.single = True

    async def get_package_content(self):
        """Read package.json (best effort) to pick up the author field."""
        try:
            package = await self.repository_object.get_contents(
                "package.json", self.ref
            )
            package = json.loads(package.content)

            if package:
                self.data.authors = package["author"]
        except (Exception, BaseException):  # pylint: disable=broad-except
            pass
|
|
@ -0,0 +1,83 @@
|
|||
"""Class for python_scripts in HACS."""
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
from custom_components.hacs.helpers.functions.information import find_file_name
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
|
||||
class HacsPythonScript(HacsRepository):
    """python_scripts in HACS.

    Installed as single files under config/python_scripts/.
    """

    category = "python_script"

    def __init__(self, full_name):
        """Initialize with the GitHub ``owner/repo`` full name."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.PYTHON_SCRIPT
        self.content.path.remote = "python_scripts"
        self.content.path.local = self.localpath
        self.content.single = True

    @property
    def localpath(self):
        """Return the local installation path for python_scripts."""
        return f"{self.hacs.core.config_path}/python_scripts"

    def _check_content_compliance(self):
        """Adjust the remote path for content-in-root and require a .py file.

        Shared by validate_repository and update_repository (the logic was
        previously duplicated in both).

        Raises:
            HacsException: when no python file exists under the content path.
        """
        if self.data.content_in_root:
            self.content.path.remote = ""

        for treefile in self.treefiles:
            if treefile.startswith(f"{self.content.path.remote}") and treefile.endswith(
                ".py"
            ):
                return
        raise HacsException(
            f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
        )

    async def validate_repository(self):
        """Validate the repository structure.

        Returns:
            bool: True when validation produced no blocking errors.
        """
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        self._check_content_compliance()

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def async_post_registration(self):
        """Registration."""
        # Set name
        find_file_name(self)

    async def update_repository(self, ignore_issues=False):
        """Update repository data and re-check content compliance."""
        await self.common_update(ignore_issues)

        # Get python_script objects (same rules as validation).
        self._check_content_compliance()

        # Update name
        find_file_name(self)
|
|
@ -0,0 +1,76 @@
|
|||
"""Class for themes in HACS."""
|
||||
from custom_components.hacs.enums import HacsCategory
|
||||
from custom_components.hacs.helpers.classes.exceptions import HacsException
|
||||
from custom_components.hacs.helpers.classes.repository import HacsRepository
|
||||
from custom_components.hacs.helpers.functions.information import find_file_name
|
||||
from custom_components.hacs.helpers.functions.logger import getLogger
|
||||
|
||||
|
||||
class HacsTheme(HacsRepository):
    """Themes in HACS.

    Theme YAML files are installed under config/themes/.
    """

    def __init__(self, full_name):
        """Initialize with the GitHub ``owner/repo`` full name."""
        super().__init__()
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.THEME
        self.content.path.remote = "themes"
        self.content.path.local = self.localpath
        self.content.single = False

    @property
    def localpath(self):
        """Return the local installation path for this theme."""
        return f"{self.hacs.core.config_path}/themes/{self.data.file_name.replace('.yaml', '')}"

    async def async_post_installation(self):
        """Run post installation steps.

        Best-effort theme reload; failures are ignored.
        """
        try:
            await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
        except (Exception, BaseException):  # pylint: disable=broad-except
            pass

    async def validate_repository(self):
        """Validate the repository structure.

        Returns:
            bool: True when validation produced no blocking errors.

        Raises:
            HacsException: when no themes/*.yaml file exists in the tree.
        """
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        compliant = False
        for treefile in self.treefiles:
            if treefile.startswith("themes/") and treefile.endswith(".yaml"):
                compliant = True
                break
        if not compliant:
            # Fixed typo: "Repostitory" -> "Repository".
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.data.content_in_root:
            self.content.path.remote = ""

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self, error)
        return self.validate.success

    async def async_post_registration(self):
        """Registration."""
        # Set name
        find_file_name(self)
        self.content.path.local = self.localpath

    async def update_repository(self, ignore_issues=False):
        """Update repository data and refresh the theme file name."""
        await self.common_update(ignore_issues)

        # Get theme objects.
        if self.data.content_in_root:
            self.content.path.remote = ""

        # Update name
        find_file_name(self)
        self.content.path.local = self.localpath
|
|
@ -0,0 +1,123 @@
|
|||
"""Sensor platform for HACS."""
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from custom_components.hacs.const import DOMAIN, INTEGRATION_VERSION, NAME_SHORT
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
async def async_setup_platform(
    _hass, _config, async_add_entities, _discovery_info=None
):
    """Set up the HACS sensor platform (YAML configuration path)."""
    # A single sensor instance tracks all pending updates.
    async_add_entities([HACSSensor()])
|
||||
|
||||
|
||||
async def async_setup_entry(_hass, _config_entry, async_add_devices):
    """Set up the HACS sensor platform (config entry path)."""
    # Mirrors async_setup_platform: one sensor for the whole install.
    async_add_devices([HACSSensor()])
|
||||
|
||||
|
||||
class HACSDevice(Entity):
    """HACS Device class.

    Mixin providing the device registry entry shared by HACS entities.
    """

    @property
    def device_info(self):
        """Return device information about HACS."""
        info = {
            "identifiers": {(DOMAIN, self.unique_id)},
            "name": NAME_SHORT,
            "manufacturer": "hacs.xyz",
            "model": "",
            "sw_version": INTEGRATION_VERSION,
            # Marks the device as a service rather than physical hardware.
            "entry_type": "service",
        }
        return info
|
||||
|
||||
|
||||
class HACSSensor(HACSDevice):
    """HACS Sensor class.

    State is the number of installed repositories with a pending upgrade;
    refreshed on demand and on ``hacs/status`` bus events.
    """

    def __init__(self):
        """Initialize."""
        self._state = None
        self.repositories = []

    @property
    def should_poll(self):
        """No polling needed; updates are event driven."""
        return False

    async def async_update(self):
        """Manual updates of the sensor."""
        self._update()

    @callback
    def _update_and_write_state(self, *_):
        """Update the sensor and write state."""
        self._update()
        self.async_write_ha_state()

    @callback
    def _update(self):
        """Recompute the pending-upgrade repository list and state."""
        hacs = get_hacs()
        # Skip while background tasks run; data may be inconsistent.
        if hacs.status.background_task:
            return

        self.repositories = [
            repo
            for repo in hacs.repositories
            if repo.pending_upgrade and repo.data.category in hacs.common.categories
        ]
        self._state = len(self.repositories)

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return (
            "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
        )

    @property
    def name(self):
        """Return the name of the sensor."""
        return "hacs"

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "hacs:hacs"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return "pending update(s)"

    @property
    def device_state_attributes(self):
        """Return attributes for the sensor."""
        repositories = [
            {
                "name": repo.data.full_name,
                "display_name": repo.display_name,
                "installed_version": repo.display_installed_version,
                "available_version": repo.display_available_version,
            }
            for repo in self.repositories
        ]
        return {"repositories": repositories}

    async def async_added_to_hass(self) -> None:
        """Register for status events."""
        self.async_on_remove(
            self.hass.bus.async_listen("hacs/status", self._update_and_write_state)
        )
|
|
@ -0,0 +1,68 @@
|
|||
"""Shared HACS elements."""
|
||||
import os
|
||||
|
||||
from .base import HacsBase
|
||||
|
||||
# Process-wide singletons and registries shared across the integration.
SHARE = {
    "hacs": None,
    "factory": None,
    "queue": None,
    "removed_repositories": [],
    "rules": {},
}


def get_hacs() -> "HacsBase":
    """Return the shared Hacs instance, creating the legacy one on first use."""
    if SHARE["hacs"] is None:
        # Imported lazily to avoid a circular import at module load.
        from custom_components.hacs.hacsbase.hacs import Hacs as Legacy

        _hacs = Legacy()

        # Enable action mode in GitHub Actions, but never under pytest.
        # (Fixed idiom: "not X in Y" -> "X not in Y".)
        if "PYTEST" not in os.environ and "GITHUB_ACTION" in os.environ:
            _hacs.system.action = True

        SHARE["hacs"] = _hacs

    return SHARE["hacs"]


def get_factory():
    """Return the shared HacsTaskFactory, creating it on first use."""
    if SHARE["factory"] is None:
        from custom_components.hacs.operational.factory import HacsTaskFactory

        SHARE["factory"] = HacsTaskFactory()

    return SHARE["factory"]


def get_queue():
    """Return the shared QueueManager, creating it on first use."""
    if SHARE["queue"] is None:
        from queueman import QueueManager

        SHARE["queue"] = QueueManager()

    return SHARE["queue"]


def is_removed(repository):
    """Return True if *repository* is registered as removed (exact match)."""
    return repository in [x.repository for x in SHARE["removed_repositories"]]


def get_removed(repository):
    """Return the RemovedRepository entry for *repository*, registering it if new."""
    if not is_removed(repository):
        from custom_components.hacs.helpers.classes.removed import RemovedRepository

        removed_repo = RemovedRepository()
        removed_repo.repository = repository
        SHARE["removed_repositories"].append(removed_repo)
    # NOTE(review): is_removed matches case-sensitively while this lookup is
    # case-insensitive — confirm the intended behavior.
    filter_repos = [
        x
        for x in SHARE["removed_repositories"]
        if x.repository.lower() == repository.lower()
    ]

    return filter_repos.pop() or None


def list_removed_repositories():
    """Return the list of registered removed repositories."""
    return SHARE["removed_repositories"]
|
|
@ -0,0 +1,37 @@
|
|||
"""Provide info to system health."""
|
||||
from aiogithubapi.common.const import BASE_API_URL
|
||||
from homeassistant.components import system_health
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
|
||||
GITHUB_STATUS = "https://www.githubstatus.com/"
|
||||
|
||||
|
||||
@callback
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    # Shown as the section title on the system health page.
    register.domain = "Home Assistant Community Store"
    register.async_register_info(system_health_info, "/hacs")
|
||||
|
||||
|
||||
async def system_health_info(hass):
    """Get info for the info page."""
    client: HacsBase = hass.data[DOMAIN]
    rate_limit = await client.github.get_rate_limit()

    installed = [repo for repo in client.repositories if repo.data.installed]
    return {
        # Reachability check resolves asynchronously on the health page.
        "GitHub API": system_health.async_check_can_reach_url(
            hass, BASE_API_URL, GITHUB_STATUS
        ),
        "Github API Calls Remaining": rate_limit.get("remaining", "0"),
        "Installed Version": client.version,
        "Stage": client.stage,
        "Available Repositories": len(client.repositories),
        "Installed Repositories": len(installed),
    }
|
|
@ -0,0 +1,48 @@
|
|||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"single_instance_allowed": "Only a single configuration of HACS is allowed.",
|
||||
"min_ha_version": "You need at least version {version} of Home Assistant to set up HACS.",
|
||||
"github": "Could not authenticate with GitHub, try again later."
|
||||
},
|
||||
"error": {
|
||||
"auth": "Personal Access Token is not correct",
|
||||
"acc": "You need to acknowledge all the statements before continuing"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"acc_logs": "I know how to access Home Assistant logs",
|
||||
"acc_addons": "I know that there are no add-ons in HACS",
|
||||
"acc_untested": "I know that everything inside HACS is custom and untested by Home Assistant",
|
||||
"acc_disable": "I know that if I get issues with Home Assistant I should disable all my custom_components"
|
||||
},
|
||||
"description": "Before you can set up HACS you need to acknowledge the following",
|
||||
"title": "HACS"
|
||||
},
|
||||
"device": {
|
||||
"title": "Waiting for device activation"
|
||||
}
|
||||
},
|
||||
"progress": {
|
||||
"wait_for_device": "1. Open {url} \n2. Paste the following key to authorize HACS: \n```\n{code}\n```\n"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"not_in_use": "Not in use with YAML",
|
||||
"country": "Filter with country code.",
|
||||
"experimental": "Enable experimental features",
|
||||
"release_limit": "Number of releases to show.",
|
||||
"debug": "Enable debug.",
|
||||
"appdaemon": "Enable AppDaemon apps discovery & tracking",
|
||||
"netdaemon": "Enable NetDaemon apps discovery & tracking",
|
||||
"sidepanel_icon": "Side panel icon",
|
||||
"sidepanel_title": "Side panel title"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
# Repository validation
|
||||
|
||||
This is where the validation rules that run against the various repository categories live.
|
||||
|
||||
## Structure
|
||||
|
||||
- All validation rules are in the directory for their category.
|
||||
- Validation rules that apply to all categories are in the `common` directory.
|
||||
- There is one file per rule.
|
||||
- Every rule needs tests that verify every possible outcome of the rule.
|
||||
- Multiple small rule files are better than one big rule.
|
||||
- All rules use `ValidationBase` or `ActionValidationBase` as the base class.
|
||||
- `ActionValidationBase` is for checks that would break compatibility with existing repositories (the default), so these are only run in GitHub Actions.
|
||||
- The class name should describe what the check does.
|
||||
- Only use `validate` or `async_validate` methods to define validation rules.
|
||||
- If a rule should fail, raise `ValidationException` with the failure message.
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
```python
|
||||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class AwesomeRepository(ValidationBase):
|
||||
def validate(self):
|
||||
if self.repository != "awesome":
|
||||
raise ValidationException("The repository is not awesome")
|
||||
|
||||
class SuperAwesomeRepository(ActionValidationBase, category="integration"):
|
||||
async def async_validate(self):
|
||||
if self.repository != "super-awesome":
|
||||
raise ValidationException("The repository is not super-awesome")
|
||||
```
|
|
@ -0,0 +1,51 @@
|
|||
import asyncio
|
||||
import glob
|
||||
import importlib
|
||||
from os.path import dirname, join, sep
|
||||
|
||||
from custom_components.hacs.share import SHARE, get_hacs
|
||||
|
||||
|
||||
def _initialize_rules():
    """Import every rule module under this package so subclasses self-register."""
    for path in glob.glob(join(dirname(__file__), "**/*.py")):
        # Convert the filesystem path to a dotted module path and import it.
        tail = path.replace(sep, "/").split("custom_components/hacs")[-1]
        module = f"custom_components/hacs{tail}".replace("/", ".")[:-3]
        importlib.import_module(module)
|
||||
|
||||
|
||||
async def async_initialize_rules():
    """Load the rule modules in an executor (imports do blocking I/O)."""
    hacs = get_hacs()
    await hacs.hass.async_add_executor_job(_initialize_rules)
|
||||
|
||||
|
||||
async def async_run_repository_checks(repository):
    """Run all registered validation checks against *repository*.

    Common checks always apply; category checks apply by repository category.
    Action-only checks run only in GitHub Actions mode, where any failure
    terminates the process with exit code 1.
    """
    hacs = get_hacs()
    if not SHARE["rules"]:
        await async_initialize_rules()
    if not hacs.system.running:
        return
    checks = []
    for check in SHARE["rules"].get("common", []):
        checks.append(check(repository))
    for check in SHARE["rules"].get(repository.data.category, []):
        checks.append(check(repository))

    await asyncio.gather(
        *[
            check._async_run_check()
            for check in checks
            if hacs.system.action or not check.action_only
        ]
    )

    total = len([x for x in checks if hacs.system.action or not x.action_only])
    failed = len([x for x in checks if x.failed])

    if failed != 0:
        repository.logger.error("%s %s/%s checks failed", repository, failed, total)
        if hacs.system.action:
            # raise SystemExit instead of the site-injected exit() builtin,
            # which is not guaranteed to exist in non-interactive runs.
            raise SystemExit(1)
    else:
        repository.logger.debug("%s All (%s) checks passed", repository, total)
|
|
@ -0,0 +1,48 @@
|
|||
from custom_components.hacs.share import SHARE, get_hacs
|
||||
|
||||
|
||||
class ValidationException(Exception):
    """Raised by a validation rule when its check fails."""
|
||||
|
||||
|
||||
class ValidationBase:
    """Base class for repository validation rules.

    Subclasses register themselves by category at class-creation time and
    implement ``check`` (sync) and/or ``async_check`` (async), raising
    ValidationException on failure.
    """

    def __init__(self, repository) -> None:
        self.repository = repository
        self.hacs = get_hacs()
        self.failed = False
        self.logger = repository.logger

    def __init_subclass__(cls, category="common", **kwargs) -> None:
        """Initialize a subclass, register if possible."""
        super().__init_subclass__(**kwargs)
        rules = SHARE["rules"]
        if rules.get(category) is None:
            rules[category] = []
        if cls not in rules[category]:
            rules[category].append(cls)

    @property
    def action_only(self):
        """Whether this rule is restricted to GitHub Actions runs."""
        return False

    async def _async_run_check(self):
        """DO NOT OVERRIDE THIS IN SUBCLASSES!"""
        if self.hacs.system.action:
            self.logger.info(f"Running check '{self.__class__.__name__}'")
        try:
            # The sync hook runs in an executor; then the async hook.
            await self.hacs.hass.async_add_executor_job(self.check)
            await self.async_check()
        except ValidationException as exception:
            self.failed = True
            self.logger.error(exception)

    def check(self):
        """Sync hook; override in subclasses."""

    async def async_check(self):
        """Async hook; override in subclasses."""
|
||||
|
||||
|
||||
class ActionValidationBase(ValidationBase):
    """Validation rule that only runs in GitHub Actions."""

    @property
    def action_only(self):
        """Always restricted to action runs."""
        return True
|
|
@ -0,0 +1,10 @@
|
|||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class HacsManifest(ActionValidationBase):
    """Require that the repository ships a hacs.json manifest."""

    def check(self):
        filenames = [x.filename for x in self.repository.tree]
        if "hacs.json" not in filenames:
            raise ValidationException("The repository has no 'hacs.json' file")
|
|
@ -0,0 +1,10 @@
|
|||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class RepositoryDescription(ActionValidationBase):
    """Require that the GitHub repository has a description."""

    def check(self):
        description = self.repository.data.description
        if not description:
            raise ValidationException("The repository has no description")
|
|
@ -0,0 +1,19 @@
|
|||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class RepositoryInformationFile(ActionValidationBase):
    """Require an information file (README with render_readme, or an info file)."""

    async def async_check(self):
        filenames = [x.filename.lower() for x in self.repository.tree]

        # Accepted: render_readme enabled with a readme present, or any info file.
        readme_ok = self.repository.data.render_readme and (
            "readme" in filenames or "readme.md" in filenames
        )
        info_ok = "info" in filenames or "info.md" in filenames

        if not (readme_ok or info_ok):
            raise ValidationException("The repository has no information file")
|
|
@ -0,0 +1,10 @@
|
|||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class RepositoryTopics(ActionValidationBase):
    """Require that the GitHub repository has topics set."""

    def check(self):
        topics = self.repository.data.topics
        if not topics:
            raise ValidationException("The repository has no topics")
|
|
@ -0,0 +1,10 @@
|
|||
from custom_components.hacs.validate.base import (
|
||||
ActionValidationBase,
|
||||
ValidationException,
|
||||
)
|
||||
|
||||
|
||||
class IntegrationManifest(ActionValidationBase, category="integration"):
    """Require that the integration ships a manifest.json."""

    def check(self):
        if "manifest.json" not in [x.filename for x in self.repository.tree]:
            # Fixed message: previously said 'hacs.json' (copy-paste error)
            # although the check is for manifest.json.
            raise ValidationException("The repository has no 'manifest.json' file")
|
|
@ -0,0 +1 @@
|
|||
"""Initialize HACS Web responses"""
|
|
@ -0,0 +1,26 @@
|
|||
from aiohttp import web
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from custom_components.hacs.share import get_hacs
|
||||
|
||||
|
||||
class HacsFrontendDev(HomeAssistantView):
    """Dev View Class for HACS.

    Proxies frontend files from the configured development repo URL.
    """

    requires_auth = False
    name = "hacs_files:frontend"
    url = r"/hacsfiles/frontend/{requested_file:.+}"

    async def get(self, request, requested_file):  # pylint: disable=unused-argument
        """Handle HACS Web requests."""
        hacs = get_hacs()
        filename = requested_file.split("/")[-1]
        fetched = await hacs.session.get(
            f"{hacs.configuration.frontend_repo_url}/{filename}"
        )
        if fetched.status != 200:
            # NOTE(review): implicitly returns None on non-200, as the
            # original did — confirm this is the intended error behavior.
            return None
        body = await fetched.read()
        response = web.Response(body=body)
        response.headers["Content-Type"] = "application/javascript"
        return response
|
Loading…
Reference in New Issue