@@ -5,13 +5,17 @@ import hashlib
 import json
 import logging
 import time
+import shutil
+import tempfile
+import zipfile
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Any
-from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
+from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit, urlparse

 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.components import persistent_notification
 from homeassistant.util import yaml as ha_yaml

 from .storage import BCSStorage, CustomRepo
@@ -27,6 +31,10 @@ class BCSError(Exception):
     """BCS core error."""


+class BCSInstallError(BCSError):
+    """BCS installation/update error."""
+
+
 @dataclass
 class BCSConfig:
     store_url: str
@@ -75,9 +83,13 @@ class BCSCore:
         self.last_index_hash: str | None = None
         self.last_index_loaded_at: float | None = None

+        self._install_lock = asyncio.Lock()
+        self._installed_cache: dict[str, Any] = {}
+
     async def async_initialize(self) -> None:
         """Async initialization that avoids blocking file IO."""
         self.version = await self._read_manifest_version_async()
+        await self._refresh_installed_cache()

     async def _read_manifest_version_async(self) -> str:
         def _read() -> str:
@@ -293,24 +305,43 @@ class BCSCore:

     def list_repos_public(self) -> list[dict[str, Any]]:
         out: list[dict[str, Any]] = []
+
+        installed_map: dict[str, Any] = getattr(self, '_installed_cache', {}) or {}
+        if not isinstance(installed_map, dict):
+            installed_map = {}
+
         for r in self.repos.values():
+            inst = installed_map.get(r.id)
+            installed = bool(inst)
+            installed_domains: list[str] = []
+            installed_version: str | None = None
+            if isinstance(inst, dict):
+                d = inst.get('domains') or []
+                if isinstance(d, list):
+                    installed_domains = [str(x) for x in d if str(x).strip()]
+                v = inst.get('installed_version')
+                installed_version = str(v) if v is not None else None
+
             out.append(
                 {
-                    "id": r.id,
-                    "name": r.name,
-                    "url": r.url,
-                    "source": r.source,
-                    "owner": r.owner,
-                    "provider": r.provider,
-                    "repo_name": r.provider_repo_name,
-                    "description": r.provider_description or r.meta_description,
-                    "default_branch": r.default_branch,
-                    "latest_version": r.latest_version,
-                    "latest_version_source": r.latest_version_source,
-                    "category": r.meta_category,
-                    "meta_author": r.meta_author,
-                    "meta_maintainer": r.meta_maintainer,
-                    "meta_source": r.meta_source,
+                    'id': r.id,
+                    'name': r.name,
+                    'url': r.url,
+                    'source': r.source,
+                    'owner': r.owner,
+                    'provider': r.provider,
+                    'repo_name': r.provider_repo_name,
+                    'description': r.provider_description or r.meta_description,
+                    'default_branch': r.default_branch,
+                    'latest_version': r.latest_version,
+                    'latest_version_source': r.latest_version_source,
+                    'category': r.meta_category,
+                    'meta_author': r.meta_author,
+                    'meta_maintainer': r.meta_maintainer,
+                    'meta_source': r.meta_source,
+                    'installed': installed,
+                    'installed_version': installed_version,
+                    'installed_domains': installed_domains,
                 }
             )
         return out
@@ -326,3 +357,205 @@ class BCSCore:
             provider=repo.provider,
             default_branch=repo.default_branch,
         )
+
+    def _pick_ref_for_install(self, repo: RepoItem) -> str:
+        # Prefer latest_version (release/tag/atom-derived), fallback to default branch, then main.
+        if repo.latest_version and str(repo.latest_version).strip():
+            return str(repo.latest_version).strip()
+        if repo.default_branch and str(repo.default_branch).strip():
+            return str(repo.default_branch).strip()
+        return "main"
+
+    def _build_zip_url(self, repo_url: str, ref: str) -> str:
+        """Build a public ZIP download URL (provider-neutral, no tokens).
+
+        Supports:
+        - GitHub: codeload
+        - GitLab: /-/archive/
+        - Gitea (incl. Bahmcloud): /archive/<ref>.zip
+        """
+        ref = (ref or "").strip()
+        if not ref:
+            raise BCSInstallError("Missing ref for ZIP download")
+
+        u = urlparse(repo_url.rstrip("/"))
+        host = (u.netloc or "").lower()
+        parts = [p for p in u.path.strip("/").split("/") if p]
+        if len(parts) < 2:
+            raise BCSInstallError("Invalid repository URL (missing owner/repo)")
+
+        owner = parts[0]
+        repo = parts[1]
+        if repo.endswith(".git"):
+            repo = repo[:-4]
+
+        if "github.com" in host:
+            return f"https://codeload.github.com/{owner}/{repo}/zip/{ref}"
+
+        if "gitlab" in host:
+            base = f"{u.scheme}://{u.netloc}"
+            path = u.path.strip("/")
+            if path.endswith(".git"):
+                path = path[:-4]
+            return f"{base}/{path}/-/archive/{ref}/{repo}-{ref}.zip"
+
+        base = f"{u.scheme}://{u.netloc}"
+        path = u.path.strip("/")
+        if path.endswith(".git"):
+            path = path[:-4]
+        return f"{base}/{path}/archive/{ref}.zip"
+
+    async def _download_zip(self, url: str, dest: Path) -> None:
+        session = async_get_clientsession(self.hass)
+        headers = {
+            "User-Agent": "BahmcloudStore (Home Assistant)",
+            "Cache-Control": "no-cache, no-store, max-age=0",
+            "Pragma": "no-cache",
+        }
+
+        async with session.get(url, timeout=120, headers=headers) as resp:
+            if resp.status != 200:
+                raise BCSInstallError(f"zip_url returned {resp.status}")
+            data = await resp.read()
+
+        await self.hass.async_add_executor_job(dest.write_bytes, data)
+
+    async def _extract_zip(self, zip_path: Path, extract_dir: Path) -> None:
+        def _extract() -> None:
+            with zipfile.ZipFile(zip_path, "r") as zf:
+                zf.extractall(extract_dir)
+
+        await self.hass.async_add_executor_job(_extract)
+
+    @staticmethod
+    def _find_custom_components_root(extract_root: Path) -> Path | None:
+        direct = extract_root / "custom_components"
+        if direct.exists() and direct.is_dir():
+            return direct
+
+        for child in extract_root.iterdir():
+            candidate = child / "custom_components"
+            if candidate.exists() and candidate.is_dir():
+                return candidate
+        return None
+
+    async def _copy_domain_dir(self, src_domain_dir: Path, domain: str) -> None:
+        dest_root = Path(self.hass.config.path("custom_components"))
+        target = dest_root / domain
+        tmp_target = dest_root / f".bcs_tmp_{domain}_{int(time.time())}"
+
+        def _copy() -> None:
+            if tmp_target.exists():
+                shutil.rmtree(tmp_target, ignore_errors=True)
+
+            shutil.copytree(src_domain_dir, tmp_target, dirs_exist_ok=True)
+
+            if target.exists():
+                shutil.rmtree(target, ignore_errors=True)
+
+            tmp_target.rename(target)
+
+        await self.hass.async_add_executor_job(_copy)
+
+    async def _read_installed_version(self, domain: str) -> str | None:
+        def _read() -> str | None:
+            try:
+                p = Path(self.hass.config.path("custom_components", domain, "manifest.json"))
+                if not p.exists():
+                    return None
+                data = json.loads(p.read_text(encoding="utf-8"))
+                v = data.get("version")
+                return str(v) if v else None
+            except Exception:
+                return None

+        return await self.hass.async_add_executor_job(_read)
+
+    async def _refresh_installed_cache(self) -> None:
+        try:
+            items = await self.storage.list_installed_repos()
+            cache: dict[str, Any] = {}
+            for it in items:
+                cache[it.repo_id] = {
+                    "domains": it.domains,
+                    "installed_version": it.installed_version,
+                    "ref": it.ref,
+                    "installed_at": it.installed_at,
+                }
+            self._installed_cache = cache
+        except Exception:
+            self._installed_cache = {}
+
+    async def install_repo(self, repo_id: str) -> dict[str, Any]:
+        repo = self.get_repo(repo_id)
+        if not repo:
+            raise BCSInstallError(f"repo_id not found: {repo_id}")
+
+        async with self._install_lock:
+            ref = self._pick_ref_for_install(repo)
+            zip_url = self._build_zip_url(repo.url, ref)
+
+            _LOGGER.info("BCS install started: repo_id=%s ref=%s zip_url=%s", repo_id, ref, zip_url)
+
+            with tempfile.TemporaryDirectory(prefix="bcs_install_") as td:
+                tmp = Path(td)
+                zip_path = tmp / "repo.zip"
+                extract_dir = tmp / "extract"
+                extract_dir.mkdir(parents=True, exist_ok=True)
+
+                await self._download_zip(zip_url, zip_path)
+                await self._extract_zip(zip_path, extract_dir)
+
+                cc_root = self._find_custom_components_root(extract_dir)
+                if not cc_root:
+                    raise BCSInstallError("custom_components folder not found in repository ZIP")
+
+                installed_domains: list[str] = []
+                for domain_dir in cc_root.iterdir():
+                    if not domain_dir.is_dir():
+                        continue
+                    manifest = domain_dir / "manifest.json"
+                    if not manifest.exists():
+                        continue
+
+                    domain = domain_dir.name
+                    await self._copy_domain_dir(domain_dir, domain)
+                    installed_domains.append(domain)
+
+                if not installed_domains:
+                    raise BCSInstallError("No integrations found under custom_components/ (missing manifest.json)")
+
+                installed_version = await self._read_installed_version(installed_domains[0])
+
+                await self.storage.set_installed_repo(
+                    repo_id=repo_id,
+                    url=repo.url,
+                    domains=installed_domains,
+                    installed_version=installed_version,
+                    ref=ref,
+                )
+                await self._refresh_installed_cache()
+
+                persistent_notification.async_create(
+                    self.hass,
+                    "Bahmcloud Store installation finished. A Home Assistant restart is required to load the integration.",
+                    title="Bahmcloud Store",
+                    notification_id="bcs_restart_required",
+                )
+
+                _LOGGER.info("BCS install complete: repo_id=%s domains=%s", repo_id, installed_domains)
+                self.signal_updated()
+                return {
+                    "ok": True,
+                    "repo_id": repo_id,
+                    "domains": installed_domains,
+                    "installed_version": installed_version,
+                    "restart_required": True,
+                }
+
+    async def update_repo(self, repo_id: str) -> dict[str, Any]:
+        _LOGGER.info("BCS update started: repo_id=%s", repo_id)
+        return await self.install_repo(repo_id)
+
+    async def request_restart(self) -> None:
+        await self.hass.services.async_call("homeassistant", "restart", {}, blocking=False)
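
For context, a rough sketch of the archive URLs the _build_zip_url helper above aims to produce. This is a simplified standalone re-implementation for illustration only: the hostnames, owner and repository names are made-up examples, and GitLab subgroup paths are not handled here (the method above keeps the full path for that case).

from urllib.parse import urlparse

def sketch_zip_url(repo_url: str, ref: str) -> str:
    # Simplified illustration of the provider rules described in _build_zip_url.
    u = urlparse(repo_url.rstrip("/"))
    owner, repo = [p for p in u.path.strip("/").split("/") if p][:2]
    repo = repo[:-4] if repo.endswith(".git") else repo
    if "github.com" in u.netloc.lower():
        return f"https://codeload.github.com/{owner}/{repo}/zip/{ref}"
    if "gitlab" in u.netloc.lower():
        return f"{u.scheme}://{u.netloc}/{owner}/{repo}/-/archive/{ref}/{repo}-{ref}.zip"
    return f"{u.scheme}://{u.netloc}/{owner}/{repo}/archive/{ref}.zip"  # Gitea-style fallback

print(sketch_zip_url("https://github.com/acme/widget", "v1.2.3"))
# https://codeload.github.com/acme/widget/zip/v1.2.3
print(sketch_zip_url("https://gitlab.example.com/acme/widget.git", "v1.2.3"))
# https://gitlab.example.com/acme/widget/-/archive/v1.2.3/widget-v1.2.3.zip
print(sketch_zip_url("https://git.example.org/acme/widget", "main"))
# https://git.example.org/acme/widget/archive/main.zip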
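
A minimal usage sketch for the installer, assuming a BCSCore instance is already available (here called core, with a placeholder repository id); the result keys mirror the dictionary returned by install_repo above.

async def demo_install(core) -> None:
    # "core" is an existing BCSCore instance; "acme_widget" is a placeholder repo id.
    result = await core.install_repo("acme_widget")
    # Expected shape, per install_repo above:
    # {"ok": True, "repo_id": "acme_widget", "domains": ["..."],
    #  "installed_version": "1.2.3" or None, "restart_required": True}
    if result.get("restart_required"):
        # Optionally trigger the restart that the persistent notification asks for.
        await core.request_restart()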
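
And a small consumer-side sketch built on the list_repos_public() payload above: it flags installed entries whose recorded version differs from the latest known version. The plain string comparison is purely illustrative; real version ordering would need a proper version parser.

from typing import Any

def sketch_updates_available(repos: list[dict[str, Any]]) -> list[str]:
    # "repos" is the output of BCSCore.list_repos_public().
    pending: list[str] = []
    for item in repos:
        if not item.get("installed"):
            continue
        installed = item.get("installed_version")
        latest = item.get("latest_version")
        if installed and latest and str(installed) != str(latest):
            pending.append(str(item["id"]))
    return pending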