Compare commits
35 Commits
| SHA1 |
|---|
| 870e77ec13 |
| 38fb9fb073 |
| c20bd4dd07 |
| 296c816633 |
| 18a2b5529c |
| 246fab7e1e |
| ce5802721f |
| 2f46966fe2 |
| 132f9e27c1 |
| 618511be73 |
| 6488b434d8 |
| bffc594da5 |
| d78217100c |
| 09e1ef1af5 |
| 9ad558c9ab |
| 19df0eea22 |
| 745979b9a6 |
| f861b2490a |
| 32946c1a98 |
| a9a681d801 |
| 2ae6ac43a5 |
| 504c126c2c |
| 85cc97b557 |
| 4ca80a9c88 |
| ac5bc8a6f4 |
| c4361cc8bd |
| 1794d579d2 |
| bcfbf7151c |
| 38730cdd31 |
| 5d5d78d727 |
| 67297bfc9c |
| 82fda5dfc4 |
| 907f14b73c |
| 3eefd447ac |
| 72ce95525c |
CHANGELOG.md (63 changed lines)
@@ -11,19 +11,62 @@ Sections:

 ---

+## [0.5.0] - 2026-01-15
+
+### Added
+
+- Manual refresh button that triggers a full backend refresh (store index + provider data).
+- Unified refresh pipeline: startup, timer and UI now use the same refresh logic.
+- Cache-busting for store index requests to always fetch the latest store.yaml.
+
+### Improved
+
+- Logging for store index loading and parsing.
+- Refresh behavior now deterministic and verifiable via logs.
+
+### Fixed
+
+- Refresh button previously only reloaded cached data.
+- Store index was not always reloaded immediately on user action.
+
+## [0.4.1] - 2026-01-15
+
+### Fixed
+
+- Fixed GitLab README loading by using robust raw file endpoints.
+- Added support for nested GitLab groups when resolving README paths.
+- Added fallback handling for multiple README filenames (`README.md`, `README`, `README.rst`, etc.).
+- Added branch fallback logic for README loading (`default`, `main`, `master`).
+- Improved error resilience so README loading failures never break the store core.
+- No behavior change for GitHub and Gitea providers.
+
 ## [0.4.0] - 2026-01-15

 ### Added

-- Repository detail view (second page) in the Store UI.
-- README rendering using Home Assistant's `ha-markdown` element.
-- Floating action buttons (FAB):
-  - Open repository
-  - Reload README
-  - Install (coming soon)
-  - Update (coming soon)
-- Search field and category filter on the repository list page.
-- New authenticated API endpoint:
-  - `GET /api/bcs/readme?repo_id=<id>` returns README markdown (best-effort).
+- Initial public release of the Bahmcloud Store integration.
+- Sidebar panel with repository browser UI.
+- Support for loading repositories from a central `store.yaml` index.
+- Support for custom repositories added by the user.
+- Provider abstraction for GitHub, GitLab and Gitea:
+  - Fetch repository information (name, description, default branch).
+  - Resolve latest version from:
+    - Releases
+    - Tags
+    - Fallback mechanisms.
+- Repository metadata support via:
+  - `bcs.yaml`
+  - `hacs.yaml`
+  - `hacs.json`
+- README loading and rendering pipeline:
+  - Fetch raw README files.
+  - Server-side Markdown rendering.
+  - Sanitized HTML output for the panel UI.
+- Auto refresh mechanism for store index and repository metadata.
+- API endpoints:
+  - List repositories
+  - Add custom repository
+  - Remove repository
+  Persisted via Home Assistant storage (`.storage/bcs_store`).
+- Public static asset endpoint for panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
+- Initial API namespace:
+  - `GET /api/bcs` list merged repositories (index + custom)
+  - `POST /api/bcs` add custom repository
+  - `DELETE /api/bcs/custom_repo` remove custom repository

 ### Changed

 - Repository cards are now clickable to open the detail view.
@@ -4,10 +4,8 @@ import logging
 from datetime import timedelta

 from homeassistant.core import HomeAssistant
-from homeassistant.const import Platform
-from homeassistant.helpers.discovery import async_load_platform
-from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.components.panel_custom import async_register_panel
+from homeassistant.helpers.event import async_track_time_interval

 from .core import BCSCore, BCSConfig, BCSError

@@ -20,20 +18,38 @@ CONF_STORE_URL = "store_url"


 async def async_setup(hass: HomeAssistant, config: dict) -> bool:
-    cfg = config.get(DOMAIN, {})
+    cfg = config.get(DOMAIN, {}) or {}
     store_url = cfg.get(CONF_STORE_URL, DEFAULT_STORE_URL)

     core = BCSCore(hass, BCSConfig(store_url=store_url))
     hass.data[DOMAIN] = core

-    await core.register_http_views()
+    await core.async_initialize()
+
+    from .views import (
+        StaticAssetsView,
+        BCSApiView,
+        BCSReadmeView,
+        BCSCustomRepoView,
+        BCSInstallView,
+        BCSUpdateView,
+        BCSRestartView,
+    )
+
+    hass.http.register_view(StaticAssetsView())
+    hass.http.register_view(BCSApiView(core))
+    hass.http.register_view(BCSReadmeView(core))
+    hass.http.register_view(BCSCustomRepoView(core))
+    hass.http.register_view(BCSInstallView(core))
+    hass.http.register_view(BCSUpdateView(core))
+    hass.http.register_view(BCSRestartView(core))

-    # RESTORE: keep the module_url pattern that worked for you
     await async_register_panel(
         hass,
         frontend_url_path="bahmcloud-store",
         webcomponent_name="bahmcloud-store-panel",
-        module_url="/api/bahmcloud_store_static/panel.js?v=42",
+        # IMPORTANT: bump v to avoid caching old JS
+        module_url="/api/bahmcloud_store_static/panel.js?v=101",
         sidebar_title="Bahmcloud Store",
         sidebar_icon="mdi:store",
         require_admin=True,
@@ -41,19 +57,19 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
     )

     try:
-        await core.refresh()
+        await core.full_refresh(source="startup")
     except BCSError as e:
         _LOGGER.error("Initial refresh failed: %s", e)

     async def periodic(_now) -> None:
         try:
-            await core.refresh()
-            core.signal_updated()
+            await core.full_refresh(source="timer")
         except BCSError as e:
             _LOGGER.warning("Periodic refresh failed: %s", e)
+        except Exception as e:  # pylint: disable=broad-exception-caught
+            _LOGGER.exception("Unexpected error during periodic refresh: %s", e)

-    interval = timedelta(seconds=int(core.refresh_seconds or 300))
-    async_track_time_interval(hass, periodic, interval)
+    interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
+    async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds))

-    await async_load_platform(hass, Platform.UPDATE, DOMAIN, {}, config)
     return True
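A note on the interval line in the hunk above: reading the refresh period via `getattr(...)` with a double fallback survives a missing attribute, `None`, and `0` alike. A minimal sketch (the stub class is hypothetical, not from the diff):

```python
# Demonstrates the defensive interval read used in async_setup.
class _CoreStub:
    refresh_seconds = None  # e.g. index not loaded yet

core = _CoreStub()
interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
assert interval_seconds == 300  # falls back to the 5-minute default
```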
@@ -1,21 +1,25 @@
 from __future__ import annotations

 import asyncio
+import hashlib
 import json
 import logging
+import time
+import shutil
+import tempfile
+import zipfile
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Any
-from urllib.parse import urlparse
+from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit, urlparse

 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.components import persistent_notification
 from homeassistant.util import yaml as ha_yaml

 from .storage import BCSStorage, CustomRepo
-from .views import StaticAssetsView, BCSApiView, BCSReadmeView
-from .custom_repo_view import BCSCustomRepoView
-from .providers import fetch_repo_info, detect_provider, RepoInfo
+from .providers import fetch_repo_info, detect_provider, RepoInfo, fetch_readme_markdown
 from .metadata import fetch_repo_metadata, RepoMetadata

 _LOGGER = logging.getLogger(__name__)
@@ -27,6 +31,10 @@ class BCSError(Exception):
     """BCS core error."""


+class BCSInstallError(BCSError):
+    """BCS installation/update error."""
+
+
 @dataclass
 class BCSConfig:
     store_url: str
@@ -46,7 +54,7 @@ class RepoItem:
     default_branch: str | None = None

     latest_version: str | None = None
-    latest_version_source: str | None = None  # "release" | "tag" | None
+    latest_version_source: str | None = None  # "release" | "tag" | "atom" | None

     meta_source: str | None = None
     meta_name: str | None = None
@@ -66,16 +74,34 @@ class BCSCore:
         self.repos: dict[str, RepoItem] = {}
         self._listeners: list[callable] = []

-        self.version: str = self._read_manifest_version()
+        # Will be loaded asynchronously (no blocking IO in event loop)
+        self.version: str = "unknown"

-    def _read_manifest_version(self) -> str:
-        try:
-            manifest_path = Path(__file__).resolve().parent / "manifest.json"
-            data = json.loads(manifest_path.read_text(encoding="utf-8"))
-            v = data.get("version")
-            return str(v) if v else "unknown"
-        except Exception:
-            return "unknown"
+        # Diagnostics (helps verify refresh behavior)
+        self.last_index_url: str | None = None
+        self.last_index_bytes: int | None = None
+        self.last_index_hash: str | None = None
+        self.last_index_loaded_at: float | None = None
+
+        self._install_lock = asyncio.Lock()
+        self._installed_cache: dict[str, Any] = {}
+
+    async def async_initialize(self) -> None:
+        """Async initialization that avoids blocking file IO."""
+        self.version = await self._read_manifest_version_async()
+        await self._refresh_installed_cache()
+
+    async def _read_manifest_version_async(self) -> str:
+        def _read() -> str:
+            try:
+                manifest_path = Path(__file__).resolve().parent / "manifest.json"
+                data = json.loads(manifest_path.read_text(encoding="utf-8"))
+                v = data.get("version")
+                return str(v) if v else "unknown"
+            except Exception:
+                return "unknown"
+
+        return await self.hass.async_add_executor_job(_read)

     def add_listener(self, cb) -> None:
         self._listeners.append(cb)
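The hunk above moves manifest reading off the event loop: the synchronous read is wrapped in a closure and handed to Home Assistant's executor. A minimal sketch of the same pattern, with the helper extracted standalone (names here are illustrative, not from the diff):

```python
# Blocking file IO is pushed to the executor so the event loop never blocks.
import json
from pathlib import Path

def read_version_sync(manifest_path: Path) -> str:
    try:
        data = json.loads(manifest_path.read_text(encoding="utf-8"))
        return str(data.get("version") or "unknown")
    except Exception:
        return "unknown"

# Inside an async method on a HomeAssistant-aware object:
#     version = await self.hass.async_add_executor_job(read_version_sync, path)
```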
@@ -87,11 +113,11 @@ class BCSCore:
         except Exception:
             pass

-    async def register_http_views(self) -> None:
-        self.hass.http.register_view(StaticAssetsView())
-        self.hass.http.register_view(BCSApiView(self))
-        self.hass.http.register_view(BCSReadmeView(self))
-        self.hass.http.register_view(BCSCustomRepoView(self))
+    async def full_refresh(self, source: str = "manual") -> None:
+        """Single refresh entry-point used by both timer and manual button."""
+        _LOGGER.info("BCS full refresh triggered (source=%s)", source)
+        await self.refresh()
+        self.signal_updated()

     def get_repo(self, repo_id: str) -> RepoItem | None:
         return self.repos.get(repo_id)
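`full_refresh` is the single entry point promised in the 0.5.0 changelog; the `source` argument only labels the log line, so startup, timer and UI triggers can be told apart. A runnable sketch of the contract (the stub class stands in for `BCSCore`):

```python
import asyncio

class FakeCore:
    """Stand-in for BCSCore, showing the single refresh entry point."""
    async def refresh(self) -> None: ...
    def signal_updated(self) -> None: ...
    async def full_refresh(self, source: str = "manual") -> None:
        print(f"BCS full refresh triggered (source={source})")
        await self.refresh()
        self.signal_updated()

async def main() -> None:
    core = FakeCore()
    await core.full_refresh(source="startup")  # during async_setup
    await core.full_refresh(source="timer")    # from the periodic callback

asyncio.run(main())
```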
@@ -121,6 +147,13 @@ class BCSCore:
         await self._enrich_and_resolve(merged)
         self.repos = merged

+        _LOGGER.info(
+            "BCS refresh complete: repos=%s (index=%s, custom=%s)",
+            len(self.repos),
+            len([r for r in self.repos.values() if r.source == "index"]),
+            len([r for r in self.repos.values() if r.source == "custom"]),
+        )
+
     async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None:
         sem = asyncio.Semaphore(6)
@@ -155,16 +188,72 @@ class BCSCore:

         await asyncio.gather(*(process_one(r) for r in merged.values()), return_exceptions=True)

-    async def _load_index_repos(self) -> tuple[list[RepoItem], int]:
+    def _add_cache_buster(self, url: str) -> str:
+        parts = urlsplit(url)
+        q = dict(parse_qsl(parts.query, keep_blank_values=True))
+        q["t"] = str(int(time.time()))
+        new_query = urlencode(q)
+        return urlunsplit((parts.scheme, parts.netloc, parts.path, new_query, parts.fragment))
+
+    def _gitea_src_to_raw(self, url: str) -> str:
+        parts = urlsplit(url)
+        path = parts.path
+        path2 = path.replace("/src/branch/", "/raw/branch/")
+        if path2 == path:
+            return url
+        return urlunsplit((parts.scheme, parts.netloc, path2, parts.query, parts.fragment))
+
+    async def _fetch_store_text(self, url: str) -> str:
         session = async_get_clientsession(self.hass)
+
+        headers = {
+            "User-Agent": "BahmcloudStore (Home Assistant)",
+            "Cache-Control": "no-cache, no-store, max-age=0",
+            "Pragma": "no-cache",
+            "Expires": "0",
+        }
+
+        async with session.get(url, timeout=30, headers=headers) as resp:
+            if resp.status != 200:
+                raise BCSError(f"store_url returned {resp.status}")
+            return await resp.text()
+
+    async def _load_index_repos(self) -> tuple[list[RepoItem], int]:
+        store_url = (self.config.store_url or "").strip()
+        if not store_url:
+            raise BCSError("store_url is empty")
+
+        url = self._add_cache_buster(store_url)
+
         try:
-            async with session.get(self.config.store_url, timeout=20) as resp:
-                if resp.status != 200:
-                    raise BCSError(f"store_url returned {resp.status}")
-                raw = await resp.text()
+            raw = await self._fetch_store_text(url)
+
+            # If we fetched a HTML page (wrong endpoint), attempt raw conversion.
+            if "<html" in raw.lower() or "<!doctype html" in raw.lower():
+                fallback = self._add_cache_buster(self._gitea_src_to_raw(store_url))
+                if fallback != url:
+                    _LOGGER.warning("BCS store index looked like HTML, retrying raw URL")
+                    raw = await self._fetch_store_text(fallback)
+                    url = fallback
+
         except Exception as e:
             raise BCSError(f"Failed fetching store index: {e}") from e

+        # Diagnostics
+        b = raw.encode("utf-8", errors="replace")
+        h = hashlib.sha256(b).hexdigest()[:12]
+        self.last_index_url = url
+        self.last_index_bytes = len(b)
+        self.last_index_hash = h
+        self.last_index_loaded_at = time.time()
+
+        _LOGGER.info(
+            "BCS index loaded: url=%s bytes=%s sha=%s",
+            self.last_index_url,
+            self.last_index_bytes,
+            self.last_index_hash,
+        )
+
         try:
             data = ha_yaml.parse_yaml(raw)
             if not isinstance(data, dict):
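A worked example of the two URL helpers introduced above, re-implemented standalone for illustration (logic copied from the hunk; the host is hypothetical):

```python
import time
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit

def add_cache_buster(url: str) -> str:
    # Appends/overwrites a ?t=<unix-timestamp> query parameter.
    parts = urlsplit(url)
    q = dict(parse_qsl(parts.query, keep_blank_values=True))
    q["t"] = str(int(time.time()))
    return urlunsplit((parts.scheme, parts.netloc, parts.path, urlencode(q), parts.fragment))

def gitea_src_to_raw(url: str) -> str:
    # Rewrites a Gitea web-UI file URL to the raw-content endpoint.
    parts = urlsplit(url)
    path2 = parts.path.replace("/src/branch/", "/raw/branch/")
    if path2 == parts.path:
        return url
    return urlunsplit((parts.scheme, parts.netloc, path2, parts.query, parts.fragment))

src = "https://git.example.com/org/store/src/branch/main/store.yaml"
print(gitea_src_to_raw(src))
# -> https://git.example.com/org/store/raw/branch/main/store.yaml
print(add_cache_buster(gitea_src_to_raw(src)))
# -> https://git.example.com/org/store/raw/branch/main/store.yaml?t=<timestamp>
```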
@@ -179,20 +268,21 @@ class BCSCore:
             for i, r in enumerate(repos):
                 if not isinstance(r, dict):
                     continue
-                url = str(r.get("url", "")).strip()
-                if not url:
+                repo_url = str(r.get("url", "")).strip()
+                if not repo_url:
                     continue
-                name = str(r.get("name") or url).strip()
+                name = str(r.get("name") or repo_url).strip()

                 items.append(
                     RepoItem(
                         id=f"index:{i}",
                         name=name,
-                        url=url,
+                        url=repo_url,
                         source="index",
                     )
                 )

+            _LOGGER.info("BCS index parsed: repos=%s refresh_seconds=%s", len(items), refresh_seconds)
             return items, refresh_seconds
         except Exception as e:
             raise BCSError(f"Invalid store.yaml: {e}") from e
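For reference, a `store.yaml` shape the parser above would accept, inferred from the keys it reads (a `repos` list with `url` and optional `name`, plus a `refresh_seconds` value); treat the exact schema as an assumption since the full parser is not shown:

```python
import yaml  # PyYAML used here for illustration; HA itself uses homeassistant.util.yaml

raw = """
refresh_seconds: 300
repos:
  - url: https://github.com/acme/ha-widget
    name: ACME Widget
  - url: https://git.example.com/org/another-integration
"""
data = yaml.safe_load(raw)
for i, r in enumerate(data["repos"]):
    print(f"index:{i}", r.get("name") or r["url"])
```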
@@ -203,159 +293,269 @@ class BCSCore:
             raise BCSError("Missing url")

         c = await self.storage.add_custom_repo(url, name)
-        await self.refresh()
-        self.signal_updated()
+        await self.full_refresh(source="custom_repo_add")
         return c

     async def remove_custom_repo(self, repo_id: str) -> None:
         await self.storage.remove_custom_repo(repo_id)
-        await self.refresh()
-        self.signal_updated()
+        await self.full_refresh(source="custom_repo_remove")

     async def list_custom_repos(self) -> list[CustomRepo]:
         return await self.storage.list_custom_repos()

     def list_repos_public(self) -> list[dict[str, Any]]:
         out: list[dict[str, Any]] = []
+
+        installed_map: dict[str, Any] = getattr(self, '_installed_cache', {}) or {}
+        if not isinstance(installed_map, dict):
+            installed_map = {}
+
         for r in self.repos.values():
+            inst = installed_map.get(r.id)
+            installed = bool(inst)
+            installed_domains: list[str] = []
+            installed_version: str | None = None
+            if isinstance(inst, dict):
+                d = inst.get('domains') or []
+                if isinstance(d, list):
+                    installed_domains = [str(x) for x in d if str(x).strip()]
+                v = inst.get('installed_version')
+                installed_version = str(v) if v is not None else None
+
             out.append(
                 {
-                    "id": r.id,
-                    "name": r.name,
-                    "url": r.url,
-                    "source": r.source,
-                    "owner": r.owner,
-                    "provider": r.provider,
-                    "repo_name": r.provider_repo_name,
-                    "description": r.provider_description or r.meta_description,
-                    "default_branch": r.default_branch,
-                    "latest_version": r.latest_version,
-                    "latest_version_source": r.latest_version_source,
-                    "category": r.meta_category,
-                    "meta_author": r.meta_author,
-                    "meta_maintainer": r.meta_maintainer,
-                    "meta_source": r.meta_source,
+                    'id': r.id,
+                    'name': r.name,
+                    'url': r.url,
+                    'source': r.source,
+                    'owner': r.owner,
+                    'provider': r.provider,
+                    'repo_name': r.provider_repo_name,
+                    'description': r.provider_description or r.meta_description,
+                    'default_branch': r.default_branch,
+                    'latest_version': r.latest_version,
+                    'latest_version_source': r.latest_version_source,
+                    'category': r.meta_category,
+                    'meta_author': r.meta_author,
+                    'meta_maintainer': r.meta_maintainer,
+                    'meta_source': r.meta_source,
+                    'installed': installed,
+                    'installed_version': installed_version,
+                    'installed_domains': installed_domains,
                 }
             )
         return out

-    def _split_owner_repo(self, repo_url: str) -> tuple[str | None, str | None]:
-        u = urlparse(repo_url.rstrip("/"))
-        parts = [p for p in u.path.strip("/").split("/") if p]
-        if len(parts) < 2:
-            return None, None
-        owner = parts[0].strip() or None
-        name = parts[1].strip()
-        if name.endswith(".git"):
-            name = name[:-4]
-        name = name.strip() or None
-        return owner, name
-
-    def _is_github(self, repo_url: str) -> bool:
-        return "github.com" in urlparse(repo_url).netloc.lower()
-
-    def _is_gitea(self, repo_url: str) -> bool:
-        host = urlparse(repo_url).netloc.lower()
-        return host and "github.com" not in host and "gitlab.com" not in host
-
-    async def _fetch_text(self, url: str) -> str | None:
-        session = async_get_clientsession(self.hass)
-        try:
-            async with session.get(url, timeout=20) as resp:
-                if resp.status != 200:
-                    return None
-                return await resp.text()
-        except Exception:
-            return None
-
     async def fetch_readme_markdown(self, repo_id: str) -> str | None:
-        """Fetch README markdown from GitHub, Gitea or GitLab.
-
-        Defensive behavior:
-        - tries multiple common filenames
-        - tries multiple branches (default, main, master)
-        - uses public raw endpoints (no tokens required for public repositories)
-        """
         repo = self.get_repo(repo_id)
         if not repo:
             return None

-        repo_url = (repo.url or "").strip()
-        if not repo_url:
-            return None
-
-        # Branch fallbacks
-        branch_candidates: list[str] = []
-        if repo.default_branch and str(repo.default_branch).strip():
-            branch_candidates.append(str(repo.default_branch).strip())
-        for b in ("main", "master"):
-            if b not in branch_candidates:
-                branch_candidates.append(b)
-
-        # Filename fallbacks
-        filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]
-
-        provider = (repo.provider or "").strip().lower()
-        if not provider:
-            provider = detect_provider(repo_url) or ""
-
-        u = urlparse(repo_url.rstrip("/"))
-        host = (u.netloc or "").lower()
-
-        candidates: list[str] = []
-
-        if self._is_github(repo_url):
-            owner, name = self._split_owner_repo(repo_url)
-            if not owner or not name:
-                return None
-            for branch in branch_candidates:
-                base = f"https://raw.githubusercontent.com/{owner}/{name}/{branch}"
-                candidates.extend([f"{base}/{fn}" for fn in filenames])
-
-        elif provider == "gitlab" or "gitlab" in host:
-            # GitLab can have nested groups: /group/subgroup/repo
-            parts = [p for p in u.path.strip("/").split("/") if p]
-            if len(parts) < 2:
-                return None
-
-            repo_name = parts[-1].strip()
-            if repo_name.endswith(".git"):
-                repo_name = repo_name[:-4]
-            group_path = "/".join(parts[:-1]).strip("/")
-            if not group_path or not repo_name:
-                return None
-
-            root = f"{u.scheme}://{u.netloc}/{group_path}/{repo_name}"
-            for branch in branch_candidates:
-                bases = [
-                    f"{root}/-/raw/{branch}",
-                    # Some instances may expose /raw/<branch> as well
-                    f"{root}/raw/{branch}",
-                ]
-                for b in bases:
-                    candidates.extend([f"{b}/{fn}" for fn in filenames])
-
-        elif self._is_gitea(repo_url):
-            owner, name = self._split_owner_repo(repo_url)
-            if not owner or not name:
-                return None
-
-            root = f"{u.scheme}://{u.netloc}/{owner}/{name}"
-
-            for branch in branch_candidates:
-                bases = [
-                    f"{root}/raw/branch/{branch}",
-                    f"{root}/raw/{branch}",
-                ]
-                for b in bases:
-                    candidates.extend([f"{b}/{fn}" for fn in filenames])
-
-        else:
-            return None
-
-        for url in candidates:
-            txt = await self._fetch_text(url)
-            if txt and txt.strip():
-                return txt
-
-        return None
+        return await fetch_readme_markdown(
+            self.hass,
+            repo.url,
+            provider=repo.provider,
+            default_branch=repo.default_branch,
+        )
+
+    def _pick_ref_for_install(self, repo: RepoItem) -> str:
+        # Prefer latest_version (release/tag/atom-derived), fallback to default branch, then main.
+        if repo.latest_version and str(repo.latest_version).strip():
+            return str(repo.latest_version).strip()
+        if repo.default_branch and str(repo.default_branch).strip():
+            return str(repo.default_branch).strip()
+        return "main"
+
+    def _build_zip_url(self, repo_url: str, ref: str) -> str:
+        """Build a public ZIP download URL (provider-neutral, no tokens).
+
+        Supports:
+        - GitHub: codeload
+        - GitLab: /-/archive/
+        - Gitea (incl. Bahmcloud): /archive/<ref>.zip
+        """
+        ref = (ref or "").strip()
+        if not ref:
+            raise BCSInstallError("Missing ref for ZIP download")
+
+        u = urlparse(repo_url.rstrip("/"))
+        host = (u.netloc or "").lower()
+        parts = [p for p in u.path.strip("/").split("/") if p]
+        if len(parts) < 2:
+            raise BCSInstallError("Invalid repository URL (missing owner/repo)")
+
+        owner = parts[0]
+        repo = parts[1]
+        if repo.endswith(".git"):
+            repo = repo[:-4]
+
+        if "github.com" in host:
+            return f"https://codeload.github.com/{owner}/{repo}/zip/{ref}"
+
+        if "gitlab" in host:
+            base = f"{u.scheme}://{u.netloc}"
+            path = u.path.strip("/")
+            if path.endswith(".git"):
+                path = path[:-4]
+            return f"{base}/{path}/-/archive/{ref}/{repo}-{ref}.zip"
+
+        base = f"{u.scheme}://{u.netloc}"
+        path = u.path.strip("/")
+        if path.endswith(".git"):
+            path = path[:-4]
+        return f"{base}/{path}/archive/{ref}.zip"
+
+    async def _download_zip(self, url: str, dest: Path) -> None:
+        session = async_get_clientsession(self.hass)
+        headers = {
+            "User-Agent": "BahmcloudStore (Home Assistant)",
+            "Cache-Control": "no-cache, no-store, max-age=0",
+            "Pragma": "no-cache",
+        }
+
+        async with session.get(url, timeout=120, headers=headers) as resp:
+            if resp.status != 200:
+                raise BCSInstallError(f"zip_url returned {resp.status}")
+            data = await resp.read()
+
+        await self.hass.async_add_executor_job(dest.write_bytes, data)
+
+    async def _extract_zip(self, zip_path: Path, extract_dir: Path) -> None:
+        def _extract() -> None:
+            with zipfile.ZipFile(zip_path, "r") as zf:
+                zf.extractall(extract_dir)
+
+        await self.hass.async_add_executor_job(_extract)
+
+    @staticmethod
+    def _find_custom_components_root(extract_root: Path) -> Path | None:
+        direct = extract_root / "custom_components"
+        if direct.exists() and direct.is_dir():
+            return direct
+
+        for child in extract_root.iterdir():
+            candidate = child / "custom_components"
+            if candidate.exists() and candidate.is_dir():
+                return candidate
         return None
+
+    async def _copy_domain_dir(self, src_domain_dir: Path, domain: str) -> None:
+        dest_root = Path(self.hass.config.path("custom_components"))
+        target = dest_root / domain
+        tmp_target = dest_root / f".bcs_tmp_{domain}_{int(time.time())}"
+
+        def _copy() -> None:
+            if tmp_target.exists():
+                shutil.rmtree(tmp_target, ignore_errors=True)
+
+            shutil.copytree(src_domain_dir, tmp_target, dirs_exist_ok=True)
+
+            if target.exists():
+                shutil.rmtree(target, ignore_errors=True)
+
+            tmp_target.rename(target)
+
+        await self.hass.async_add_executor_job(_copy)
+
+    async def _read_installed_version(self, domain: str) -> str | None:
+        def _read() -> str | None:
+            try:
+                p = Path(self.hass.config.path("custom_components", domain, "manifest.json"))
+                if not p.exists():
+                    return None
+                data = json.loads(p.read_text(encoding="utf-8"))
+                v = data.get("version")
+                return str(v) if v else None
+            except Exception:
+                return None
+
+        return await self.hass.async_add_executor_job(_read)
+
+    async def _refresh_installed_cache(self) -> None:
+        try:
+            items = await self.storage.list_installed_repos()
+            cache: dict[str, Any] = {}
+            for it in items:
+                cache[it.repo_id] = {
+                    "domains": it.domains,
+                    "installed_version": it.installed_version,
+                    "ref": it.ref,
+                    "installed_at": it.installed_at,
+                }
+            self._installed_cache = cache
+        except Exception:
+            self._installed_cache = {}
+
+    async def install_repo(self, repo_id: str) -> dict[str, Any]:
+        repo = self.get_repo(repo_id)
+        if not repo:
+            raise BCSInstallError(f"repo_id not found: {repo_id}")
+
+        async with self._install_lock:
+            ref = self._pick_ref_for_install(repo)
+            zip_url = self._build_zip_url(repo.url, ref)
+
+            _LOGGER.info("BCS install started: repo_id=%s ref=%s zip_url=%s", repo_id, ref, zip_url)
+
+            with tempfile.TemporaryDirectory(prefix="bcs_install_") as td:
+                tmp = Path(td)
+                zip_path = tmp / "repo.zip"
+                extract_dir = tmp / "extract"
+                extract_dir.mkdir(parents=True, exist_ok=True)
+
+                await self._download_zip(zip_url, zip_path)
+                await self._extract_zip(zip_path, extract_dir)
+
+                cc_root = self._find_custom_components_root(extract_dir)
+                if not cc_root:
+                    raise BCSInstallError("custom_components folder not found in repository ZIP")
+
+                installed_domains: list[str] = []
+                for domain_dir in cc_root.iterdir():
+                    if not domain_dir.is_dir():
+                        continue
+                    manifest = domain_dir / "manifest.json"
+                    if not manifest.exists():
+                        continue
+
+                    domain = domain_dir.name
+                    await self._copy_domain_dir(domain_dir, domain)
+                    installed_domains.append(domain)
+
+            if not installed_domains:
+                raise BCSInstallError("No integrations found under custom_components/ (missing manifest.json)")
+
+            installed_version = await self._read_installed_version(installed_domains[0])
+
+            await self.storage.set_installed_repo(
+                repo_id=repo_id,
+                url=repo.url,
+                domains=installed_domains,
+                installed_version=installed_version,
+                ref=ref,
+            )
+            await self._refresh_installed_cache()
+
+            persistent_notification.async_create(
+                self.hass,
+                "Bahmcloud Store installation finished. A Home Assistant restart is required to load the integration.",
+                title="Bahmcloud Store",
+                notification_id="bcs_restart_required",
+            )
+
+            _LOGGER.info("BCS install complete: repo_id=%s domains=%s", repo_id, installed_domains)
+            self.signal_updated()
+            return {
+                "ok": True,
+                "repo_id": repo_id,
+                "domains": installed_domains,
+                "installed_version": installed_version,
+                "restart_required": True,
+            }
+
+    async def update_repo(self, repo_id: str) -> dict[str, Any]:
+        _LOGGER.info("BCS update started: repo_id=%s", repo_id)
+        return await self.install_repo(repo_id)
+
+    async def request_restart(self) -> None:
+        await self.hass.services.async_call("homeassistant", "restart", {}, blocking=False)
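The install pipeline above downloads a ZIP for a ref chosen by `_pick_ref_for_install`; GitHub gets a codeload URL, GitLab an `/-/archive/` URL, and everything else a Gitea-style `/archive/<ref>.zip` URL. The ref-selection rule, restated standalone so it can be checked in isolation:

```python
# Ref selection for installs: prefer the resolved latest_version,
# then the repository's default branch, then "main".
def pick_ref(latest_version: str | None, default_branch: str | None) -> str:
    if latest_version and latest_version.strip():
        return latest_version.strip()
    if default_branch and default_branch.strip():
        return default_branch.strip()
    return "main"

assert pick_ref("v1.2.3", "main") == "v1.2.3"   # release/tag wins
assert pick_ref(None, "develop") == "develop"   # fall back to default branch
assert pick_ref(None, None) == "main"           # last resort
```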
@@ -1,9 +1,9 @@
 {
   "domain": "bahmcloud_store",
   "name": "Bahmcloud Store",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
   "requirements": [],
-  "codeowners": [],
+  "codeowners": ["@bahmcloud"],
   "iot_class": "local_polling"
 }
(Two file diffs suppressed because they are too large.)
@@ -51,12 +51,7 @@ def detect_provider(repo_url: str) -> str:
         return "github"
     if "gitlab" in host:
         return "gitlab"
-
-    owner, repo = _split_owner_repo(repo_url)
-    if owner and repo:
-        return "gitea"
-
-    return "generic"
+    return "gitea"


 async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20):
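After this change, any host that is not github.com and does not contain "gitlab" is treated as Gitea; the "generic" provider is gone. A compressed re-statement of the visible branch logic (the full function is not shown in the hunk, so treat this as a sketch):

```python
from urllib.parse import urlparse

def detect_provider(repo_url: str) -> str:
    # Simplified classification: default to Gitea for self-hosted forges.
    host = urlparse(repo_url).netloc.lower()
    if "github.com" in host:
        return "github"
    if "gitlab" in host:
        return "gitlab"
    return "gitea"

assert detect_provider("https://github.com/acme/x") == "github"
assert detect_provider("https://gitlab.com/acme/x") == "gitlab"
assert detect_provider("https://git.bahmcloud.de/bahmcloud/bahmcloud_store") == "gitea"
```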
@@ -82,130 +77,83 @@ async def _safe_text(session, url: str, *, headers: dict | None = None, timeout:


 def _extract_tag_from_github_url(url: str) -> str | None:
-    m = re.search(r"/releases/tag/([^/?#]+)", url)
-    if m:
-        return m.group(1)
-    m = re.search(r"/tag/([^/?#]+)", url)
-    if m:
-        return m.group(1)
-    return None
-
-
-def _strip_html(s: str) -> str:
-    # minimal HTML entity cleanup for meta descriptions
-    out = (
-        s.replace("&amp;", "&")
-        .replace("&quot;", '"')
-        .replace("&#39;", "'")
-        .replace("&lt;", "<")
-        .replace("&gt;", ">")
-    )
-    return re.sub(r"\s+", " ", out).strip()
+    m = re.search(r"/releases/tag/([^/?#]+)", url or "")
+    if not m:
+        return None
+    return m.group(1).strip() or None


 def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None:
-    # Extract <meta property="og:description" content="...">
-    # or <meta name="description" content="...">
+    if not html:
+        return None
     if prop:
-        # property="..." content="..."
-        m = re.search(
-            r'<meta[^>]+property=["\']' + re.escape(prop) + r'["\'][^>]+content=["\']([^"\']+)["\']',
-            html,
-            flags=re.IGNORECASE,
-        )
-        if m:
-            return _strip_html(m.group(1))
-        m = re.search(
-            r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+property=["\']' + re.escape(prop) + r'["\']',
-            html,
-            flags=re.IGNORECASE,
-        )
+        m = re.search(rf'<meta\s+property="{re.escape(prop)}"\s+content="([^"]+)"', html)
         if m:
-            return _strip_html(m.group(1))
+            return m.group(1).strip()

     if name:
-        m = re.search(
-            r'<meta[^>]+name=["\']' + re.escape(name) + r'["\'][^>]+content=["\']([^"\']+)["\']',
-            html,
-            flags=re.IGNORECASE,
-        )
-        if m:
-            return _strip_html(m.group(1))
-        m = re.search(
-            r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+name=["\']' + re.escape(name) + r'["\']',
-            html,
-            flags=re.IGNORECASE,
-        )
+        m = re.search(rf'<meta\s+name="{re.escape(name)}"\s+content="([^"]+)"', html)
         if m:
-            return _strip_html(m.group(1))
+            return m.group(1).strip()

     return None


 async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None:
-    """
-    GitHub API may be rate-limited; fetch public HTML and read meta description.
-    """
     session = async_get_clientsession(hass)
-    headers = {
-        "User-Agent": UA,
-        "Accept": "text/html,application/xhtml+xml",
-    }
-
-    html, status = await _safe_text(session, f"https://github.com/{owner}/{repo}", headers=headers)
-    if not html or status != 200:
+    url = f"https://github.com/{owner}/{repo}"
+    html, status = await _safe_text(session, url, headers={"User-Agent": UA})
+    if status != 200 or not html:
         return None

     desc = _extract_meta(html, prop="og:description")
     if desc:
         return desc
-
-    desc = _extract_meta(html, name="description")
-    if desc:
-        return desc
-
-    return None
+    return _extract_meta(html, name="description")


 async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
     session = async_get_clientsession(hass)
-    headers = {"User-Agent": UA, "Accept": "application/atom+xml,text/xml;q=0.9,*/*;q=0.8"}
-
-    xml_text, _ = await _safe_text(session, f"https://github.com/{owner}/{repo}/releases.atom", headers=headers)
-    if not xml_text:
+    url = f"https://github.com/{owner}/{repo}/releases.atom"
+    atom, status = await _safe_text(session, url, headers={"User-Agent": UA})
+    if status != 200 or not atom:
         return None, None

     try:
-        root = ET.fromstring(xml_text)
-    except Exception:
-        return None, None
-
-    for entry in root.findall(".//{*}entry"):
-        for link in entry.findall(".//{*}link"):
-            href = link.attrib.get("href")
-            if not href:
-                continue
-            tag = _extract_tag_from_github_url(href)
-            if tag:
-                return tag, "atom"
+        root = ET.fromstring(atom)
+        ns = {"a": "http://www.w3.org/2005/Atom"}
+        entry = root.find("a:entry", ns)
+        if entry is None:
+            return None, None
+        link = entry.find("a:link", ns)
+        if link is not None and link.attrib.get("href"):
+            tag = _extract_tag_from_github_url(link.attrib["href"])
+            if tag:
+                return tag, "atom"
+        title = entry.find("a:title", ns)
+        if title is not None and title.text:
+            t = title.text.strip()
+            if t:
+                return t, "atom"
+    except Exception:
+        return None, None

     return None, None


 async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
     session = async_get_clientsession(hass)
-    headers = {"User-Agent": UA}
     url = f"https://github.com/{owner}/{repo}/releases/latest"
     try:
-        async with session.head(url, allow_redirects=False, timeout=15, headers=headers) as resp:
-            if resp.status in (301, 302, 303, 307, 308):
-                loc = resp.headers.get("Location")
-                if loc:
-                    tag = _extract_tag_from_github_url(loc)
-                    if tag:
-                        return tag, "release"
+        async with session.get(url, timeout=20, headers={"User-Agent": UA}, allow_redirects=True) as resp:
+            if resp.status != 200:
+                return None, None
+            final = str(resp.url)
+            tag = _extract_tag_from_github_url(final)
+            if tag:
+                return tag, "release"
     except Exception:
-        pass
+        return None, None

     return None, None
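One consequence of the tightened `_extract_meta` worth noting: the new regex only matches the property-then-content attribute order with double quotes, whereas the deleted regexes also handled the reversed order. A quick check of what that means in practice (standalone re-statement of the new pattern):

```python
import re

def extract_og_description(html: str) -> str | None:
    # Same pattern shape as the new _extract_meta(prop="og:description").
    m = re.search(rf'<meta\s+property="{re.escape("og:description")}"\s+content="([^"]+)"', html)
    return m.group(1).strip() if m else None

html = '<meta property="og:description" content="A tiny HA integration">'
assert extract_og_description(html) == "A tiny HA integration"
# Reversed attribute order no longer matches (the old regexes handled it):
assert extract_og_description('<meta content="x" property="og:description">') is None
```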
@@ -213,31 +161,30 @@ async def _github_latest_version_api(hass: HomeAssistant, owner: str, repo: str)
     session = async_get_clientsession(hass)
     headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}

-    data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
-    if isinstance(data, dict):
-        tag = data.get("tag_name") or data.get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "release"
+    data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
+    if isinstance(data, dict) and data.get("tag_name"):
+        return str(data["tag_name"]), "release"

-    data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers)
-    if isinstance(data, list) and data:
-        tag = data[0].get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "tag"
+    if status == 404:
+        data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers)
+        if isinstance(data, list) and data:
+            t = data[0]
+            if isinstance(t, dict) and t.get("name"):
+                return str(t["name"]), "tag"

     return None, None


 async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
-    tag, src = await _github_latest_version_atom(hass, owner, repo)
-    if tag:
-        return tag, src
-
     tag, src = await _github_latest_version_redirect(hass, owner, repo)
     if tag:
         return tag, src

-    return await _github_latest_version_api(hass, owner, repo)
+    tag, src = await _github_latest_version_api(hass, owner, repo)
+    if tag:
+        return tag, src
+
+    return await _github_latest_version_atom(hass, owner, repo)


 async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
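The GitHub resolution order is now redirect, then API, then Atom feed as the last fallback. For reference, this is the kind of structure the Atom fallback parses: the first `<entry>` of `releases.atom`, taking the tag from the link href or, failing that, the entry title. The feed content below is made up for illustration:

```python
import xml.etree.ElementTree as ET

atom = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
  <entry>
    <title>v0.5.0</title>
    <link href="https://github.com/acme/ha-widget/releases/tag/v0.5.0"/>
  </entry>
</feed>"""

root = ET.fromstring(atom)
ns = {"a": "http://www.w3.org/2005/Atom"}
entry = root.find("a:entry", ns)          # first release entry
link = entry.find("a:link", ns)
print(link.attrib["href"])                # -> .../releases/tag/v0.5.0
```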
@@ -245,43 +192,51 @@ async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo

     data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=1")
     if isinstance(data, list) and data:
-        tag = data[0].get("tag_name") or data[0].get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "release"
+        r = data[0]
+        if isinstance(r, dict) and r.get("tag_name"):
+            return str(r["tag_name"]), "release"

     data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=1")
     if isinstance(data, list) and data:
-        tag = data[0].get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "tag"
+        t = data[0]
+        if isinstance(t, dict) and t.get("name"):
+            return str(t["name"]), "tag"

     return None, None


-async def _gitlab_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
+async def _gitlab_latest_version(
+    hass: HomeAssistant, base: str, owner: str, repo: str
+) -> tuple[str | None, str | None]:
     session = async_get_clientsession(hass)
     headers = {"User-Agent": UA}

     project = quote_plus(f"{owner}/{repo}")

-    data, _ = await _safe_json(
-        session,
-        f"{base}/api/v4/projects/{project}/releases?per_page=1&order_by=released_at&sort=desc",
-        headers=headers,
-    )
+    data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/releases?per_page=1", headers=headers)
     if isinstance(data, list) and data:
-        tag = data[0].get("tag_name") or data[0].get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "release"
+        r = data[0]
+        if isinstance(r, dict) and r.get("tag_name"):
+            return str(r["tag_name"]), "release"

-    data, _ = await _safe_json(
-        session,
-        f"{base}/api/v4/projects/{project}/repository/tags?per_page=1&order_by=updated&sort=desc",
-        headers=headers,
-    )
+    data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/repository/tags?per_page=1", headers=headers)
     if isinstance(data, list) and data:
-        tag = data[0].get("name")
-        if isinstance(tag, str) and tag.strip():
-            return tag.strip(), "tag"
+        t = data[0]
+        if isinstance(t, dict) and t.get("name"):
+            return str(t["name"]), "tag"
+
+    atom, status = await _safe_text(session, f"{base}/{owner}/{repo}/-/tags?format=atom", headers=headers)
+    if status == 200 and atom:
+        try:
+            root = ET.fromstring(atom)
+            ns = {"a": "http://www.w3.org/2005/Atom"}
+            entry = root.find("a:entry", ns)
+            if entry is not None:
+                title = entry.find("a:title", ns)
+                if title is not None and title.text:
+                    return title.text.strip(), "atom"
+        except Exception:
+            pass

     return None, None
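The GitLab API addresses a project by its URL-encoded full path, which is exactly what `quote_plus` produces; that is why nested paths survive the `/api/v4/projects/<id>/...` URLs above. A quick demonstration (the path is hypothetical):

```python
from urllib.parse import quote_plus

print(quote_plus("group/subgroup/widget"))
# -> group%2Fsubgroup%2Fwidget  (usable as the project id in /api/v4/projects/<id>/...)
```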
@@ -307,7 +262,6 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:

     try:
         if provider == "github":
-            # Try API repo details (may be rate-limited)
             headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
             data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers)
@@ -318,12 +272,10 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
                 if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
                     info.owner = data["owner"]["login"]
             else:
-                # If API blocked, still set reasonable defaults
                 if status == 403:
                     _LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo)
                 info.default_branch = "main"

-            # If description missing, fetch from GitHub HTML
             if not info.description:
                 desc = await _github_description_html(hass, owner, repo)
                 if desc:
@@ -371,8 +323,110 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
             info.latest_version_source = src
         return info

-        return info
     except Exception as e:
-        _LOGGER.debug("Provider fetch failed for %s: %s", repo_url, e)
-        return info
+        _LOGGER.debug("fetch_repo_info failed for %s: %s", repo_url, e)
+
+    return info
+
+
+async def fetch_readme_markdown(
+    hass: HomeAssistant,
+    repo_url: str,
+    *,
+    provider: str | None = None,
+    default_branch: str | None = None,
+) -> str | None:
+    """Fetch README Markdown for public repositories (GitHub/GitLab/Gitea).
+
+    Defensive behavior:
+    - tries multiple common README filenames
+    - tries multiple branches (default, main, master)
+    - uses public raw endpoints (no tokens required for public repositories)
+    """
+    repo_url = (repo_url or "").strip()
+    if not repo_url:
+        return None
+
+    prov = (provider or "").strip().lower() if provider else ""
+    if not prov:
+        prov = detect_provider(repo_url)
+
+    branch_candidates: list[str] = []
+    if default_branch and str(default_branch).strip():
+        branch_candidates.append(str(default_branch).strip())
+    for b in ("main", "master"):
+        if b not in branch_candidates:
+            branch_candidates.append(b)
+
+    filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]
+
+    session = async_get_clientsession(hass)
+    headers = {"User-Agent": UA}
+
+    def _normalize_gitlab_path(path: str) -> str | None:
+        p = (path or "").strip().strip("/")
+        if not p:
+            return None
+        parts = [x for x in p.split("/") if x]
+        if len(parts) < 2:
+            return None
+        if parts[-1].endswith(".git"):
+            parts[-1] = parts[-1][:-4]
+        return "/".join(parts)
+
+    candidates: list[str] = []
+
+    if prov == "github":
+        owner, repo = _split_owner_repo(repo_url)
+        if not owner or not repo:
+            return None
+        for branch in branch_candidates:
+            base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
+            for fn in filenames:
+                candidates.append(f"{base}/{fn}")
+
+    elif prov == "gitea":
+        owner, repo = _split_owner_repo(repo_url)
+        if not owner or not repo:
+            return None
+        u = urlparse(repo_url.rstrip("/"))
+        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
+        for branch in branch_candidates:
+            bases = [
+                f"{root}/raw/branch/{branch}",
+                f"{root}/raw/{branch}",
+            ]
+            for b in bases:
+                for fn in filenames:
+                    candidates.append(f"{b}/{fn}")
+
+    elif prov == "gitlab":
+        u = urlparse(repo_url.rstrip("/"))
+        path_repo = _normalize_gitlab_path(u.path)
+        if not path_repo:
+            return None
+        root = f"{u.scheme}://{u.netloc}/{path_repo}"
+        for branch in branch_candidates:
+            bases = [
+                f"{root}/-/raw/{branch}",
+                f"{root}/raw/{branch}",
+            ]
+            for b in bases:
+                for fn in filenames:
+                    candidates.append(f"{b}/{fn}")
+
+    else:
+        return None
+
+    for url in candidates:
+        try:
+            async with session.get(url, timeout=20, headers=headers) as resp:
+                if resp.status != 200:
+                    continue
+                txt = await resp.text()
+                if txt and txt.strip():
+                    return txt
+        except Exception:
+            continue
+
+    return None
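To make the fallback matrix concrete: for a hypothetical Gitea repository with default branch "develop", the candidate list below is what the loop above would try, in order, until one URL returns non-empty text.

```python
# Candidate ordering: default branch first, then main/master; per branch,
# both raw URL styles; per base, every filename variant.
branches = ["develop", "main", "master"]
filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]
root = "https://git.example.com/acme/ha-widget"

candidates = [
    f"{base}/{fn}"
    for branch in branches
    for base in (f"{root}/raw/branch/{branch}", f"{root}/raw/{branch}")
    for fn in filenames
]
print(len(candidates))   # 30 URLs in total
print(candidates[0])     # https://git.example.com/acme/ha-widget/raw/branch/develop/README.md
```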
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import time
 import uuid
 from dataclasses import dataclass
 from typing import Any
@@ -18,19 +19,39 @@ class CustomRepo:
     name: str | None = None


+@dataclass
+class InstalledRepo:
+    repo_id: str
+    url: str
+    domains: list[str]
+    installed_at: int
+    installed_version: str | None = None
+    ref: str | None = None
+
+
 class BCSStorage:
-    """Persistent storage for manually added repositories."""
+    """Persistent storage for Bahmcloud Store.
+
+    Keys:
+    - custom_repos: list of manually added repositories
+    - installed_repos: mapping repo_id -> installed metadata
+    """

     def __init__(self, hass: HomeAssistant) -> None:
         self.hass = hass
-        self._store = Store(hass, _STORAGE_VERSION, _STORAGE_KEY)
+        self._store: Store[dict[str, Any]] = Store(hass, _STORAGE_VERSION, _STORAGE_KEY)

     async def _load(self) -> dict[str, Any]:
-        data = await self._store.async_load()
-        if not data:
-            return {"custom_repos": []}
-        if "custom_repos" not in data:
+        data = await self._store.async_load() or {}
+        if not isinstance(data, dict):
+            data = {}
+
+        if "custom_repos" not in data or not isinstance(data.get("custom_repos"), list):
             data["custom_repos"] = []
+
+        if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict):
+            data["installed_repos"] = {}
+
         return data

     async def _save(self, data: dict[str, Any]) -> None:
@@ -43,24 +64,20 @@ class BCSStorage:
         for r in repos:
             if not isinstance(r, dict):
                 continue
-            rid = str(r.get("id") or "")
-            url = str(r.get("url") or "")
-            name = r.get("name")
-            if rid and url:
-                out.append(CustomRepo(id=rid, url=url, name=str(name) if name else None))
+            rid = r.get("id")
+            url = r.get("url")
+            if not rid or not url:
+                continue
+            out.append(CustomRepo(id=str(rid), url=str(url), name=r.get("name")))
         return out
 
     async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
         data = await self._load()
         repos = data.get("custom_repos", [])
 
-        # Deduplicate by URL
+        # De-duplicate by URL
         for r in repos:
-            if isinstance(r, dict) and str(r.get("url", "")).strip() == url.strip():
-                # Update name if provided
-                if name:
-                    r["name"] = name
-                await self._save(data)
+            if isinstance(r, dict) and str(r.get("url") or "").strip() == url.strip():
                 return CustomRepo(id=str(r["id"]), url=str(r["url"]), name=r.get("name"))
 
         rid = f"custom:{uuid.uuid4().hex[:10]}"
@@ -73,6 +90,78 @@ class BCSStorage:
     async def remove_custom_repo(self, repo_id: str) -> None:
         data = await self._load()
         repos = data.get("custom_repos", [])
-        data["custom_repos"] = [r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)]
+        data["custom_repos"] = [
+            r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)
+        ]
         await self._save(data)
+
+    async def get_installed_repo(self, repo_id: str) -> InstalledRepo | None:
+        data = await self._load()
+        installed = data.get("installed_repos", {})
+        if not isinstance(installed, dict):
+            return None
+        entry = installed.get(repo_id)
+        if not isinstance(entry, dict):
+            return None
+
+        try:
+            domains = entry.get("domains") or []
+            if not isinstance(domains, list):
+                domains = []
+            domains = [str(d) for d in domains if str(d).strip()]
+
+            return InstalledRepo(
+                repo_id=str(entry.get("repo_id") or repo_id),
+                url=str(entry.get("url") or ""),
+                domains=domains,
+                installed_at=int(entry.get("installed_at") or 0),
+                installed_version=str(entry.get("installed_version")) if entry.get("installed_version") else None,
+                ref=str(entry.get("ref")) if entry.get("ref") else None,
+            )
+        except Exception:
+            return None
+
+    async def list_installed_repos(self) -> list[InstalledRepo]:
+        data = await self._load()
+        installed = data.get("installed_repos", {})
+        out: list[InstalledRepo] = []
+        if not isinstance(installed, dict):
+            return out
+        for repo_id in list(installed.keys()):
+            item = await self.get_installed_repo(str(repo_id))
+            if item:
+                out.append(item)
+        return out
+
+    async def set_installed_repo(
+        self,
+        *,
+        repo_id: str,
+        url: str,
+        domains: list[str],
+        installed_version: str | None,
+        ref: str | None,
+    ) -> None:
+        data = await self._load()
+        installed = data.get("installed_repos", {})
+        if not isinstance(installed, dict):
+            installed = {}
+        data["installed_repos"] = installed
+
+        installed[str(repo_id)] = {
+            "repo_id": str(repo_id),
+            "url": str(url),
+            "domains": [str(d) for d in (domains or []) if str(d).strip()],
+            "installed_at": int(time.time()),
+            "installed_version": installed_version,
+            "ref": ref,
+        }
+        await self._save(data)
+
+    async def remove_installed_repo(self, repo_id: str) -> None:
+        data = await self._load()
+        installed = data.get("installed_repos", {})
+        if isinstance(installed, dict) and repo_id in installed:
+            installed.pop(repo_id, None)
+            data["installed_repos"] = installed
+            await self._save(data)
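Taken together, the new methods give `BCSStorage` a small CRUD surface for install bookkeeping alongside the existing custom-repo list. A hypothetical round-trip, assuming a running Home Assistant `hass` object and placeholder values (none of these specific ids or URLs come from the diff):

```python
async def demo(hass) -> None:
    storage = BCSStorage(hass)

    # Record an install (illustrative values only).
    await storage.set_installed_repo(
        repo_id="custom:abc123",
        url="https://github.com/owner/repo",
        domains=["example_domain"],
        installed_version="1.2.3",
        ref="v1.2.3",
    )

    # Read it back as a typed InstalledRepo.
    repo = await storage.get_installed_repo("custom:abc123")
    assert repo and repo.installed_version == "1.2.3"

    # And remove it again.
    await storage.remove_installed_repo("custom:abc123")
```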
@@ -16,14 +16,12 @@ _LOGGER = logging.getLogger(__name__)
 
 
 def _render_markdown_server_side(md: str) -> str | None:
-    """Render Markdown -> sanitized HTML (server-side)."""
     text = (md or "").strip()
     if not text:
         return None
 
     html: str | None = None
 
-    # 1) python-markdown
     try:
         import markdown as mdlib  # type: ignore
@@ -39,7 +37,6 @@ def _render_markdown_server_side(md: str) -> str | None:
     if not html:
         return None
 
-    # 2) Sanitize via bleach
     try:
         import bleach  # type: ignore
@@ -124,16 +121,6 @@ def _maybe_decode_base64(content: str, encoding: Any) -> str | None:
 
 
 def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
-    """
-    Robust extraction for README markdown.
-
-    Handles:
-    - str / bytes
-    - dict with:
-      - {content: "...", encoding: "base64"} (possibly nested)
-      - {readme: "..."} etc.
-    - list of dicts (pick first matching)
-    """
     if obj is None:
         return None
@@ -150,21 +137,16 @@ def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
         return None
 
     if isinstance(obj, dict):
-        # 1) If it looks like "file content"
         content = obj.get("content")
         encoding = obj.get("encoding")
 
-        # Base64 decode if possible
         decoded = _maybe_decode_base64(content, encoding)
         if decoded:
             return decoded
 
-        # content may already be plain text
         if isinstance(content, str) and (not isinstance(encoding, str) or not encoding.strip()):
-            # Heuristic: treat as markdown if it has typical markdown chars, otherwise still return
             return content
 
-        # 2) direct text keys (readme/markdown/text/body/data)
         for k in _TEXT_KEYS:
             v = obj.get(k)
             if isinstance(v, str):
@@ -175,7 +157,6 @@ def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
         except Exception:
             pass
 
-        # 3) Sometimes nested under "file" / "result" / "payload" etc.
         for v in obj.values():
             out = _extract_text_recursive(v, depth + 1)
             if out:
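These hunks only strip comments; the extraction order is unchanged: content/encoding dicts first, then direct text keys, then a recursive walk over nested values. An illustrative payload in the GitHub contents-API shape (the actual provider responses are outside this hunk):

```python
import base64

payload = {
    "file": {
        "content": base64.b64encode(b"# Hello").decode("ascii"),
        "encoding": "base64",
    }
}

# The nested dict is found via the recursive value walk, then base64-decoded.
print(_extract_text_recursive(payload))  # -> "# Hello"
```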
@@ -198,7 +179,7 @@ class StaticAssetsView(HomeAssistantView):
     name = "api:bahmcloud_store_static"
     requires_auth = False
 
-    async def get(self, request: web.Request, path: str) -> web.Response:
+    async def get(self, request: web.Request, path: str) -> web.StreamResponse:
         base = Path(__file__).resolve().parent / "panel"
         base_resolved = base.resolve()
@@ -218,24 +199,7 @@ class StaticAssetsView(HomeAssistantView):
             _LOGGER.error("BCS static asset not found: %s", target)
             return web.Response(status=404)
 
-        content_type = "text/plain"
-        charset = None
-
-        if target.suffix == ".js":
-            content_type = "application/javascript"
-            charset = "utf-8"
-        elif target.suffix == ".html":
-            content_type = "text/html"
-            charset = "utf-8"
-        elif target.suffix == ".css":
-            content_type = "text/css"
-            charset = "utf-8"
-        elif target.suffix == ".svg":
-            content_type = "image/svg+xml"
-        elif target.suffix == ".png":
-            content_type = "image/png"
-
-        resp = web.Response(body=target.read_bytes(), content_type=content_type, charset=charset)
+        resp = web.FileResponse(path=target)
         resp.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0"
         resp.headers["Pragma"] = "no-cache"
         return resp
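Switching to `web.FileResponse` makes the hand-rolled suffix table removable because aiohttp derives the Content-Type from the file extension via the standard `mimetypes` registry (and streams the file instead of reading it into memory). A quick check of that assumption:

```python
import mimetypes

for name in ("panel.js", "index.html", "style.css", "icon.svg", "logo.png"):
    print(name, "->", mimetypes.guess_type(name)[0])
# e.g. panel.js -> text/javascript (application/javascript on older Pythons)
```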
@@ -247,7 +211,7 @@ class BCSApiView(HomeAssistantView):
     requires_auth = True
 
     def __init__(self, core: Any) -> None:
-        self.core = core
+        self.core: BCSCore = core
 
     async def get(self, request: web.Request) -> web.Response:
         return web.json_response(
@@ -255,7 +219,21 @@ class BCSApiView(HomeAssistantView):
         )
 
     async def post(self, request: web.Request) -> web.Response:
-        data = await request.json()
+        action = request.query.get("action")
+        if action == "refresh":
+            _LOGGER.info("BCS manual refresh triggered via API")
+            try:
+                await self.core.full_refresh(source="manual")
+                return web.json_response({"ok": True})
+            except Exception as e:
+                _LOGGER.error("BCS manual refresh failed: %s", e)
+                return web.json_response({"ok": False, "message": "Refresh failed"}, status=500)
+
+        try:
+            data = await request.json()
+        except Exception:
+            data = {}
+
         op = data.get("op")
 
         if op == "add_custom_repo":
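Client side, the manual refresh rides on the existing POST handler via a query parameter, so no JSON body is needed. A sketch, assuming the view's `url` attribute (defined outside this hunk) resolves to `/api/bcs/api` and `TOKEN` is a long-lived Home Assistant access token:

```python
import asyncio
import aiohttp

async def trigger_refresh() -> None:
    headers = {"Authorization": "Bearer TOKEN"}  # placeholder token
    async with aiohttp.ClientSession(headers=headers) as session:
        # "/api/bcs/api" is an assumed path; the real url attribute is not in this hunk.
        async with session.post(
            "http://homeassistant.local:8123/api/bcs/api?action=refresh"
        ) as resp:
            print(resp.status, await resp.json())  # 200 {"ok": true} on success

asyncio.run(trigger_refresh())
```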
@@ -276,7 +254,7 @@ class BCSCustomRepoView(HomeAssistantView):
     requires_auth = True
 
     def __init__(self, core: Any) -> None:
-        self.core = core
+        self.core: BCSCore = core
 
     async def delete(self, request: web.Request) -> web.Response:
         repo_id = request.query.get("id")
@@ -292,7 +270,7 @@ class BCSReadmeView(HomeAssistantView):
     requires_auth = True
 
     def __init__(self, core: Any) -> None:
-        self.core = core
+        self.core: BCSCore = core
 
     async def get(self, request: web.Request) -> web.Response:
         repo_id = request.query.get("repo_id")
@@ -309,8 +287,65 @@ class BCSReadmeView(HomeAssistantView):
                 status=404,
             )
 
-        # Ensure strict JSON string output (avoid accidental objects)
         md_str = str(md)
 
         html = _render_markdown_server_side(md_str)
         return web.json_response({"ok": True, "readme": md_str, "html": html})
+
+
+class BCSInstallView(HomeAssistantView):
+    url = "/api/bcs/install"
+    name = "api:bcs_install"
+    requires_auth = True
+
+    def __init__(self, core: Any) -> None:
+        self.core: BCSCore = core
+
+    async def post(self, request: web.Request) -> web.Response:
+        repo_id = request.query.get("repo_id")
+        if not repo_id:
+            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
+
+        try:
+            result = await self.core.install_repo(repo_id)
+            return web.json_response(result, status=200)
+        except Exception as e:
+            _LOGGER.exception("BCS install failed: %s", e)
+            return web.json_response({"ok": False, "message": str(e) or "Install failed"}, status=500)
+
+
+class BCSUpdateView(HomeAssistantView):
+    url = "/api/bcs/update"
+    name = "api:bcs_update"
+    requires_auth = True
+
+    def __init__(self, core: Any) -> None:
+        self.core: BCSCore = core
+
+    async def post(self, request: web.Request) -> web.Response:
+        repo_id = request.query.get("repo_id")
+        if not repo_id:
+            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
+
+        try:
+            result = await self.core.update_repo(repo_id)
+            return web.json_response(result, status=200)
+        except Exception as e:
+            _LOGGER.exception("BCS update failed: %s", e)
+            return web.json_response({"ok": False, "message": str(e) or "Update failed"}, status=500)
+
+
+class BCSRestartView(HomeAssistantView):
+    url = "/api/bcs/restart"
+    name = "api:bcs_restart"
+    requires_auth = True
+
+    def __init__(self, core: Any) -> None:
+        self.core: BCSCore = core
+
+    async def post(self, request: web.Request) -> web.Response:
+        try:
+            await self.core.request_restart()
+            return web.json_response({"ok": True})
+        except Exception as e:
+            _LOGGER.exception("BCS restart failed: %s", e)
+            return web.json_response({"ok": False, "message": str(e) or "Restart failed"}, status=500)
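The three new views share one contract: POST, `repo_id` as a query parameter (restart takes none), a JSON body in the response, and 400/500 on failure. A sketch of a client call against the install endpoint, whose `/api/bcs/install` path is fixed by the class above (`TOKEN` and the repo id are placeholders):

```python
import asyncio
import aiohttp

async def install(repo_id: str) -> dict:
    headers = {"Authorization": "Bearer TOKEN"}  # placeholder token
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            f"http://homeassistant.local:8123/api/bcs/install?repo_id={repo_id}"
        ) as resp:
            return await resp.json()

print(asyncio.run(install("custom:abc123")))  # {"ok": ...} per the view above
```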