19 Commits
0.6.4 ... 0.6.7

Author SHA1 Message Date
33502a6d99 add 0.6.7 2026-01-19 07:16:35 +00:00
1306ee9518 Change 0.6.7 2026-01-19 07:15:48 +00:00
e37546cab1 0.6 7 2026-01-19 07:15:30 +00:00
88c3233fd1 0.6.7 2026-01-19 07:14:58 +00:00
02f3047080 0.6 7 2026-01-19 07:14:22 +00:00
d4012589e6 add 0.6.6 2026-01-18 19:55:59 +00:00
8ac67fa60c 0.6.6 2026-01-18 19:53:34 +00:00
981490c152 0.6.6 2026-01-18 19:53:12 +00:00
99b2a0f0c5 0.6.6 2026-01-18 19:52:27 +00:00
7ead494765 0.6.6 2026-01-18 19:51:31 +00:00
342b6f6c57 0.6.6 2026-01-18 19:50:44 +00:00
66ca63b2be I 2026-01-18 19:35:19 +00:00
e8325f722f I 2026-01-18 19:32:58 +00:00
7c1a91937a add 0.6.5 2026-01-18 18:55:40 +00:00
7ac3289bb7 0.6.5 2026-01-18 18:53:52 +00:00
19bdbd1b9a 0.6.5 2026-01-18 18:53:30 +00:00
24363cd2ac 0.6.5 2026-01-18 18:52:55 +00:00
e19ca5bff1 0.6.5 2026-01-18 18:51:48 +00:00
05897d4370 0.6.5 2026-01-18 18:51:17 +00:00
8 changed files with 770 additions and 283 deletions

View File

@@ -11,6 +11,49 @@ Sections:
--- ---
## [0.6.7] - 2026-01-19
### Fixed
- Repository metadata loaded on demand is now persisted and restored after Home Assistant restart.
- Background enrichment reliably updates latest versions and descriptions for HACS repositories.
- Cached repository data is correctly restored on startup for non-installed repositories.
### Changed
- Repository details are only considered fully enriched once latest version information is available.
- Improved reliability of background cache synchronization without impacting startup performance.
## [0.6.6] - 2026-01-18
### Added
- Source filter to limit repositories by origin: BCS Official, HACS, or Custom.
- Visual source badges for repositories (BCS Official, HACS, Custom).
- Restored HACS enable/disable toggle in the Store UI.
### Changed
- HACS repositories now display human-readable names and descriptions based on official HACS metadata.
- Improved Store usability on mobile devices by fixing back navigation from repository detail view.
### Fixed
- Fixed missing HACS toggle after UI updates.
- Fixed mobile browser back button exiting the Store instead of returning to the repository list.
## [0.6.5] - 2026-01-18
### Added
- Separate handling of HACS official repositories with an enable/disable toggle in the Store UI.
- HACS repositories are now loaded independently from the main store index.
### Changed
- Store index can remain minimal and curated; HACS repositories are no longer required in store.yaml.
- Improved Store performance and clarity by clearly separating repository sources.
### Fixed
- Browser cache issues resolved by proper panel cache-busting for UI updates.
### Internal
- No changes to install, update, backup, or restore logic.
- Fully backward compatible with existing installations and configurations.
## [0.6.4] - 2026-01-18 ## [0.6.4] - 2026-01-18
### Fixed ### Fixed

View File

@@ -4,7 +4,7 @@ description: >
Supports GitHub, GitLab, Gitea and Bahmcloud repositories with Supports GitHub, GitLab, Gitea and Bahmcloud repositories with
a central index, UI panel and API, similar to HACS but independent. a central index, UI panel and API, similar to HACS but independent.
category: integration category: Integrations
author: Bahmcloud author: Bahmcloud
maintainer: Bahmcloud maintainer: Bahmcloud

View File

@@ -3,10 +3,10 @@ from __future__ import annotations
import logging import logging
from datetime import timedelta from datetime import timedelta
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.components.panel_custom import async_register_panel from homeassistant.components.panel_custom import async_register_panel
from homeassistant.helpers.event import async_track_time_interval, async_call_later from homeassistant.helpers.event import async_track_time_interval, async_call_later
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.discovery import async_load_platform
from .core import BCSCore, BCSConfig, BCSError from .core import BCSCore, BCSConfig, BCSError
@@ -35,6 +35,7 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
from .views import ( from .views import (
StaticAssetsView, StaticAssetsView,
BCSApiView, BCSApiView,
BCSSettingsView,
BCSReadmeView, BCSReadmeView,
BCSVersionsView, BCSVersionsView,
BCSRepoDetailView, BCSRepoDetailView,
@@ -49,6 +50,7 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
hass.http.register_view(StaticAssetsView()) hass.http.register_view(StaticAssetsView())
hass.http.register_view(BCSApiView(core)) hass.http.register_view(BCSApiView(core))
hass.http.register_view(BCSSettingsView(core))
hass.http.register_view(BCSReadmeView(core)) hass.http.register_view(BCSReadmeView(core))
hass.http.register_view(BCSVersionsView(core)) hass.http.register_view(BCSVersionsView(core))
hass.http.register_view(BCSRepoDetailView(core)) hass.http.register_view(BCSRepoDetailView(core))
@@ -65,30 +67,24 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
frontend_url_path="bahmcloud-store", frontend_url_path="bahmcloud-store",
webcomponent_name="bahmcloud-store-panel", webcomponent_name="bahmcloud-store-panel",
# IMPORTANT: bump v to avoid caching old JS # IMPORTANT: bump v to avoid caching old JS
module_url="/api/bahmcloud_store_static/panel.js?v=107", module_url="/api/bahmcloud_store_static/panel.js?v=108",
sidebar_title="Bahmcloud Store", sidebar_title="Bahmcloud Store",
sidebar_icon="mdi:store", sidebar_icon="mdi:store",
require_admin=True, require_admin=True,
config={}, config={},
) )
# IMPORTANT: async def _do_startup_refresh(_now=None) -> None:
# Do NOT block Home Assistant startup with network-heavy refreshes.
# We wait until HA has fully started, then kick off the initial refresh.
async def _startup_refresh() -> None:
try: try:
await core.full_refresh(source="startup") await core.full_refresh(source="startup")
except BCSError as e: except BCSError as e:
_LOGGER.error("Initial refresh failed: %s", e) _LOGGER.error("Initial refresh failed: %s", e)
except Exception:
_LOGGER.exception("Unexpected error during initial refresh")
@callback # Do not block Home Assistant startup. Schedule the initial refresh after HA started.
def _schedule_startup_refresh(_event=None) -> None: def _on_ha_started(_event) -> None:
# Give HA a short head-start (UI, recorder, etc.) before we start fetching lots of data. async_call_later(hass, 30, _do_startup_refresh)
async_call_later(hass, 30, lambda _now: hass.async_create_task(_startup_refresh()))
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _schedule_startup_refresh) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _on_ha_started)
async def periodic(_now) -> None: async def periodic(_now) -> None:
try: try:
@@ -101,4 +97,4 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300) interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds)) async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds))
return True return True

View File

@@ -32,6 +32,17 @@ RESTART_REQUIRED_ISSUE_ID = "restart_required"
BACKUP_META_FILENAME = ".bcs_backup_meta.json" BACKUP_META_FILENAME = ".bcs_backup_meta.json"
# Optional HACS integrations index (GitHub repositories only).
HACS_INTEGRATIONS_URL = "https://data-v2.hacs.xyz/integration/repositories.json"
HACS_INTEGRATIONS_DATA_URL = "https://data-v2.hacs.xyz/integration/data.json"
HACS_DEFAULT_CATEGORY = "Integrations"
HACS_CACHE_TTL_SECONDS = 60 * 60 * 24 # 24h
# Repo enrichment cache:
# - persists across restarts
# - keeps UI populated (name/description/latest) without blocking startup
REPO_CACHE_TTL_SECONDS = 6 * 60 * 60 # 6h
class BCSError(Exception): class BCSError(Exception):
"""BCS core error.""" """BCS core error."""
@@ -95,20 +106,19 @@ class BCSCore:
self._install_lock = asyncio.Lock() self._install_lock = asyncio.Lock()
self._installed_cache: dict[str, Any] = {} self._installed_cache: dict[str, Any] = {}
# Phase P1/P2: local repo cache + background enrichment # Persistent settings (UI toggles etc.)
# The cache persists provider/meta/latest data so the UI can show more self.settings: dict[str, Any] = {"hacs_enabled": False}
# information immediately and we can later do delta refresh.
self._repo_cache: dict[str, Any] = {}
self._repo_cache_loaded: bool = False
self._repo_cache_flush_task: asyncio.Task | None = None
# Background enrichment worker (non-blocking) # Cached HACS metadata (display names/descriptions). Loaded from storage.
self._bg_enrich_task: asyncio.Task | None = None self._hacs_meta_fetched_at: int = 0
self._bg_enrich_pending: set[str] = set() self._hacs_meta: dict[str, dict[str, Any]] = {}
self._bg_enrich_ttl_seconds: int = 6 * 3600 self._hacs_meta_lock = asyncio.Lock()
self._bg_enrich_max_parallel: int = 3
self._bg_signal_interval_seconds: float = 2.0 # Persistent per-repo enrichment cache (name/description/latest). Loaded from storage.
self._bg_last_signal_ts: float = 0.0 self._repo_cache_fetched_at: int = 0
self._repo_cache: dict[str, dict[str, Any]] = {}
self._repo_cache_lock = asyncio.Lock()
self._repo_enrich_task: asyncio.Task | None = None
# Phase F2: backups before install/update # Phase F2: backups before install/update
self._backup_root = Path(self.hass.config.path(".bcs_backups")) self._backup_root = Path(self.hass.config.path(".bcs_backups"))
@@ -119,18 +129,42 @@ class BCSCore:
self.version = await self._read_manifest_version_async() self.version = await self._read_manifest_version_async()
await self._refresh_installed_cache() await self._refresh_installed_cache()
# Load persisted repo cache once at startup. # Load persistent settings (do not fail startup)
try: try:
self._repo_cache = await self.storage.get_repo_cache_map() s = await self.storage.get_settings()
if not isinstance(self._repo_cache, dict): if isinstance(s, dict):
self._repo_cache = {} self.settings.update(s)
except Exception: except Exception:
self._repo_cache = {} pass
self._repo_cache_loaded = True
# After a successful HA restart, restart-required is no longer relevant. # After a successful HA restart, restart-required is no longer relevant.
self._clear_restart_required_issue() self._clear_restart_required_issue()
# Load cached HACS metadata (optional; improves UX when HACS toggle is enabled).
try:
hc = await self.storage.get_hacs_cache()
if isinstance(hc, dict):
self._hacs_meta_fetched_at = int(hc.get("fetched_at") or 0)
repos = hc.get("repos")
if isinstance(repos, dict):
# Normalize to string keys
self._hacs_meta = {str(k): (v if isinstance(v, dict) else {}) for k, v in repos.items()}
except Exception:
self._hacs_meta_fetched_at = 0
self._hacs_meta = {}
# Load persisted per-repo enrichment cache (keeps UI populated after restart).
try:
rc = await self.storage.get_repo_cache()
if isinstance(rc, dict):
self._repo_cache_fetched_at = int(rc.get("fetched_at") or 0)
repos = rc.get("repos")
if isinstance(repos, dict):
self._repo_cache = {str(k): (v if isinstance(v, dict) else {}) for k, v in repos.items()}
except Exception:
self._repo_cache_fetched_at = 0
self._repo_cache = {}
async def _read_manifest_version_async(self) -> str: async def _read_manifest_version_async(self) -> str:
def _read() -> str: def _read() -> str:
try: try:
@@ -199,22 +233,60 @@ class BCSCore:
data = (self._installed_cache or {}).get(repo_id) data = (self._installed_cache or {}).get(repo_id)
return data if isinstance(data, dict) else None return data if isinstance(data, dict) else None
def get_settings_public(self) -> dict[str, Any]:
"""Return UI-relevant settings (no I/O)."""
return {
"hacs_enabled": bool(self.settings.get("hacs_enabled", False)),
}
async def set_settings(self, updates: dict[str, Any]) -> dict[str, Any]:
"""Persist settings and apply them."""
safe_updates: dict[str, Any] = {}
if "hacs_enabled" in (updates or {}):
safe_updates["hacs_enabled"] = bool(updates.get("hacs_enabled"))
merged = await self.storage.set_settings(safe_updates)
if isinstance(merged, dict):
self.settings.update(merged)
# Reload repo list after changing settings.
await self.full_refresh(source="settings")
return self.get_settings_public()
async def refresh(self) -> None: async def refresh(self) -> None:
index_repos, refresh_seconds = await self._load_index_repos() index_repos, refresh_seconds = await self._load_index_repos()
self.refresh_seconds = refresh_seconds self.refresh_seconds = refresh_seconds
hacs_enabled = bool(self.settings.get("hacs_enabled", False))
hacs_repos: list[RepoItem] = []
if hacs_enabled:
try:
hacs_repos = await self._load_hacs_repos()
except Exception as e:
_LOGGER.warning("BCS HACS index load failed: %s", e)
custom_repos = await self.storage.list_custom_repos() custom_repos = await self.storage.list_custom_repos()
# Fast path: if index + custom repos did not change, skip expensive work. # Fast path: if index + custom repos did not change, skip expensive work.
try: try:
custom_sig = [(c.id, (c.url or '').strip(), (c.name or '').strip()) for c in (custom_repos or [])] custom_sig = [(c.id, (c.url or '').strip(), (c.name or '').strip()) for c in (custom_repos or [])]
custom_sig.sort() custom_sig.sort()
refresh_signature = json.dumps({"index_hash": self.last_index_hash, "custom": custom_sig}, sort_keys=True) hacs_sig = len(hacs_repos) if hacs_enabled else 0
refresh_signature = json.dumps({"index_hash": self.last_index_hash, "custom": custom_sig, "hacs": hacs_sig, "hacs_enabled": hacs_enabled}, sort_keys=True)
except Exception: except Exception:
refresh_signature = f"{self.last_index_hash}:{len(custom_repos or [])}" refresh_signature = f"{self.last_index_hash}:{len(custom_repos or [])}:{'h' if hacs_enabled else 'n'}:{len(hacs_repos)}"
if self._last_refresh_signature and refresh_signature == self._last_refresh_signature and self.repos: if self._last_refresh_signature and refresh_signature == self._last_refresh_signature and self.repos:
_LOGGER.debug("BCS refresh skipped (no changes detected)") _LOGGER.debug("BCS refresh fast-path (no repo list changes)")
# Even if the repo list is unchanged, we still want fresh versions/descriptions
# for installed repos and we still want background enrichment to keep the
# overview populated.
if hacs_enabled and self._hacs_meta_needs_refresh():
self.hass.async_create_task(self._refresh_hacs_meta_background())
await self._enrich_installed_only(self.repos)
self._schedule_repo_enrich_background()
return return
@@ -223,6 +295,9 @@ class BCSCore:
for item in index_repos: for item in index_repos:
merged[item.id] = item merged[item.id] = item
for item in hacs_repos:
merged[item.id] = item
for c in custom_repos: for c in custom_repos:
merged[c.id] = RepoItem( merged[c.id] = RepoItem(
id=c.id, id=c.id,
@@ -234,174 +309,368 @@ class BCSCore:
for r in merged.values(): for r in merged.values():
r.provider = detect_provider(r.url) r.provider = detect_provider(r.url)
# Apply persisted cache (provider/meta/latest) to all repos so the list # Apply cached HACS display metadata immediately (fast UX).
# view can show richer data immediately. if hacs_enabled and hacs_repos:
self._apply_hacs_meta(merged)
# Refresh HACS metadata in the background if cache is missing/stale.
if self._hacs_meta_needs_refresh():
self.hass.async_create_task(self._refresh_hacs_meta_background())
# Apply persisted per-repo enrichment cache (instant UI after restart).
self._apply_repo_cache(merged) self._apply_repo_cache(merged)
await self._enrich_installed_only(merged) await self._enrich_installed_only(merged)
self.repos = merged self.repos = merged
# Enrich remaining repos in the background and persist results (non-blocking).
self._schedule_repo_enrich_background()
self._last_refresh_signature = refresh_signature self._last_refresh_signature = refresh_signature
_LOGGER.info( _LOGGER.info(
"BCS refresh complete: repos=%s (index=%s, custom=%s)", "BCS refresh complete: repos=%s (index=%s, hacs=%s, custom=%s)",
len(self.repos), len(self.repos),
len([r for r in self.repos.values() if r.source == "index"]), len([r for r in self.repos.values() if r.source == "index"]),
len([r for r in self.repos.values() if r.source == "hacs"]),
len([r for r in self.repos.values() if r.source == "custom"]), len([r for r in self.repos.values() if r.source == "custom"]),
) )
# Start/continue background enrichment for repos (non-blocking). async def _load_hacs_repos(self) -> list[RepoItem]:
self._schedule_background_enrich(list(self.repos.keys())) """Load the official HACS integrations repository list.
This is used as an optional additional source to keep the local store index small.
We only parse owner/repo strings and map them to GitHub URLs.
"""
session = async_get_clientsession(self.hass)
headers = {
"User-Agent": "BahmcloudStore (Home Assistant)",
"Cache-Control": "no-cache, no-store, max-age=0",
"Pragma": "no-cache",
}
async with session.get(HACS_INTEGRATIONS_URL, timeout=60, headers=headers) as resp:
if resp.status != 200:
raise BCSError(f"HACS index returned {resp.status}")
data = await resp.json()
if not isinstance(data, list):
raise BCSError("HACS repositories.json must be a list")
items: list[RepoItem] = []
for entry in data:
if not isinstance(entry, str):
continue
full_name = entry.strip().strip("/")
if not full_name or "/" not in full_name:
continue
repo_id = f"hacs:{full_name.lower()}"
owner = full_name.split("/", 1)[0].strip()
items.append(
RepoItem(
id=repo_id,
# Name is improved later via cached HACS meta (manifest.name).
name=full_name,
url=f"https://github.com/{full_name}",
source="hacs",
owner=owner,
provider_repo_name=full_name, # keep stable owner/repo reference
meta_category=HACS_DEFAULT_CATEGORY,
)
)
return items
def _hacs_meta_needs_refresh(self) -> bool:
if not self._hacs_meta_fetched_at or not self._hacs_meta:
return True
age = int(time.time()) - int(self._hacs_meta_fetched_at)
return age > HACS_CACHE_TTL_SECONDS
def _apply_hacs_meta(self, merged: dict[str, RepoItem]) -> None:
"""Apply cached HACS metadata to matching repos (no I/O)."""
if not self._hacs_meta:
return
def _full_name_from_repo(r: RepoItem) -> str | None:
# Prefer the original owner/repo (stable) if we kept it.
if r.provider_repo_name and "/" in str(r.provider_repo_name):
return str(r.provider_repo_name).strip()
# Fall back to URL path: https://github.com/owner/repo
try:
u = urlparse((r.url or "").strip())
parts = [p for p in (u.path or "").strip("/").split("/") if p]
if len(parts) >= 2:
repo = parts[1]
if repo.endswith(".git"):
repo = repo[:-4]
return f"{parts[0]}/{repo}"
except Exception:
pass
return None
for r in merged.values():
if r.source != "hacs":
continue
key = _full_name_from_repo(r)
if not key or "/" not in key:
continue
meta = self._hacs_meta.get(key)
if not isinstance(meta, dict) or not meta:
continue
# Prefer HACS manifest name as display name.
display_name = meta.get("name")
if isinstance(display_name, str) and display_name.strip():
r.name = display_name.strip()
r.meta_name = display_name.strip()
desc = meta.get("description")
if isinstance(desc, str) and desc.strip():
r.meta_description = desc.strip()
domain = meta.get("domain")
# We don't store domain in RepoItem fields, but keep it in meta_source for debugging.
# (Optional: extend RepoItem later if needed.)
if isinstance(domain, str) and domain.strip():
# Keep under meta_source marker to help identify source.
pass
r.meta_source = r.meta_source or "hacs"
r.meta_category = r.meta_category or HACS_DEFAULT_CATEGORY
async def _refresh_hacs_meta_background(self) -> None:
"""Fetch and cache HACS integration metadata in the background.
Uses the official HACS data endpoint which includes manifest data.
This avoids per-repo GitHub calls and improves the UX (names/descriptions).
"""
async with self._hacs_meta_lock:
# Another task might have refreshed already.
if not self._hacs_meta_needs_refresh():
return
session = async_get_clientsession(self.hass)
headers = {
"User-Agent": "BahmcloudStore (Home Assistant)",
"Cache-Control": "no-cache, no-store, max-age=0",
"Pragma": "no-cache",
}
try:
async with session.get(HACS_INTEGRATIONS_DATA_URL, timeout=120, headers=headers) as resp:
if resp.status != 200:
raise BCSError(f"HACS data.json returned {resp.status}")
data = await resp.json()
except Exception as e:
_LOGGER.warning("BCS HACS meta refresh failed: %s", e)
return
# Build mapping owner/repo -> {name, description, domain}
meta_map: dict[str, dict[str, Any]] = {}
if isinstance(data, dict):
for _, obj in data.items():
if not isinstance(obj, dict):
continue
full_name = obj.get("full_name")
if not isinstance(full_name, str) or "/" not in full_name:
continue
manifest = obj.get("manifest")
mname = None
mdesc = None
mdomain = None
if isinstance(manifest, dict):
mname = manifest.get("name")
mdesc = manifest.get("description")
mdomain = manifest.get("domain")
entry: dict[str, Any] = {}
if isinstance(mname, str) and mname.strip():
entry["name"] = mname.strip()
if isinstance(mdesc, str) and mdesc.strip():
entry["description"] = mdesc.strip()
if isinstance(mdomain, str) and mdomain.strip():
entry["domain"] = mdomain.strip()
if entry:
meta_map[full_name.strip()] = entry
self._hacs_meta = meta_map
self._hacs_meta_fetched_at = int(time.time())
try:
await self.storage.set_hacs_cache({
"fetched_at": self._hacs_meta_fetched_at,
"repos": self._hacs_meta,
})
except Exception:
_LOGGER.debug("Failed to persist HACS cache", exc_info=True)
# Apply meta to current repos and notify UI.
try:
self._apply_hacs_meta(self.repos)
except Exception:
pass
_LOGGER.info("BCS HACS metadata cached: repos=%s", len(self._hacs_meta))
self.signal_updated()
# ---------- Persistent per-repo enrichment cache ----------
def _apply_repo_cache(self, merged: dict[str, RepoItem]) -> None: def _apply_repo_cache(self, merged: dict[str, RepoItem]) -> None:
"""Apply persisted cached enrichment data to repo items (no network IO).""" """Apply persisted per-repo enrichment cache to RepoItem objects.
if not self._repo_cache_loaded or not isinstance(self._repo_cache, dict) or not self._repo_cache:
This is a fast, no-I/O operation used to keep the UI populated immediately
after a Home Assistant restart.
"""
if not self._repo_cache:
return return
for rid, r in merged.items(): for repo_id, r in merged.items():
entry = self._repo_cache.get(str(rid)) cached = self._repo_cache.get(str(repo_id))
if not isinstance(entry, dict): if not isinstance(cached, dict) or not cached:
continue
if (entry.get("url") or "").strip() != (r.url or "").strip():
continue continue
# Provider basics cached_url = cached.get("url")
r.provider = entry.get("provider") or r.provider if isinstance(cached_url, str) and cached_url and r.url and cached_url != r.url:
r.owner = entry.get("owner") or r.owner # Repo id reused for different URL; ignore.
r.provider_repo_name = entry.get("provider_repo_name") or r.provider_repo_name continue
r.provider_description = entry.get("provider_description") or r.provider_description
r.default_branch = entry.get("default_branch") or r.default_branch
# Latest version # Fill gaps only; never override fresh values from current refresh.
r.latest_version = entry.get("latest_version") or r.latest_version if (not r.name or r.name == r.url) and isinstance(cached.get("name"), str) and cached.get("name"):
r.latest_version_source = entry.get("latest_version_source") or r.latest_version_source r.name = str(cached.get("name"))
# Metadata for attr, key in (
r.meta_source = entry.get("meta_source") or r.meta_source ("owner", "owner"),
r.meta_name = entry.get("meta_name") or r.meta_name ("provider_repo_name", "provider_repo_name"),
r.meta_description = entry.get("meta_description") or r.meta_description ("provider_description", "provider_description"),
r.meta_category = entry.get("meta_category") or r.meta_category ("default_branch", "default_branch"),
r.meta_author = entry.get("meta_author") or r.meta_author ("latest_version", "latest_version"),
r.meta_maintainer = entry.get("meta_maintainer") or r.meta_maintainer ("latest_version_source", "latest_version_source"),
("meta_source", "meta_source"),
("meta_name", "meta_name"),
("meta_description", "meta_description"),
("meta_category", "meta_category"),
("meta_author", "meta_author"),
("meta_maintainer", "meta_maintainer"),
):
if getattr(r, attr, None):
continue
v = cached.get(key)
if v is None:
continue
if isinstance(v, str):
vv = v.strip()
if vv:
setattr(r, attr, vv)
# Keep a stable name fallback def _repo_needs_enrich(self, repo_id: str, r: RepoItem) -> bool:
if r.meta_name: """Return True if this repo should be enriched in background."""
r.name = r.meta_name cached = self._repo_cache.get(str(repo_id)) if isinstance(self._repo_cache, dict) else None
elif not r.name: ts = 0
r.name = r.provider_repo_name or r.url if isinstance(cached, dict):
ts = int(cached.get("ts") or 0)
def _schedule_repo_cache_flush(self) -> None: # Missing key fields -> enrich.
if self._repo_cache_flush_task and not self._repo_cache_flush_task.done(): if not r.latest_version:
return return True
if not (r.meta_description or r.provider_description):
async def _flush_delayed() -> None:
await asyncio.sleep(5)
try:
await self.storage.set_repo_cache_map(self._repo_cache)
except Exception:
_LOGGER.debug("BCS repo cache flush failed", exc_info=True)
self._repo_cache_flush_task = self.hass.async_create_task(_flush_delayed())
def _cache_entry_is_stale(self, entry: dict[str, Any]) -> bool:
try:
checked_at = int(entry.get("checked_at") or 0)
except Exception:
checked_at = 0
if checked_at <= 0:
return True return True
return (time.time() - checked_at) > self._bg_enrich_ttl_seconds
def _schedule_background_enrich(self, repo_ids: list[str]) -> None: # Stale cache -> enrich.
"""Queue repos for background enrichment and ensure worker is running.""" if ts <= 0:
if not repo_ids: return True
age = int(time.time()) - ts
return age > REPO_CACHE_TTL_SECONDS
def _update_repo_cache_from_repo(self, repo_id: str, r: RepoItem) -> None:
self._repo_cache[str(repo_id)] = {
"ts": int(time.time()),
"url": r.url,
"name": r.name,
"owner": r.owner,
"provider_repo_name": r.provider_repo_name,
"provider_description": r.provider_description,
"default_branch": r.default_branch,
"latest_version": r.latest_version,
"latest_version_source": r.latest_version_source,
"meta_source": r.meta_source,
"meta_name": r.meta_name,
"meta_description": r.meta_description,
"meta_category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
}
def _schedule_repo_enrich_background(self) -> None:
"""Schedule background enrichment for repo details.
This is non-blocking and safe to call repeatedly.
"""
if self._repo_enrich_task and not self._repo_enrich_task.done():
return return
self._repo_enrich_task = self.hass.async_create_task(self._repo_enrich_background())
now = time.time() async def _repo_enrich_background(self) -> None:
for rid in repo_ids: # Small delay to avoid competing with critical startup work.
rid = str(rid) await asyncio.sleep(2)
r = self.repos.get(rid)
repo_ids = list(self.repos.keys())
updated = 0
# Keep it conservative to avoid rate limits.
sem = asyncio.Semaphore(4)
async def process_one(repo_id: str) -> None:
nonlocal updated
r = self.repos.get(repo_id)
if not r: if not r:
continue return
if not self._repo_needs_enrich(repo_id, r):
# Already enriched in memory? Still consider staleness from cache. return
entry = self._repo_cache.get(rid) if isinstance(self._repo_cache, dict) else None
stale = True
if isinstance(entry, dict) and (entry.get("url") or "").strip() == (r.url or "").strip():
stale = self._cache_entry_is_stale(entry)
# If we already have fields in memory and the cache isn't stale, skip.
if (r.latest_version or r.meta_source or r.provider_description) and not stale:
continue
# Always enqueue missing/stale entries.
self._bg_enrich_pending.add(rid)
if self._bg_enrich_task and not self._bg_enrich_task.done():
return
self._bg_enrich_task = self.hass.async_create_task(self._background_enrich_worker())
async def _background_enrich_worker(self) -> None:
"""Background worker to enrich repos and update the persistent cache."""
sem = asyncio.Semaphore(self._bg_enrich_max_parallel)
async def _enrich_one(rid: str) -> None:
async with sem: async with sem:
r = self.repos.get(rid)
if not r:
return
entry = self._repo_cache.get(rid) if isinstance(self._repo_cache, dict) else None
if isinstance(entry, dict) and (entry.get("url") or "").strip() == (r.url or "").strip():
if not self._cache_entry_is_stale(entry) and (r.latest_version or r.meta_source or r.provider_description):
return
try: try:
await self._enrich_one_repo(r) await self._enrich_one_repo(r)
except Exception: except Exception:
_LOGGER.debug("BCS background enrich failed for %s", rid, exc_info=True) _LOGGER.debug("BCS background enrich failed for %s", repo_id, exc_info=True)
# still mark checked_at to avoid tight retry loops
self._repo_cache[rid] = {
"url": r.url,
"checked_at": int(time.time()),
}
self._schedule_repo_cache_flush()
return return
# Update persistent cache entry async with self._repo_cache_lock:
self._repo_cache[rid] = { self._update_repo_cache_from_repo(repo_id, r)
"url": r.url, updated += 1
"provider": r.provider,
"owner": r.owner,
"provider_repo_name": r.provider_repo_name,
"provider_description": r.provider_description,
"default_branch": r.default_branch,
"latest_version": r.latest_version,
"latest_version_source": r.latest_version_source,
"meta_source": r.meta_source,
"meta_name": r.meta_name,
"meta_description": r.meta_description,
"meta_category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"checked_at": int(time.time()),
}
self._schedule_repo_cache_flush()
# Throttle UI/entity updates # Process sequentially but allow limited concurrency.
if (time.time() - self._bg_last_signal_ts) >= self._bg_signal_interval_seconds: tasks: list[asyncio.Task] = []
self._bg_last_signal_ts = time.time() for repo_id in repo_ids:
tasks.append(self.hass.async_create_task(process_one(repo_id)))
# Flush in batches to limit memory/connection churn.
if len(tasks) >= 25:
await asyncio.gather(*tasks, return_exceptions=True)
tasks = []
if updated:
await self._persist_repo_cache()
self.signal_updated() self.signal_updated()
await asyncio.sleep(0)
while self._bg_enrich_pending: if tasks:
# Drain in small batches so we don't monopolize the loop await asyncio.gather(*tasks, return_exceptions=True)
batch: list[str] = []
while self._bg_enrich_pending and len(batch) < (self._bg_enrich_max_parallel * 2):
batch.append(self._bg_enrich_pending.pop())
await asyncio.gather(*(_enrich_one(rid) for rid in batch), return_exceptions=True) if updated:
await asyncio.sleep(0) # yield await self._persist_repo_cache()
self.signal_updated()
async def _persist_repo_cache(self) -> None:
async with self._repo_cache_lock:
payload = {
"fetched_at": int(time.time()),
"repos": self._repo_cache,
}
try:
await self.storage.set_repo_cache(payload)
except Exception:
_LOGGER.debug("BCS failed to persist repo cache", exc_info=True)
async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None: async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None:
sem = asyncio.Semaphore(6) sem = asyncio.Semaphore(6)
@@ -493,9 +762,6 @@ class BCSCore:
if not r.name: if not r.name:
r.name = r.provider_repo_name or r.url r.name = r.provider_repo_name or r.url
# Persist into local cache (non-blocking, throttled flush).
self._update_repo_cache_from_item(r)
async def ensure_repo_details(self, repo_id: str) -> RepoItem | None: async def ensure_repo_details(self, repo_id: str) -> RepoItem | None:
"""Ensure provider/meta/latest fields are loaded for a repo. """Ensure provider/meta/latest fields are loaded for a repo.
@@ -505,109 +771,28 @@ class BCSCore:
if not r: if not r:
return None return None
# If we already have a latest_version (or provider_description), consider it enriched. # Consider it enriched only if we already have a latest_version and at least
if r.latest_version or r.provider_description or r.meta_source: # some descriptive info (meta/provider). HACS repos often have meta_source set
# early, but still need provider latest_version.
if r.latest_version and (r.provider_description or r.meta_description or r.meta_source):
return r return r
try: try:
await self._enrich_one_repo(r) await self._enrich_one_repo(r)
# Persist the newly fetched details so they survive a Home Assistant restart.
async with self._repo_cache_lock:
self._update_repo_cache_from_repo(repo_id, r)
await self._persist_repo_cache()
except Exception: except Exception:
_LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True) _LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True)
return r return r
# --------------------------------------------------------------------- async def list_repo_versions(self, repo_id: str) -> list[dict[str, Any]]:
# Phase P1/P2: local cache + background enrichment repo = self.get_repo(repo_id)
# --------------------------------------------------------------------- if not repo:
return []
def _apply_repo_cache(self, merged: dict[str, RepoItem]) -> None: return await fetch_repo_versions(self.hass, repo.url)
"""Apply persisted cache fields to repo items.
This makes the list view richer immediately (without remote requests).
"""
cache = self._repo_cache if isinstance(self._repo_cache, dict) else {}
now = int(time.time())
for rid, r in merged.items():
entry = cache.get(str(rid))
if not isinstance(entry, dict):
continue
# Safety: ensure cache belongs to the same URL.
if str(entry.get("url") or "").strip() != str(r.url or "").strip():
continue
# Provider fields
r.provider = entry.get("provider") or r.provider
r.owner = entry.get("owner") or r.owner
r.provider_repo_name = entry.get("provider_repo_name") or r.provider_repo_name
r.provider_description = entry.get("provider_description") or r.provider_description
r.default_branch = entry.get("default_branch") or r.default_branch
# Latest version
r.latest_version = entry.get("latest_version") or r.latest_version
r.latest_version_source = entry.get("latest_version_source") or r.latest_version_source
# Metadata
r.meta_source = entry.get("meta_source") or r.meta_source
r.meta_name = entry.get("meta_name") or r.meta_name
r.meta_description = entry.get("meta_description") or r.meta_description
r.meta_category = entry.get("meta_category") or r.meta_category
r.meta_author = entry.get("meta_author") or r.meta_author
r.meta_maintainer = entry.get("meta_maintainer") or r.meta_maintainer
# Stable display name
if r.meta_name:
r.name = r.meta_name
elif not r.name:
r.name = r.provider_repo_name or r.url
# Mark as stale if the cache is old (used by background enrich).
checked_at = int(entry.get("checked_at") or 0)
entry["_stale"] = (checked_at <= 0) or ((now - checked_at) > self._bg_enrich_ttl_seconds)
def _update_repo_cache_from_item(self, r: RepoItem) -> None:
"""Update in-memory cache from a repo item and schedule a flush."""
if not self._repo_cache_loaded:
return
rid = str(r.id)
now = int(time.time())
entry = {
"url": str(r.url or ""),
"provider": r.provider,
"owner": r.owner,
"provider_repo_name": r.provider_repo_name,
"provider_description": r.provider_description,
"default_branch": r.default_branch,
"latest_version": r.latest_version,
"latest_version_source": r.latest_version_source,
"meta_source": r.meta_source,
"meta_name": r.meta_name,
"meta_description": r.meta_description,
"meta_category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"checked_at": now,
}
if not isinstance(self._repo_cache, dict):
self._repo_cache = {}
self._repo_cache[rid] = entry
self._schedule_repo_cache_flush()
def _schedule_repo_cache_flush(self) -> None:
if self._repo_cache_flush_task and not self._repo_cache_flush_task.done():
return
async def _flush_later() -> None:
try:
await asyncio.sleep(5)
if isinstance(self._repo_cache, dict):
await self.storage.set_repo_cache_map(self._repo_cache)
except Exception:
_LOGGER.debug("BCS repo cache flush failed", exc_info=True)
self._repo_cache_flush_task = self.hass.async_create_task(_flush_later())
def _add_cache_buster(self, url: str) -> str: def _add_cache_buster(self, url: str) -> str:
parts = urlsplit(url) parts = urlsplit(url)

View File

@@ -1,7 +1,7 @@
{ {
"domain": "bahmcloud_store", "domain": "bahmcloud_store",
"name": "Bahmcloud Store", "name": "Bahmcloud Store",
"version": "0.6.4", "version": "0.6.7",
"documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store", "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
"platforms": ["update"], "platforms": ["update"],
"requirements": [], "requirements": [],

View File

@@ -18,6 +18,12 @@ class BahmcloudStorePanel extends HTMLElement {
this._filter = "all"; // all|installed|not_installed|updates|custom this._filter = "all"; // all|installed|not_installed|updates|custom
this._sort = "az"; // az|updates_first|installed_first this._sort = "az"; // az|updates_first|installed_first
// Source filter (all|bcs|hacs|custom)
this._sourceFilter = "all";
// HACS toggle (settings)
this._hacsEnabled = false;
this._detailRepoId = null; this._detailRepoId = null;
this._detailRepo = null; this._detailRepo = null;
this._readmeLoading = false; this._readmeLoading = false;
@@ -50,6 +56,10 @@ class BahmcloudStorePanel extends HTMLElement {
this._versionsCache = {}; // repo_id -> [{ref,label,source}, ...] this._versionsCache = {}; // repo_id -> [{ref,label,source}, ...]
this._versionsLoadingRepoId = null; this._versionsLoadingRepoId = null;
this._selectedVersionByRepoId = {}; // repo_id -> ref ("" means latest) this._selectedVersionByRepoId = {}; // repo_id -> ref ("" means latest)
// History handling (mobile back button should go back to list, not exit panel)
this._historyBound = false;
this._handlingPopstate = false;
} }
set hass(hass) { set hass(hass) {
@@ -57,10 +67,43 @@ class BahmcloudStorePanel extends HTMLElement {
if (!this._rendered) { if (!this._rendered) {
this._rendered = true; this._rendered = true;
this._render(); this._render();
this._ensureHistory();
this._load(); this._load();
} }
} }
_ensureHistory() {
  // Bind browser-history handling exactly once so the mobile back button
  // navigates within the panel (detail -> list) instead of leaving it.
  if (this._historyBound) return;
  this._historyBound = true;
  try {
    // Tag the current history entry as ours (__bcs marker) so popstate
    // events from outside this panel can be ignored below.
    const current = window.history.state || {};
    if (!current || current.__bcs !== true) {
      window.history.replaceState({ __bcs: true, view: "store" }, "");
    }
  } catch (e) {
    // ignore — History API may be restricted in some embedding contexts
  }
  window.addEventListener("popstate", (ev) => {
    const st = ev?.state;
    // Only react to states this panel pushed itself.
    if (!st || st.__bcs !== true) return;
    // Guard flag: view changes triggered while replaying a popstate must
    // not push new history entries themselves.
    this._handlingPopstate = true;
    try {
      const view = st.view || "store";
      if (view === "detail" && st.repo_id) {
        this._openRepoDetail(st.repo_id, false);
      } else {
        this._closeDetail(false);
      }
    } finally {
      this._handlingPopstate = false;
    }
  });
}
async _load() { async _load() {
if (!this._hass) return; if (!this._hass) return;
@@ -72,6 +115,12 @@ class BahmcloudStorePanel extends HTMLElement {
const data = await this._hass.callApi("get", "bcs"); const data = await this._hass.callApi("get", "bcs");
this._data = data; this._data = data;
// Persistent settings (e.g. HACS toggle)
this._hacsEnabled = !!data?.settings?.hacs_enabled;
// Sync settings from backend (e.g. HACS toggle)
this._hacsEnabled = !!data?.settings?.hacs_enabled;
if (this._view === "detail" && this._detailRepoId && Array.isArray(data?.repos)) { if (this._view === "detail" && this._detailRepoId && Array.isArray(data?.repos)) {
const fresh = data.repos.find((r) => this._safeId(r?.id) === this._detailRepoId); const fresh = data.repos.find((r) => this._safeId(r?.id) === this._detailRepoId);
if (fresh) this._detailRepo = fresh; if (fresh) this._detailRepo = fresh;
@@ -84,6 +133,19 @@ class BahmcloudStorePanel extends HTMLElement {
} }
} }
async _setSettings(updates) {
if (!this._hass) return;
try {
const resp = await this._hass.callApi("post", "bcs/settings", updates || {});
if (resp?.ok) {
this._hacsEnabled = !!resp?.settings?.hacs_enabled;
}
} catch (e) {
// Do not fail UI for settings.
this._error = e?.message ? String(e.message) : String(e);
}
}
async _refreshAll() { async _refreshAll() {
if (!this._hass) return; if (!this._hass) return;
if (this._refreshing) return; if (this._refreshing) return;
@@ -308,21 +370,15 @@ class BahmcloudStorePanel extends HTMLElement {
} }
_goBack() { _goBack() {
if (this._view === "detail") {
this._view = "store";
this._detailRepoId = null;
this._detailRepo = null;
this._readmeText = null;
this._readmeHtml = null;
this._readmeError = null;
this._readmeExpanded = false;
this._update();
return;
}
try { try {
// Prefer browser history so mobile back behaves as expected.
history.back(); history.back();
} catch (_) { } catch (_) {
window.location.href = "/"; if (this._view === "detail") {
this._closeDetail(true);
} else {
window.location.href = "/";
}
} }
} }
@@ -368,11 +424,15 @@ class BahmcloudStorePanel extends HTMLElement {
} }
} }
_openRepoDetail(repoId) { _openRepoDetail(repoId, pushHistory = true) {
const repos = Array.isArray(this._data?.repos) ? this._data.repos : []; const repos = Array.isArray(this._data?.repos) ? this._data.repos : [];
const repo = repos.find((r) => this._safeId(r?.id) === repoId); const repo = repos.find((r) => this._safeId(r?.id) === repoId);
if (!repo) return; if (!repo) return;
if (pushHistory) {
this._pushHistory({ view: "detail", repo_id: repoId });
}
this._view = "detail"; this._view = "detail";
this._detailRepoId = repoId; this._detailRepoId = repoId;
this._detailRepo = repo; this._detailRepo = repo;
@@ -561,6 +621,24 @@ class BahmcloudStorePanel extends HTMLElement {
box-shadow: 0 0 0 2px rgba(30,136,229,.15); box-shadow: 0 0 0 2px rgba(30,136,229,.15);
} }
.toggle{
display:inline-flex;
align-items:center;
gap:8px;
padding:10px 12px;
border-radius:14px;
border:1px solid var(--divider-color);
background: var(--card-background-color);
color: var(--primary-text-color);
user-select:none;
cursor:pointer;
}
.toggle input{
margin:0;
width:18px;
height:18px;
}
button{ button{
padding:10px 12px; padding:10px 12px;
border-radius:14px; border-radius:14px;
@@ -859,6 +937,11 @@ class BahmcloudStorePanel extends HTMLElement {
const cat = this._safeText(r?.category) || ""; const cat = this._safeText(r?.category) || "";
if (this._category !== "all" && this._category !== cat) return false; if (this._category !== "all" && this._category !== cat) return false;
// Source filter
if (this._sourceFilter === "bcs" && r?.source !== "index") return false;
if (this._sourceFilter === "hacs" && r?.source !== "hacs") return false;
if (this._sourceFilter === "custom" && r?.source !== "custom") return false;
const latest = this._safeText(r?.latest_version); const latest = this._safeText(r?.latest_version);
const installed = this._asBoolStrict(r?.installed); const installed = this._asBoolStrict(r?.installed);
const installedVersion = this._safeText(r?.installed_version); const installedVersion = this._safeText(r?.installed_version);
@@ -913,7 +996,11 @@ class BahmcloudStorePanel extends HTMLElement {
const updateAvailable = installed && !!latest && (!installedVersion || latest !== installedVersion); const updateAvailable = installed && !!latest && (!installedVersion || latest !== installedVersion);
const badges = []; const badges = [];
if (r?.source === "custom") badges.push("Custom"); // Source badges
if (r?.source === "index") badges.push("BCS Official");
else if (r?.source === "hacs") badges.push("HACS");
else if (r?.source === "custom") badges.push("Custom");
if (installed) badges.push("Installed"); if (installed) badges.push("Installed");
if (updateAvailable) badges.push("Update"); if (updateAvailable) badges.push("Update");
@@ -939,6 +1026,18 @@ class BahmcloudStorePanel extends HTMLElement {
return ` return `
<div class="filters"> <div class="filters">
<input id="q" placeholder="Search…" value="${this._esc(this._search)}" /> <input id="q" placeholder="Search…" value="${this._esc(this._search)}" />
<label class="toggle" title="Show official HACS repositories">
<input id="hacs_toggle" type="checkbox" ${this._hacsEnabled ? "checked" : ""} />
<span>HACS</span>
</label>
<select id="src">
<option value="all" ${this._sourceFilter === "all" ? "selected" : ""}>All sources</option>
<option value="bcs" ${this._sourceFilter === "bcs" ? "selected" : ""}>BCS Official</option>
<option value="hacs" ${this._sourceFilter === "hacs" ? "selected" : ""}>HACS</option>
<option value="custom" ${this._sourceFilter === "custom" ? "selected" : ""}>Custom</option>
</select>
<select id="cat"> <select id="cat">
<option value="all">All categories</option> <option value="all">All categories</option>
${categories.map((c) => `<option value="${this._esc(c)}" ${this._category === c ? "selected" : ""}>${this._esc(c)}</option>`).join("")} ${categories.map((c) => `<option value="${this._esc(c)}" ${this._category === c ? "selected" : ""}>${this._esc(c)}</option>`).join("")}
@@ -972,6 +1071,8 @@ class BahmcloudStorePanel extends HTMLElement {
const cat = root.getElementById("cat"); const cat = root.getElementById("cat");
const filter = root.getElementById("filter"); const filter = root.getElementById("filter");
const sort = root.getElementById("sort"); const sort = root.getElementById("sort");
const src = root.getElementById("src");
const hacsToggle = root.getElementById("hacs_toggle");
if (q) { if (q) {
q.addEventListener("input", (e) => { q.addEventListener("input", (e) => {
@@ -998,12 +1099,51 @@ class BahmcloudStorePanel extends HTMLElement {
}); });
} }
if (src) {
src.addEventListener("change", (e) => {
this._sourceFilter = e?.target?.value || "all";
this._update();
});
}
if (hacsToggle) {
hacsToggle.addEventListener("change", async (e) => {
const enabled = !!e?.target?.checked;
this._hacsEnabled = enabled;
this._update();
await this._setSettings({ hacs_enabled: enabled });
await this._load();
});
}
root.querySelectorAll("[data-open]").forEach((el) => { root.querySelectorAll("[data-open]").forEach((el) => {
const id = el.getAttribute("data-open"); const id = el.getAttribute("data-open");
el.addEventListener("click", () => this._openRepoDetail(id)); el.addEventListener("click", () => this._openRepoDetail(id, true));
}); });
} }
_pushHistory(state) {
if (this._handlingPopstate) return;
try {
window.history.pushState({ __bcs: true, ...(state || {}) }, "");
} catch (e) {
// ignore
}
}
_closeDetail(pushHistory = true) {
this._view = "store";
this._detailRepoId = null;
this._detailRepo = null;
this._readmeText = null;
this._readmeHtml = null;
this._readmeError = null;
this._readmeExpanded = false;
this._readmeCanToggle = false;
if (pushHistory) this._pushHistory({ view: "store" });
this._update();
}
_renderAbout() { _renderAbout() {
return ` return `
<div class="card"> <div class="card">

View File

@@ -36,6 +36,9 @@ class BCSStorage:
Keys: Keys:
- custom_repos: list of manually added repositories - custom_repos: list of manually added repositories
- installed_repos: mapping repo_id -> installed metadata - installed_repos: mapping repo_id -> installed metadata
- settings: persistent user settings (e.g. toggles in the UI)
- hacs_cache: cached HACS metadata to improve UX (display names/descriptions)
- repo_cache: cached per-repo enrichment (names/descriptions/versions) to keep the UI populated after restart
""" """
def __init__(self, hass: HomeAssistant) -> None: def __init__(self, hass: HomeAssistant) -> None:
@@ -53,8 +56,92 @@ class BCSStorage:
if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict): if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict):
data["installed_repos"] = {} data["installed_repos"] = {}
if "settings" not in data or not isinstance(data.get("settings"), dict):
data["settings"] = {}
if "hacs_cache" not in data or not isinstance(data.get("hacs_cache"), dict):
data["hacs_cache"] = {}
if "repo_cache" not in data or not isinstance(data.get("repo_cache"), dict):
data["repo_cache"] = {}
return data return data
async def get_repo_cache(self) -> dict[str, Any]:
    """Return the persisted per-repo enrichment cache.

    Expected payload shape::

        {
            "fetched_at": <unix_ts>,
            "repos": {
                "<repo_id>": {
                    "ts": <unix_ts>, "url": "...", "name": "...",
                    "provider_description": "...", "meta_name": "...",
                    "meta_description": "...", "meta_category": "...",
                    "meta_source": "...", "latest_version": "...",
                    "latest_version_source": "...", "default_branch": "...",
                    "owner": "...", "provider_repo_name": "...",
                },
            },
        }

    Malformed or missing data yields an empty dict.
    """
    data = await self._load()
    stored = data.get("repo_cache", {})
    if isinstance(stored, dict):
        return stored
    return {}
async def set_repo_cache(self, cache: dict[str, Any]) -> None:
    """Persist the per-repo enrichment cache, replacing any previous value.

    Non-dict input is stored as an empty mapping to keep the blob well-formed.
    """
    payload = cache if isinstance(cache, dict) else {}
    data = await self._load()
    data["repo_cache"] = payload
    await self._save(data)
async def get_hacs_cache(self) -> dict[str, Any]:
    """Return the persisted HACS metadata cache.

    Expected payload shape::

        {
            "fetched_at": <unix_ts>,
            "repos": {"owner/repo": {"name": "...", "description": "...", "domain": "..."}},
        }

    Malformed or missing data yields an empty dict.
    """
    data = await self._load()
    stored = data.get("hacs_cache", {})
    if isinstance(stored, dict):
        return stored
    return {}
async def set_hacs_cache(self, cache: dict[str, Any]) -> None:
    """Persist the HACS metadata cache, replacing any previous value.

    Non-dict input is stored as an empty mapping to keep the blob well-formed.
    """
    payload = cache if isinstance(cache, dict) else {}
    data = await self._load()
    data["hacs_cache"] = payload
    await self._save(data)
async def get_settings(self) -> dict[str, Any]:
    """Return persisted user settings (UI/behavior toggles).

    Malformed or missing data yields an empty dict.
    """
    data = await self._load()
    stored = data.get("settings", {})
    if isinstance(stored, dict):
        return stored
    return {}
async def set_settings(self, updates: dict[str, Any]) -> dict[str, Any]:
    """Merge *updates* into the persisted settings and return the result.

    Keys are coerced to strings; a corrupt settings blob is reset to {}.
    A None/empty *updates* leaves the stored settings unchanged.
    """
    data = await self._load()
    current = data.get("settings", {})
    if not isinstance(current, dict):
        current = {}
    current.update({str(key): value for key, value in (updates or {}).items()})
    data["settings"] = current
    await self._save(data)
    return current
async def _save(self, data: dict[str, Any]) -> None: async def _save(self, data: dict[str, Any]) -> None:
await self._store.async_save(data) await self._store.async_save(data)

View File

@@ -215,7 +215,12 @@ class BCSApiView(HomeAssistantView):
async def get(self, request: web.Request) -> web.Response: async def get(self, request: web.Request) -> web.Response:
return web.json_response( return web.json_response(
{"ok": True, "version": self.core.version, "repos": self.core.list_repos_public()} {
"ok": True,
"version": self.core.version,
"settings": self.core.get_settings_public(),
"repos": self.core.list_repos_public(),
}
) )
async def post(self, request: web.Request) -> web.Response: async def post(self, request: web.Request) -> web.Response:
@@ -248,6 +253,37 @@ class BCSApiView(HomeAssistantView):
return web.json_response({"ok": False, "message": "Unknown operation"}, status=400) return web.json_response({"ok": False, "message": "Unknown operation"}, status=400)
class BCSSettingsView(HomeAssistantView):
    """REST endpoint for persistent UI settings (e.g. the HACS toggle).

    GET  /api/bcs/settings -> current settings
    POST /api/bcs/settings -> apply whitelisted updates, return merged settings
    """

    url = "/api/bcs/settings"
    name = "api:bcs_settings"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Core owns persistence; this view is a thin HTTP adapter.
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return the current persisted settings."""
        return web.json_response({"ok": True, "settings": self.core.get_settings_public()})

    async def post(self, request: web.Request) -> web.Response:
        """Apply settings updates from the request body.

        Only known keys are accepted; unknown payload entries are ignored.
        A malformed body is treated as empty.
        """
        try:
            body = await request.json()
        except Exception:
            body = {}
        updates: dict[str, Any] = {}
        if "hacs_enabled" in body:
            # Coerce strictly to bool so storage never holds arbitrary values.
            updates["hacs_enabled"] = bool(body.get("hacs_enabled"))
        try:
            settings = await self.core.set_settings(updates)
            return web.json_response({"ok": True, "settings": settings})
        except Exception as e:
            _LOGGER.exception("BCS set settings failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Failed"}, status=500)
class BCSCustomRepoView(HomeAssistantView): class BCSCustomRepoView(HomeAssistantView):
url = "/api/bcs/custom_repo" url = "/api/bcs/custom_repo"
name = "api:bcs_custom_repo" name = "api:bcs_custom_repo"