5 Commits
0.6.6 ... 0.6.7

Author SHA1 Message Date
33502a6d99 add 0.6.7 2026-01-19 07:16:35 +00:00
1306ee9518 Change 0.6.7 2026-01-19 07:15:48 +00:00
e37546cab1 0.6 7 2026-01-19 07:15:30 +00:00
88c3233fd1 0.6.7 2026-01-19 07:14:58 +00:00
02f3047080 0.6 7 2026-01-19 07:14:22 +00:00
4 changed files with 255 additions and 4 deletions

View File

@@ -11,6 +11,17 @@ Sections:
---
## [0.6.7] - 2026-01-19
### Fixed
- Repository metadata loaded on demand is now persisted and restored after Home Assistant restart.
- Background enrichment reliably updates latest versions and descriptions for HACS repositories.
- Cached repository data is correctly restored on startup for non-installed repositories.
### Changed
- Repository details are only considered fully enriched once latest version information is available.
- Improved reliability of background cache synchronization without impacting startup performance.
## [0.6.6] - 2026-01-18
### Added

View File

@@ -38,6 +38,11 @@ HACS_INTEGRATIONS_DATA_URL = "https://data-v2.hacs.xyz/integration/data.json"
HACS_DEFAULT_CATEGORY = "Integrations"
HACS_CACHE_TTL_SECONDS = 60 * 60 * 24 # 24h
# Repo enrichment cache:
# - persists across restarts
# - keeps UI populated (name/description/latest) without blocking startup
REPO_CACHE_TTL_SECONDS = 6 * 60 * 60 # 6h
class BCSError(Exception):
    """Base exception raised for BCS core errors."""
@@ -109,6 +114,12 @@ class BCSCore:
self._hacs_meta: dict[str, dict[str, Any]] = {}
self._hacs_meta_lock = asyncio.Lock()
# Persistent per-repo enrichment cache (name/description/latest). Loaded from storage.
self._repo_cache_fetched_at: int = 0
self._repo_cache: dict[str, dict[str, Any]] = {}
self._repo_cache_lock = asyncio.Lock()
self._repo_enrich_task: asyncio.Task | None = None
# Phase F2: backups before install/update
self._backup_root = Path(self.hass.config.path(".bcs_backups"))
self._backup_keep_per_domain: int = 5
@@ -142,6 +153,18 @@ class BCSCore:
self._hacs_meta_fetched_at = 0
self._hacs_meta = {}
# Load persisted per-repo enrichment cache (keeps UI populated after restart).
try:
rc = await self.storage.get_repo_cache()
if isinstance(rc, dict):
self._repo_cache_fetched_at = int(rc.get("fetched_at") or 0)
repos = rc.get("repos")
if isinstance(repos, dict):
self._repo_cache = {str(k): (v if isinstance(v, dict) else {}) for k, v in repos.items()}
except Exception:
self._repo_cache_fetched_at = 0
self._repo_cache = {}
async def _read_manifest_version_async(self) -> str:
def _read() -> str:
try:
@@ -254,7 +277,16 @@ class BCSCore:
refresh_signature = f"{self.last_index_hash}:{len(custom_repos or [])}:{'h' if hacs_enabled else 'n'}:{len(hacs_repos)}"
if self._last_refresh_signature and refresh_signature == self._last_refresh_signature and self.repos:
_LOGGER.debug("BCS refresh skipped (no changes detected)")
_LOGGER.debug("BCS refresh fast-path (no repo list changes)")
# Even if the repo list is unchanged, we still want fresh versions/descriptions
# for installed repos and we still want background enrichment to keep the
# overview populated.
if hacs_enabled and self._hacs_meta_needs_refresh():
self.hass.async_create_task(self._refresh_hacs_meta_background())
await self._enrich_installed_only(self.repos)
self._schedule_repo_enrich_background()
return
@@ -285,9 +317,15 @@ class BCSCore:
if self._hacs_meta_needs_refresh():
self.hass.async_create_task(self._refresh_hacs_meta_background())
# Apply persisted per-repo enrichment cache (instant UI after restart).
self._apply_repo_cache(merged)
await self._enrich_installed_only(merged)
self.repos = merged
# Enrich remaining repos in the background and persist results (non-blocking).
self._schedule_repo_enrich_background()
self._last_refresh_signature = refresh_signature
_LOGGER.info(
@@ -478,6 +516,162 @@ class BCSCore:
_LOGGER.info("BCS HACS metadata cached: repos=%s", len(self._hacs_meta))
self.signal_updated()
# ---------- Persistent per-repo enrichment cache ----------
def _apply_repo_cache(self, merged: dict[str, RepoItem]) -> None:
    """Overlay persisted enrichment data onto freshly merged RepoItem objects.

    Fast and I/O-free: used right after a Home Assistant restart so the UI
    shows cached names/descriptions/versions immediately. Only fills fields
    that are currently empty; values produced by the current refresh are
    never overwritten. Cache entries whose stored URL no longer matches the
    repo's URL (repo id reuse) are ignored.
    """
    if not self._repo_cache:
        return
    fill_fields = (
        "owner",
        "provider_repo_name",
        "provider_description",
        "default_branch",
        "latest_version",
        "latest_version_source",
        "meta_source",
        "meta_name",
        "meta_description",
        "meta_category",
        "meta_author",
        "meta_maintainer",
    )
    for repo_id, item in merged.items():
        entry = self._repo_cache.get(str(repo_id))
        if not (isinstance(entry, dict) and entry):
            continue
        stored_url = entry.get("url")
        if isinstance(stored_url, str) and stored_url and item.url and stored_url != item.url:
            # Repo id now points at a different URL; the cached entry is stale.
            continue
        # Display name: fill only when missing or when it just mirrors the URL.
        cached_name = entry.get("name")
        if (not item.name or item.name == item.url) and isinstance(cached_name, str) and cached_name:
            item.name = str(cached_name)
        # Fill remaining gaps; only non-empty strings are applied.
        for field in fill_fields:
            if getattr(item, field, None):
                continue
            value = entry.get(field)
            if value is None:
                continue
            if isinstance(value, str):
                stripped = value.strip()
                if stripped:
                    setattr(item, field, stripped)
def _repo_needs_enrich(self, repo_id: str, r: RepoItem) -> bool:
    """Decide whether this repo should be (re-)enriched in the background.

    Enrich when key display fields (latest version, any description) are
    missing, or when the cached entry is absent or older than
    REPO_CACHE_TTL_SECONDS.
    """
    # Missing key fields -> always enrich, regardless of cache age.
    if not r.latest_version:
        return True
    if not (r.meta_description or r.provider_description):
        return True
    entry = self._repo_cache.get(str(repo_id)) if isinstance(self._repo_cache, dict) else None
    stamp = int(entry.get("ts") or 0) if isinstance(entry, dict) else 0
    # No valid timestamp -> treat the entry as stale.
    if stamp <= 0:
        return True
    return int(time.time()) - stamp > REPO_CACHE_TTL_SECONDS
def _update_repo_cache_from_repo(self, repo_id: str, r: RepoItem) -> None:
    """Snapshot the repo's display/enrichment fields into the in-memory cache.

    The entry is timestamped so _repo_needs_enrich() can apply the TTL.
    Persisting to storage is a separate step (_persist_repo_cache).
    """
    snapshot = {"ts": int(time.time())}
    for field in (
        "url",
        "name",
        "owner",
        "provider_repo_name",
        "provider_description",
        "default_branch",
        "latest_version",
        "latest_version_source",
        "meta_source",
        "meta_name",
        "meta_description",
        "meta_category",
        "meta_author",
        "meta_maintainer",
    ):
        snapshot[field] = getattr(r, field)
    self._repo_cache[str(repo_id)] = snapshot
def _schedule_repo_enrich_background(self) -> None:
    """Kick off background repo enrichment; no-op while a run is active.

    Non-blocking and safe to call repeatedly: a new task is only created
    when no previous enrichment task exists or the previous one finished.
    """
    current = self._repo_enrich_task
    if current is not None and not current.done():
        # A previous enrichment pass is still running.
        return
    self._repo_enrich_task = self.hass.async_create_task(self._repo_enrich_background())
async def _repo_enrich_background(self) -> None:
    """Enrich repo details in the background and persist the results.

    Runs with limited concurrency (Semaphore(4)) to stay friendly to
    provider rate limits, and flushes the persistent cache in batches of 25
    so results survive a Home Assistant restart. Never blocks startup;
    scheduled via _schedule_repo_enrich_background().
    """
    # Small delay to avoid competing with critical startup work.
    await asyncio.sleep(2)
    repo_ids = list(self.repos.keys())
    updated = 0  # repos enriched since the LAST flush (reset after each flush)
    # Keep it conservative to avoid rate limits.
    sem = asyncio.Semaphore(4)

    async def process_one(repo_id: str) -> None:
        nonlocal updated
        r = self.repos.get(repo_id)
        if not r:
            return
        if not self._repo_needs_enrich(repo_id, r):
            return
        async with sem:
            try:
                await self._enrich_one_repo(r)
            except Exception:
                _LOGGER.debug("BCS background enrich failed for %s", repo_id, exc_info=True)
                return
        async with self._repo_cache_lock:
            self._update_repo_cache_from_repo(repo_id, r)
            updated += 1

    tasks: list[asyncio.Task] = []
    for repo_id in repo_ids:
        tasks.append(self.hass.async_create_task(process_one(repo_id)))
        # Flush in batches to limit memory/connection churn.
        if len(tasks) >= 25:
            await asyncio.gather(*tasks, return_exceptions=True)
            tasks = []
            if updated:
                await self._persist_repo_cache()
                self.signal_updated()
                # BUGFIX: reset the counter so later batches only persist
                # when they actually added new data. Previously `updated`
                # was never reset, so after the first successful batch every
                # subsequent batch (and the final flush) re-wrote storage and
                # re-signaled the UI even when nothing new was enriched.
                updated = 0
            await asyncio.sleep(0)
    if tasks:
        await asyncio.gather(*tasks, return_exceptions=True)
    if updated:
        await self._persist_repo_cache()
        self.signal_updated()
async def _persist_repo_cache(self) -> None:
    """Write the in-memory per-repo cache to persistent storage.

    Best effort: the snapshot is built and saved under the cache lock, and
    a failed write is logged at debug level rather than raised, so callers
    (background tasks, refresh paths) are never broken by storage errors.
    """
    async with self._repo_cache_lock:
        snapshot = {
            "fetched_at": int(time.time()),
            "repos": self._repo_cache,
        }
        try:
            await self.storage.set_repo_cache(snapshot)
        except Exception:
            # Never let a storage error propagate out of a best-effort save.
            _LOGGER.debug("BCS failed to persist repo cache", exc_info=True)
async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None:
sem = asyncio.Semaphore(6)
@@ -577,12 +771,19 @@ class BCSCore:
if not r:
return None
# If we already have a latest_version (or provider_description), consider it enriched.
if r.latest_version or r.provider_description or r.meta_source:
# Consider it enriched only if we already have a latest_version and at least
# some descriptive info (meta/provider). HACS repos often have meta_source set
# early, but still need provider latest_version.
if r.latest_version and (r.provider_description or r.meta_description or r.meta_source):
return r
try:
await self._enrich_one_repo(r)
# Persist the newly fetched details so they survive a Home Assistant restart.
async with self._repo_cache_lock:
self._update_repo_cache_from_repo(repo_id, r)
await self._persist_repo_cache()
except Exception:
_LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True)
return r

View File

@@ -1,7 +1,7 @@
{
"domain": "bahmcloud_store",
"name": "Bahmcloud Store",
"version": "0.6.6",
"version": "0.6.7",
"documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
"platforms": ["update"],
"requirements": [],

View File

@@ -38,6 +38,7 @@ class BCSStorage:
- installed_repos: mapping repo_id -> installed metadata
- settings: persistent user settings (e.g. toggles in the UI)
- hacs_cache: cached HACS metadata to improve UX (display names/descriptions)
- repo_cache: cached per-repo enrichment (names/descriptions/versions) to keep the UI populated after restart
"""
def __init__(self, hass: HomeAssistant) -> None:
@@ -61,8 +62,46 @@ class BCSStorage:
if "hacs_cache" not in data or not isinstance(data.get("hacs_cache"), dict):
data["hacs_cache"] = {}
if "repo_cache" not in data or not isinstance(data.get("repo_cache"), dict):
data["repo_cache"] = {}
return data
async def get_repo_cache(self) -> dict[str, Any]:
    """Return the persisted per-repo enrichment cache.

    The stored payload has this shape::

        {
            "fetched_at": <unix_ts>,
            "repos": {
                "<repo_id>": {
                    "ts": <unix_ts>,
                    "url": "...", "name": "...", "owner": "...",
                    "provider_repo_name": "...",
                    "provider_description": "...",
                    "default_branch": "...",
                    "latest_version": "...",
                    "latest_version_source": "...",
                    "meta_source": "...", "meta_name": "...",
                    "meta_description": "...", "meta_category": "..."
                }
            }
        }

    A missing or non-dict value (e.g. corrupt storage) yields an empty dict.
    """
    stored = await self._load()
    repo_cache = stored.get("repo_cache", {})
    if isinstance(repo_cache, dict):
        return repo_cache
    return {}
async def set_repo_cache(self, cache: dict[str, Any]) -> None:
    """Persist cached per-repo enrichment data.

    Non-dict input is replaced with an empty mapping so the stored file
    always keeps a well-formed "repo_cache" key.
    """
    payload = cache if isinstance(cache, dict) else {}
    data = await self._load()
    data["repo_cache"] = payload
    await self._save(data)
async def get_hacs_cache(self) -> dict[str, Any]:
"""Return cached HACS metadata.