This commit is contained in:
2026-01-19 14:35:05 +00:00
parent 76d8a45f37
commit faac180212

View File

@@ -42,6 +42,9 @@ HACS_CACHE_TTL_SECONDS = 60 * 60 * 24 # 24h
# - persists across restarts
# - keeps UI populated (name/description/latest) without blocking startup
REPO_CACHE_TTL_SECONDS = 6 * 60 * 60  # 6h
# Release/tag lists change rarely and can be expensive for some providers (e.g. GitHub
# unauthenticated rate limits). Cache them much longer than repo metadata and refresh
# only on-demand (when the user opens the version selector), comparing this TTL against
# the cached "versions_ts" timestamp.
VERSIONS_CACHE_TTL_SECONDS = 7 * 24 * 60 * 60  # 7d
class BCSError(Exception):
@@ -572,6 +575,12 @@ class BCSCore:
if isinstance(cached, dict):
ts = int(cached.get("ts") or 0)
# Background enrichment should be a one-time best-effort pass.
# If we already attempted it once for this repo, do not keep retrying on every refresh.
# On-demand (opening repo details / version selector) can still refresh and persist.
if bool(cached.get("bg_done")):
return False
# Missing key fields -> enrich.
if not r.latest_version:
return True
@@ -634,10 +643,20 @@ class BCSCore:
await self._enrich_one_repo(r)
except Exception:
_LOGGER.debug("BCS background enrich failed for %s", repo_id, exc_info=True)
# Mark as attempted so we don't keep hammering the provider.
async with self._repo_cache_lock:
cached = self._repo_cache.setdefault(str(repo_id), {})
if isinstance(cached, dict):
cached["bg_done"] = True
cached["bg_done_ts"] = int(time.time())
return
async with self._repo_cache_lock:
self._update_repo_cache_from_repo(repo_id, r)
cached = self._repo_cache.setdefault(str(repo_id), {})
if isinstance(cached, dict):
cached["bg_done"] = True
cached["bg_done_ts"] = int(time.time())
updated += 1
# Process sequentially but allow limited concurrency.
@@ -792,7 +811,43 @@ class BCSCore:
repo = self.get_repo(repo_id)
if not repo:
return []
return await fetch_repo_versions(self.hass, repo.url)
# Prefer cached version lists to avoid hammering provider APIs (notably GitHub unauthenticated
# rate limits). We refresh on-demand when the user opens the selector.
cached = None
cached_ts = 0
async with self._repo_cache_lock:
cached = self._repo_cache.get(str(repo_id)) if isinstance(self._repo_cache, dict) else None
if isinstance(cached, dict):
cached_ts = int(cached.get("versions_ts", 0) or 0)
now = int(time.time())
if isinstance(cached, dict) and cached.get("versions") and (now - cached_ts) < VERSIONS_CACHE_TTL_SECONDS:
return list(cached.get("versions") or [])
try:
versions = await fetch_repo_versions(self.hass, repo.url)
except Exception:
versions = []
# If the provider fetch returned only the basic fallbacks ("Latest" + "Branch") but we have
# a previously cached richer list, keep using the cached list.
if (
isinstance(cached, dict)
and cached.get("versions")
and len(list(cached.get("versions") or [])) > 2
and len(versions) <= 2
):
return list(cached.get("versions") or [])
# Persist whatever we got (even if small) to avoid repeated calls when rate-limited.
async with self._repo_cache_lock:
entry = self._repo_cache.setdefault(str(repo_id), {}) if isinstance(self._repo_cache, dict) else {}
if isinstance(entry, dict):
entry["versions"] = versions
entry["versions_ts"] = now
await self._persist_repo_cache()
return versions
def _add_cache_buster(self, url: str) -> str:
parts = urlsplit(url)