0.6.3
This commit is contained in:
@@ -89,6 +89,9 @@ class BCSCore:
|
||||
self.last_index_hash: str | None = None
|
||||
self.last_index_loaded_at: float | None = None
|
||||
|
||||
# Fast refresh: skip expensive processing when index/custom repos unchanged
|
||||
self._last_refresh_signature: str | None = None
|
||||
|
||||
self._install_lock = asyncio.Lock()
|
||||
self._installed_cache: dict[str, Any] = {}
|
||||
|
||||
@@ -178,6 +181,19 @@ class BCSCore:
|
||||
|
||||
custom_repos = await self.storage.list_custom_repos()
|
||||
|
||||
# Fast path: if index + custom repos did not change, skip expensive work.
|
||||
try:
|
||||
custom_sig = [(c.id, (c.url or '').strip(), (c.name or '').strip()) for c in (custom_repos or [])]
|
||||
custom_sig.sort()
|
||||
refresh_signature = json.dumps({"index_hash": self.last_index_hash, "custom": custom_sig}, sort_keys=True)
|
||||
except Exception:
|
||||
refresh_signature = f"{self.last_index_hash}:{len(custom_repos or [])}"
|
||||
|
||||
if self._last_refresh_signature and refresh_signature == self._last_refresh_signature and self.repos:
|
||||
_LOGGER.debug("BCS refresh skipped (no changes detected)")
|
||||
return
|
||||
|
||||
|
||||
merged: dict[str, RepoItem] = {}
|
||||
|
||||
for item in index_repos:
|
||||
@@ -194,9 +210,11 @@ class BCSCore:
|
||||
for r in merged.values():
|
||||
r.provider = detect_provider(r.url)
|
||||
|
||||
await self._enrich_and_resolve(merged)
|
||||
await self._enrich_installed_only(merged)
|
||||
self.repos = merged
|
||||
|
||||
self._last_refresh_signature = refresh_signature
|
||||
|
||||
_LOGGER.info(
|
||||
"BCS refresh complete: repos=%s (index=%s, custom=%s)",
|
||||
len(self.repos),
|
||||
@@ -238,6 +256,87 @@ class BCSCore:
|
||||
|
||||
await asyncio.gather(*(process_one(r) for r in merged.values()), return_exceptions=True)
|
||||
|
||||
|
||||
async def _enrich_installed_only(self, merged: dict[str, RepoItem]) -> None:
    """Enrich only installed repos (fast refresh for large indexes).

    This keeps the backend responsive even with thousands of repositories.
    Details for non-installed repos are fetched on-demand.

    Args:
        merged: Mapping of repo id -> RepoItem assembled by the refresh pass;
            matching entries are enriched in place via _enrich_one_repo.
    """
    # _installed_cache may not exist yet (older instances / early startup);
    # treat a missing, non-dict, or empty value as "nothing installed".
    installed_map: dict[str, Any] = getattr(self, "_installed_cache", {}) or {}
    if not isinstance(installed_map, dict) or not installed_map:
        return

    # Only enrich repos that are both installed and present in the merged
    # index; ids are stringified to match the merged-dict key type.
    to_process: list[RepoItem] = [
        r for rid in installed_map if (r := merged.get(str(rid)))
    ]
    if not to_process:
        return

    # Bound concurrency so provider APIs are not hammered.
    sem = asyncio.Semaphore(6)

    async def process_one(r: RepoItem) -> None:
        async with sem:
            await self._enrich_one_repo(r)

    # return_exceptions=True: one failing repo must not abort the whole batch.
    await asyncio.gather(*(process_one(r) for r in to_process), return_exceptions=True)
|
||||
|
||||
async def _enrich_one_repo(self, r: RepoItem) -> None:
    """Fetch provider info + metadata for a single repo item."""
    # Provider-level details first (owner, description, latest release, ...).
    provider_info: RepoInfo = await fetch_repo_info(self.hass, r.url)

    # Prefer freshly fetched values but never clobber existing ones with None.
    if provider_info.provider:
        r.provider = provider_info.provider
    if provider_info.owner:
        r.owner = provider_info.owner
    r.provider_repo_name = provider_info.repo_name
    r.provider_description = provider_info.description
    if provider_info.default_branch:
        r.default_branch = provider_info.default_branch

    r.latest_version = provider_info.latest_version
    r.latest_version_source = provider_info.latest_version_source

    # Repo-hosted metadata (may use the default branch resolved above).
    meta: RepoMetadata = await fetch_repo_metadata(self.hass, r.url, r.default_branch)
    r.meta_source = meta.source
    if meta.name:
        r.meta_name = meta.name
        r.name = meta.name
    r.meta_description = meta.description
    if meta.category:
        r.meta_category = meta.category
    r.meta_author = meta.author
    r.meta_maintainer = meta.maintainer

    # Keep a stable name fallback
    if not r.name:
        r.name = r.provider_repo_name or r.url
|
||||
|
||||
async def ensure_repo_details(self, repo_id: str) -> RepoItem | None:
    """Ensure provider/meta/latest fields are loaded for a repo.

    Used by the UI when a repo detail view is opened.
    """
    repo = self.get_repo(repo_id)
    if not repo:
        return None

    # If we already have a latest_version (or provider_description), consider it enriched.
    enriched = repo.latest_version or repo.provider_description or repo.meta_source
    if enriched:
        return repo

    # Best-effort: enrichment failures must not break the detail view.
    try:
        await self._enrich_one_repo(repo)
    except Exception:
        _LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True)
    return repo
|
||||
|
||||
async def list_repo_versions(self, repo_id: str) -> list[dict[str, Any]]:
    """Return available versions for the given repo id, or [] if unknown."""
    repo = self.get_repo(repo_id)
    return await fetch_repo_versions(self.hass, repo.url) if repo else []
|
||||
|
||||
def _add_cache_buster(self, url: str) -> str:
|
||||
parts = urlsplit(url)
|
||||
q = dict(parse_qsl(parts.query, keep_blank_values=True))
|
||||
@@ -329,6 +428,7 @@ class BCSCore:
|
||||
name=name,
|
||||
url=repo_url,
|
||||
source="index",
|
||||
meta_category=str(r.get("category")) if r.get("category") else None,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user