7 Commits
0.7.0 ... 0.7.2

Author SHA1 Message Date
a8ff892993 add 0.7.2 2026-01-20 08:15:43 +00:00
90223e3fc4 0.7.2 2026-01-20 08:15:14 +00:00
0f5504b67d 0.7.2 2026-01-20 08:14:58 +00:00
5fff1b2692 add 0.7.1 2026-01-20 07:19:03 +00:00
c8356c7603 0.7.1 2026-01-20 07:17:45 +00:00
0c49a50fc9 0.7.1 2026-01-20 07:17:24 +00:00
fa48841645 0.7.1 2026-01-20 07:16:48 +00:00
4 changed files with 149 additions and 10 deletions

View File

@@ -11,6 +11,18 @@ Sections:
---
## 0.7.2 2026-01-20
### Fixed
- When Bahmcloud Store is installed via an external installer (files copied into /config/custom_components), it now reconciles itself as "installed" in BCS storage so update checks work immediately.
## 0.7.1 2026-01-20
### Fixed
- GitHub version provider now reliably fetches the latest 20 releases/tags using authenticated API requests.
- Repositories that were previously fetched in a degraded state (only `latest` and `branch`) are now automatically refreshed on repository view.
- Cached version lists with incomplete data are no longer reused and are re-fetched from the provider.
## [0.7.0] - 2026-01-20
### Added

View File

@@ -181,6 +181,108 @@ class BCSCore:
return await self.hass.async_add_executor_job(_read) return await self.hass.async_add_executor_job(_read)
async def _read_manifest_info_async(self) -> dict[str, str]:
"""Read manifest.json fields that help identify this integration."""
def _read() -> dict[str, str]:
try:
manifest_path = Path(__file__).resolve().parent / "manifest.json"
data = json.loads(manifest_path.read_text(encoding="utf-8"))
out: dict[str, str] = {}
for k in ("version", "documentation", "name", "domain"):
v = data.get(k)
if v:
out[str(k)] = str(v)
return out
except Exception:
return {}
return await self.hass.async_add_executor_job(_read)
def _normalize_repo_base(self, url: str) -> str:
"""Normalize repository URLs to a stable base for matching.
Example:
https://git.example.tld/org/repo/raw/branch/main/store.yaml
becomes:
https://git.example.tld/org/repo
"""
try:
p = urlsplit(str(url or "").strip())
parts = [x for x in (p.path or "").split("/") if x]
base_path = "/" + "/".join(parts[:2]) if len(parts) >= 2 else (p.path or "")
return urlunsplit((p.scheme, p.netloc, base_path.rstrip("/"), "", "")).lower()
except Exception:
return str(url or "").strip().lower()
async def _ensure_self_marked_installed(self, repos: dict[str, RepoItem]) -> None:
    """Ensure BCS is treated as installed when deployed via external installer.

    When users install BCS via an installer that places files into
    /config/custom_components, our internal storage has no installed entry.
    This breaks update detection for the BCS repo entry in the Store.

    Args:
        repos: Current repo map used to locate the entry that corresponds
            to this integration.
    """
    try:
        # Already tracked as installed? Then there is nothing to reconcile.
        items = await self.storage.list_installed_repos()
        for it in items:
            if DOMAIN in [str(d) for d in (it.domains or [])]:
                return
        # Files must exist on disk.
        cc_root = Path(self.hass.config.path("custom_components"))
        manifest_path = cc_root / DOMAIN / "manifest.json"
        if not manifest_path.exists():
            return
        # Pull identifying fields from our own manifest.json.
        info = await self._read_manifest_info_async()
        doc = (info.get("documentation") or "").strip()
        name = (info.get("name") or "").strip()
        ver = (info.get("version") or self.version or "unknown").strip()
        doc_base = self._normalize_repo_base(doc) if doc else ""
        # Identify the matching repo entry in our current repo list.
        # Matching cascade, strongest signal first:
        #   1) manifest "documentation" URL base vs repo URL base,
        #   2) manifest "name" (case-insensitive),
        #   3) repo URL containing "bahmcloud_store".
        chosen: RepoItem | None = None
        if doc_base:
            for r in repos.values():
                if self._normalize_repo_base(r.url) == doc_base:
                    chosen = r
                    break
        if not chosen and name:
            for r in repos.values():
                if (r.name or "").strip().lower() == name.lower():
                    chosen = r
                    break
        if not chosen:
            for r in repos.values():
                if "bahmcloud_store" in (r.url or "").lower():
                    chosen = r
                    break
        if not chosen:
            _LOGGER.debug("BCS self-install reconcile: could not match repo entry")
            return
        # Persist as installed. An "unknown" version is stored as None so that
        # later update checks do not compare against a bogus string.
        await self.storage.set_installed_repo(
            repo_id=chosen.id,
            url=chosen.url,
            domains=[DOMAIN],
            installed_version=ver if ver != "unknown" else None,
            installed_manifest_version=ver if ver != "unknown" else None,
            ref=ver if ver != "unknown" else None,
        )
        _LOGGER.info(
            "BCS self-install reconcile: marked as installed (repo_id=%s version=%s)",
            chosen.id,
            ver,
        )
    except Exception:
        # Best-effort reconcile: must never break startup/refresh.
        _LOGGER.debug("BCS self-install reconcile failed", exc_info=True)
def add_listener(self, cb) -> None: def add_listener(self, cb) -> None:
self._listeners.append(cb) self._listeners.append(cb)
@@ -324,6 +426,12 @@ class BCSCore:
# Apply persisted per-repo enrichment cache (instant UI after restart). # Apply persisted per-repo enrichment cache (instant UI after restart).
self._apply_repo_cache(merged) self._apply_repo_cache(merged)
# If BCS itself was installed via an external installer (i.e. files exist on disk
# but our storage has no installed entry yet), we still want update checks to work.
# Reconcile this once we have the current repo list.
await self._ensure_self_marked_installed(merged)
await self._refresh_installed_cache()
await self._enrich_installed_only(merged) await self._enrich_installed_only(merged)
self.repos = merged self.repos = merged
@@ -808,13 +916,21 @@ class BCSCore:
_LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True) _LOGGER.debug("BCS ensure_repo_details failed for %s", repo_id, exc_info=True)
return r return r
async def list_repo_versions(self, repo_id: str, *, limit: int = 20) -> list[dict[str, Any]]: async def list_repo_versions(
self,
repo_id: str,
*,
limit: int = 20,
force_refresh: bool = False,
) -> list[dict[str, Any]]:
repo = self.get_repo(repo_id) repo = self.get_repo(repo_id)
if not repo: if not repo:
return [] return []
# Prefer cached version lists to avoid hammering provider APIs (notably GitHub unauthenticated # Prefer cached version lists to avoid hammering provider APIs (notably GitHub unauthenticated
# rate limits). We refresh on-demand when the user opens the selector. # rate limits). However, if the cached list is clearly a degraded fallback (e.g. only
# "Latest" + "Branch"), we treat it as stale and retry immediately when the user requests
# versions again.
cached = None cached = None
cached_ts = 0 cached_ts = 0
async with self._repo_cache_lock: async with self._repo_cache_lock:
@@ -823,8 +939,17 @@ class BCSCore:
cached_ts = int(cached.get("versions_ts", 0) or 0) cached_ts = int(cached.get("versions_ts", 0) or 0)
now = int(time.time()) now = int(time.time())
if isinstance(cached, dict) and cached.get("versions") and (now - cached_ts) < VERSIONS_CACHE_TTL_SECONDS: cached_versions = list(cached.get("versions") or []) if isinstance(cached, dict) else []
return list(cached.get("versions") or []) cache_fresh = (now - cached_ts) < VERSIONS_CACHE_TTL_SECONDS
# Cache hit if it's fresh and not degraded, unless the caller explicitly wants a refresh.
if (
not force_refresh
and cached_versions
and cache_fresh
and len(cached_versions) > 2
):
return cached_versions
try: try:
versions = await fetch_repo_versions( versions = await fetch_repo_versions(

View File

@@ -1,7 +1,7 @@
{ {
"domain": "bahmcloud_store", "domain": "bahmcloud_store",
"name": "Bahmcloud Store", "name": "Bahmcloud Store",
"version": "0.7.0", "version": "0.7.2",
"documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store", "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
"config_flow": true, "config_flow": true,
"platforms": ["update"], "platforms": ["update"],

View File

@@ -537,7 +537,7 @@ async def fetch_repo_versions(
- source: release|tag|branch - source: release|tag|branch
Notes: Notes:
- Uses public endpoints (no tokens) for public repositories. - Uses provider APIs; for GitHub we include the configured token (if any) to avoid unauthenticated rate limits.
- We prefer releases first (if available), then tags. - We prefer releases first (if available), then tags.
""" """
@@ -575,11 +575,13 @@ async def fetch_repo_versions(
try: try:
if prov == "github": if prov == "github":
# Releases # Releases (prefer these over tags)
gh_headers = {"Accept": "application/vnd.github+json", "User-Agent": UA} # Use the configured GitHub token (if any) to avoid unauthenticated rate limits.
gh_headers = _github_headers(github_token)
per_page = max(1, min(int(limit), 100))
data, _ = await _safe_json( data, _ = await _safe_json(
session, session,
f"https://api.github.com/repos/{owner}/{repo}/releases?per_page={int(limit)}", f"https://api.github.com/repos/{owner}/{repo}/releases?per_page={per_page}",
headers=gh_headers, headers=gh_headers,
) )
if isinstance(data, list): if isinstance(data, list):
@@ -597,7 +599,7 @@ async def fetch_repo_versions(
# Tags # Tags
data, _ = await _safe_json( data, _ = await _safe_json(
session, session,
f"https://api.github.com/repos/{owner}/{repo}/tags?per_page={int(limit)}", f"https://api.github.com/repos/{owner}/{repo}/tags?per_page={per_page}",
headers=gh_headers, headers=gh_headers,
) )
if isinstance(data, list): if isinstance(data, list):