82 Commits
0.4.0 ... 0.5.6

Author SHA1 Message Date
e2dfa20789 0.5.6 2026-01-17 08:39:24 +00:00
8e8b58d2d2 0.5.6 2026-01-17 08:37:09 +00:00
76ecaabd98 0.5.6 2026-01-17 08:35:21 +00:00
3f14dc3bd9 Buttons add 8n panel 2026-01-17 08:16:54 +00:00
50a78714cc Button gix 2026-01-17 08:16:10 +00:00
3bf01c91f1 Button fix 2026-01-17 08:15:32 +00:00
7aa14284dd Button fix 2026-01-17 08:14:59 +00:00
24933e980d Button 2026-01-17 08:13:22 +00:00
e10624df6b Button 1 2026-01-17 08:04:02 +00:00
1a1ebd3821 Button 2026-01-17 08:01:43 +00:00
d3d61067db Button 2026-01-17 08:01:23 +00:00
23b605becf Button delete 2026-01-17 07:59:20 +00:00
c07f8615e4 Add 0.5.5 2026-01-16 20:18:34 +00:00
9b209a15bf 0.5.5 2026-01-16 20:17:28 +00:00
30258bd2c0 Fix 0.5.4 to. 5 2026-01-16 20:15:11 +00:00
2c8ca490ea Add 0.5.4 2026-01-16 20:06:08 +00:00
9e8a8e81b9 0.5.4 fix 2026-01-16 20:05:20 +00:00
f5b2534fdb 0.5.4 2026-01-16 20:02:24 +00:00
8b3916c3fa 0.5.4 2026-01-16 19:59:33 +00:00
13e71046f8 Add on 0.5.4 2026-01-16 19:58:58 +00:00
58e3674325 0.5.4 2026-01-16 19:57:49 +00:00
828d84caa3 0.5.3 2026-01-16 19:55:20 +00:00
c18e93406a 0.5.3 2026-01-16 19:20:07 +00:00
9af18ba090 0.5.3 2026-01-16 19:19:23 +00:00
fff50a1580 0.5.3 2026-01-16 19:18:47 +00:00
f8e9967c3a 0.5.3 2026-01-16 19:18:09 +00:00
7bc493eb45 0.5.3 2026-01-16 19:16:39 +00:00
b97b970a45 Dump 2026-01-16 19:16:01 +00:00
593e0c367d 0.5.3 2026-01-16 19:14:35 +00:00
8e0817a64b 0.5.3 2026-01-16 19:13:56 +00:00
dfc7e44565 0.5.3 2026-01-16 19:13:17 +00:00
c9c4f99fbf 0.5.3 2026-01-16 19:12:43 +00:00
37cc11c9ee 0.5.3 2026-01-16 19:12:10 +00:00
9c773c07e8 0.5.3 2026-01-16 19:11:26 +00:00
c04612e159 0.5.3 2026-01-16 19:10:35 +00:00
5796012189 0.5.3 2026-01-16 19:09:47 +00:00
01576153d8 Add 0.5.2 2026-01-16 17:31:49 +00:00
30484a08c1 V0. 5.2 2026-01-16 17:30:51 +00:00
faf122aa1c Fic install 2026-01-16 17:27:38 +00:00
1e86df49e9 Fux insta 2026-01-16 17:27:02 +00:00
df631eec9e Fix install 2026-01-16 17:26:22 +00:00
07240d1268 Add 2026-01-16 16:50:25 +00:00
50587ffbbd 12 2026-01-16 16:17:43 +00:00
d6347e7e59 . 2026-01-16 16:06:52 +00:00
870e77ec13 .. 2026-01-15 20:40:04 +00:00
38fb9fb073 .. 2026-01-15 20:32:21 +00:00
c20bd4dd07 .. 2026-01-15 20:25:34 +00:00
296c816633 . 2026-01-15 20:12:30 +00:00
18a2b5529c . 2026-01-15 19:53:44 +00:00
246fab7e1e . 2026-01-15 19:53:07 +00:00
ce5802721f . 2026-01-15 19:52:05 +00:00
2f46966fe2 . 2026-01-15 19:51:26 +00:00
132f9e27c1 revert 6488b434d8
revert custom_components/bahmcloud_store/core.py aktualisiert
2026-01-15 18:02:30 +00:00
618511be73 revert bffc594da5
revert custom_components/bahmcloud_store/views.py aktualisiert
2026-01-15 18:02:11 +00:00
6488b434d8 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 18:00:56 +00:00
bffc594da5 custom_components/bahmcloud_store/views.py aktualisiert 2026-01-15 18:00:32 +00:00
d78217100c custom_components/bahmcloud_store/manifest.json aktualisiert 2026-01-15 17:53:28 +00:00
09e1ef1af5 CHANGELOG.md aktualisiert 2026-01-15 17:53:13 +00:00
9ad558c9ab custom_components/bahmcloud_store/__init__.py aktualisiert 2026-01-15 17:42:38 +00:00
19df0eea22 custom_components/bahmcloud_store/panel/panel.js aktualisiert 2026-01-15 17:41:58 +00:00
745979b9a6 custom_components/bahmcloud_store/views.py aktualisiert 2026-01-15 17:19:45 +00:00
f861b2490a custom_components/bahmcloud_store/__init__.py aktualisiert 2026-01-15 17:18:45 +00:00
32946c1a98 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 17:17:48 +00:00
a9a681d801 custom_components/bahmcloud_store/views.py aktualisiert 2026-01-15 17:06:43 +00:00
2ae6ac43a5 custom_components/bahmcloud_store/__init__.py aktualisiert 2026-01-15 17:01:41 +00:00
504c126c2c custom_components/bahmcloud_store/__init__.py aktualisiert 2026-01-15 16:59:33 +00:00
85cc97b557 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 16:58:31 +00:00
4ca80a9c88 custom_components/bahmcloud_store/panel/app.js aktualisiert 2026-01-15 16:45:23 +00:00
ac5bc8a6f4 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 16:21:14 +00:00
c4361cc8bd custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 16:13:42 +00:00
1794d579d2 custom_components/bahmcloud_store/panel/app.js aktualisiert 2026-01-15 16:08:48 +00:00
bcfbf7151c custom_components/bahmcloud_store/views.py aktualisiert 2026-01-15 16:04:59 +00:00
38730cdd31 custom_components/bahmcloud_store/manifest.json aktualisiert 2026-01-15 15:55:34 +00:00
5d5d78d727 CHANGELOG.md aktualisiert 2026-01-15 15:55:07 +00:00
67297bfc9c custom_components/bahmcloud_store/providers.py aktualisiert 2026-01-15 15:43:05 +00:00
82fda5dfc4 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 15:39:10 +00:00
907f14b73c custom_components/bahmcloud_store/providers.py aktualisiert 2026-01-15 15:38:42 +00:00
3eefd447ac custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 15:29:39 +00:00
72ce95525c custom_components/bahmcloud_store/providers.py aktualisiert 2026-01-15 15:28:35 +00:00
081f277b92 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 15:15:47 +00:00
28b86e19e1 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 15:09:33 +00:00
e863677428 custom_components/bahmcloud_store/core.py aktualisiert 2026-01-15 15:04:13 +00:00
13 changed files with 2935 additions and 881 deletions

View File

@@ -11,19 +11,119 @@ Sections:
--- ---
## [0.5.6] - 2026-01-17
### Added
- Repository uninstall support directly from the Store UI.
- New backend API endpoint: `POST /api/bcs/uninstall`.
- Automatic **reconcile**: repositories are marked as not installed when their `custom_components` directories are removed manually.
### Changed
- Installation & Updates section extended with an Uninstall button.
- Store state now remains consistent even after manual file system changes.
### Fixed
- Repositories remained marked as installed after manual deletion of their domains.
- UI cache issues caused by outdated static assets.
## [0.5.5] - 2026-01-16
### Fixed
- Update entities now refresh their displayed name after store refreshes, so repository names replace fallback IDs (e.g. `index:1`) reliably.
## [0.5.4] - 2026-01-16
### Added
- Native **Repair fix flow** for restart-required situations.
- “Restart required” issues are now **fixable** and provide a confirmation dialog with a real restart action.
### Changed
- Restart-required issues are automatically cleared after Home Assistant restarts.
- Update entities now fully align with official Home Assistant behavior (Updates screen + Repairs integration).
### Fixed
- Fixed integration startup issues caused by incorrect file placement.
- Resolved circular import and missing setup errors during Home Assistant startup.
- Ensured YAML-based setup remains fully supported.
## [0.5.3] - 2026-01-16
### Added
- Native Home Assistant Update entities for installed repositories (shown under **Settings → System → Updates**).
- Human-friendly update names based on repository name (instead of internal repo IDs like `index:1`).
### Changed
- Update UI now behaves like official Home Assistant integrations (update action is triggered via the HA Updates screen).
## [0.5.2] - 2026-01-16
### Added
- Install and update backend endpoints (`POST /api/bcs/install`, `POST /api/bcs/update`) to install repositories into `/config/custom_components`.
- Installed version tracking based on the actually installed ref (tag/release/branch), stored persistently to support repositories with outdated/`0.0.0` manifest versions.
- API fields `installed_version` (installed ref) and `installed_manifest_version` (informational) to improve transparency in the UI.
### Changed
- Update availability is now evaluated using the stored installed ref (instead of `manifest.json` version), preventing false-positive updates when repositories do not maintain manifest versions.
### Fixed
- Repositories with `manifest.json` version `0.0.0` (or stale versions) no longer appear as constantly requiring updates after installing the latest release/tag.
## [0.5.0] - 2026-01-15
### Added
- Manual refresh button that triggers a full backend refresh (store index + provider data).
- Unified refresh pipeline: startup, timer and UI now use the same refresh logic.
- Cache-busting for store index requests to always fetch the latest store.yaml.
### Improved
- Logging for store index loading and parsing.
- Refresh behavior now deterministic and verifiable via logs.
### Fixed
- Refresh button previously only reloaded cached data.
- Store index was not always reloaded immediately on user action.
## [0.4.1] - 2026-01-15
### Fixed
- Fixed GitLab README loading by using robust raw file endpoints.
- Added support for nested GitLab groups when resolving README paths.
- Added fallback handling for multiple README filenames (`README.md`, `README`, `README.rst`, etc.).
- Added branch fallback logic for README loading (`default`, `main`, `master`).
- Improved error resilience so README loading failures never break the store core.
- No behavior change for GitHub and Gitea providers.
## [0.4.0] - 2026-01-15 ## [0.4.0] - 2026-01-15
### Added ### Added
- Repository detail view (second page) in the Store UI. - Initial public release of the Bahmcloud Store integration.
- README rendering using Home Assistant's `ha-markdown` element. - Sidebar panel with repository browser UI.
- Floating action buttons (FAB): - Support for loading repositories from a central `store.yaml` index.
- Open repository - Support for custom repositories added by the user.
- Reload README - Provider abstraction for GitHub, GitLab and Gitea:
- Install (coming soon) - Fetch repository information (name, description, default branch).
- Update (coming soon) - Resolve latest version from:
- Search field and category filter on the repository list page. - Releases
- New authenticated API endpoint: - Tags
- `GET /api/bcs/readme?repo_id=<id>` returns README markdown (best-effort). - Fallback mechanisms.
- Repository metadata support via:
- `bcs.yaml`
- `hacs.yaml`
- `hacs.json`
- README loading and rendering pipeline:
- Fetch raw README files.
- Server-side Markdown rendering.
- Sanitized HTML output for the panel UI.
- Auto refresh mechanism for store index and repository metadata.
- API endpoints:
- List repositories
- Add custom repository
- Remove repository
Persisted via Home Assistant storage (`.storage/bcs_store`).
- Public static asset endpoint for panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
- Initial API namespace:
- `GET /api/bcs` list merged repositories (index + custom)
- `POST /api/bcs` add custom repository
- `DELETE /api/bcs/custom_repo` remove custom repository
### Changed ### Changed
- Repository cards are now clickable to open the detail view. - Repository cards are now clickable to open the detail view.

19
bcs.yaml Normal file
View File

@@ -0,0 +1,19 @@
name: Bahmcloud Store
description: >
Provider-neutral custom integration store for Home Assistant.
Supports GitHub, GitLab, Gitea and Bahmcloud repositories with
a central index, UI panel and API, similar to HACS but independent.
category: Store
author: Bahmcloud
maintainer: Bahmcloud
domains:
- bahmcloud_store
min_ha_version: "2024.1.0"
homepage: https://git.bahmcloud.de/bahmcloud/bahmcloud_store
issues: https://git.bahmcloud.de/bahmcloud/bahmcloud_store/issues
source: https://git.bahmcloud.de/bahmcloud/bahmcloud_store

View File

@@ -4,10 +4,9 @@ import logging
from datetime import timedelta from datetime import timedelta
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.const import Platform
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.components.panel_custom import async_register_panel from homeassistant.components.panel_custom import async_register_panel
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.discovery import async_load_platform
from .core import BCSCore, BCSConfig, BCSError from .core import BCSCore, BCSConfig, BCSError
@@ -20,20 +19,44 @@ CONF_STORE_URL = "store_url"
async def async_setup(hass: HomeAssistant, config: dict) -> bool: async def async_setup(hass: HomeAssistant, config: dict) -> bool:
cfg = config.get(DOMAIN, {}) cfg = config.get(DOMAIN, {}) or {}
store_url = cfg.get(CONF_STORE_URL, DEFAULT_STORE_URL) store_url = cfg.get(CONF_STORE_URL, DEFAULT_STORE_URL)
core = BCSCore(hass, BCSConfig(store_url=store_url)) core = BCSCore(hass, BCSConfig(store_url=store_url))
hass.data[DOMAIN] = core hass.data[DOMAIN] = core
await core.register_http_views() await core.async_initialize()
# Provide native Update entities in Settings -> System -> Updates.
# This integration is YAML-based (async_setup), therefore we load the platform manually.
await async_load_platform(hass, "update", DOMAIN, {}, config)
from .views import (
StaticAssetsView,
BCSApiView,
BCSReadmeView,
BCSCustomRepoView,
BCSInstallView,
BCSUpdateView,
BCSUninstallView,
BCSRestartView,
)
hass.http.register_view(StaticAssetsView())
hass.http.register_view(BCSApiView(core))
hass.http.register_view(BCSReadmeView(core))
hass.http.register_view(BCSCustomRepoView(core))
hass.http.register_view(BCSInstallView(core))
hass.http.register_view(BCSUpdateView(core))
hass.http.register_view(BCSUninstallView(core))
hass.http.register_view(BCSRestartView(core))
# RESTORE: keep the module_url pattern that worked for you
await async_register_panel( await async_register_panel(
hass, hass,
frontend_url_path="bahmcloud-store", frontend_url_path="bahmcloud-store",
webcomponent_name="bahmcloud-store-panel", webcomponent_name="bahmcloud-store-panel",
module_url="/api/bahmcloud_store_static/panel.js?v=42", # IMPORTANT: bump v to avoid caching old JS
module_url="/api/bahmcloud_store_static/panel.js?v=102",
sidebar_title="Bahmcloud Store", sidebar_title="Bahmcloud Store",
sidebar_icon="mdi:store", sidebar_icon="mdi:store",
require_admin=True, require_admin=True,
@@ -41,19 +64,19 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
) )
try: try:
await core.refresh() await core.full_refresh(source="startup")
except BCSError as e: except BCSError as e:
_LOGGER.error("Initial refresh failed: %s", e) _LOGGER.error("Initial refresh failed: %s", e)
async def periodic(_now) -> None: async def periodic(_now) -> None:
try: try:
await core.refresh() await core.full_refresh(source="timer")
core.signal_updated()
except BCSError as e: except BCSError as e:
_LOGGER.warning("Periodic refresh failed: %s", e) _LOGGER.warning("Periodic refresh failed: %s", e)
except Exception as e: # pylint: disable=broad-exception-caught
_LOGGER.exception("Unexpected error during periodic refresh: %s", e)
interval = timedelta(seconds=int(core.refresh_seconds or 300)) interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
async_track_time_interval(hass, periodic, interval) async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds))
await async_load_platform(hass, Platform.UPDATE, DOMAIN, {}, config)
return True return True

View File

@@ -1,32 +1,44 @@
from __future__ import annotations from __future__ import annotations
import asyncio import asyncio
import hashlib
import json import json
import logging import logging
import time
import shutil
import tempfile
import zipfile
from dataclasses import dataclass from dataclasses import dataclass
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from urllib.parse import urlparse from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit, urlparse
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers import issue_registry as ir
from homeassistant.util import yaml as ha_yaml from homeassistant.util import yaml as ha_yaml
from .storage import BCSStorage, CustomRepo from .storage import BCSStorage, CustomRepo
from .views import StaticAssetsView, BCSApiView, BCSReadmeView from .providers import fetch_repo_info, detect_provider, RepoInfo, fetch_readme_markdown
from .custom_repo_view import BCSCustomRepoView
from .providers import fetch_repo_info, detect_provider, RepoInfo
from .metadata import fetch_repo_metadata, RepoMetadata from .metadata import fetch_repo_metadata, RepoMetadata
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
DOMAIN = "bahmcloud_store" DOMAIN = "bahmcloud_store"
SIGNAL_UPDATED = f"{DOMAIN}_updated"
RESTART_REQUIRED_ISSUE_ID = "restart_required"
class BCSError(Exception): class BCSError(Exception):
"""BCS core error.""" """BCS core error."""
class BCSInstallError(BCSError):
"""BCS installation/update error."""
@dataclass @dataclass
class BCSConfig: class BCSConfig:
store_url: str store_url: str
@@ -46,7 +58,7 @@ class RepoItem:
default_branch: str | None = None default_branch: str | None = None
latest_version: str | None = None latest_version: str | None = None
latest_version_source: str | None = None # "release" | "tag" | None latest_version_source: str | None = None # "release" | "tag" | "atom" | None
meta_source: str | None = None meta_source: str | None = None
meta_name: str | None = None meta_name: str | None = None
@@ -66,9 +78,28 @@ class BCSCore:
self.repos: dict[str, RepoItem] = {} self.repos: dict[str, RepoItem] = {}
self._listeners: list[callable] = [] self._listeners: list[callable] = []
self.version: str = self._read_manifest_version() # Will be loaded asynchronously (no blocking IO in event loop)
self.version: str = "unknown"
def _read_manifest_version(self) -> str: # Diagnostics (helps verify refresh behavior)
self.last_index_url: str | None = None
self.last_index_bytes: int | None = None
self.last_index_hash: str | None = None
self.last_index_loaded_at: float | None = None
self._install_lock = asyncio.Lock()
self._installed_cache: dict[str, Any] = {}
async def async_initialize(self) -> None:
"""Async initialization that avoids blocking file IO."""
self.version = await self._read_manifest_version_async()
await self._refresh_installed_cache()
# After a successful HA restart, restart-required is no longer relevant.
self._clear_restart_required_issue()
async def _read_manifest_version_async(self) -> str:
def _read() -> str:
try: try:
manifest_path = Path(__file__).resolve().parent / "manifest.json" manifest_path = Path(__file__).resolve().parent / "manifest.json"
data = json.loads(manifest_path.read_text(encoding="utf-8")) data = json.loads(manifest_path.read_text(encoding="utf-8"))
@@ -77,25 +108,64 @@ class BCSCore:
except Exception: except Exception:
return "unknown" return "unknown"
return await self.hass.async_add_executor_job(_read)
def add_listener(self, cb) -> None: def add_listener(self, cb) -> None:
self._listeners.append(cb) self._listeners.append(cb)
def signal_updated(self) -> None: def signal_updated(self) -> None:
# Notify entities/platforms (e.g. update entities) that BCS data changed.
async_dispatcher_send(self.hass, SIGNAL_UPDATED)
for cb in list(self._listeners): for cb in list(self._listeners):
try: try:
cb() cb()
except Exception: except Exception:
pass pass
async def register_http_views(self) -> None: def _mark_restart_required(self) -> None:
self.hass.http.register_view(StaticAssetsView()) """Show a 'restart required' issue in Home Assistant Settings.
self.hass.http.register_view(BCSApiView(self))
self.hass.http.register_view(BCSReadmeView(self)) IMPORTANT:
self.hass.http.register_view(BCSCustomRepoView(self)) - is_fixable=True enables the "Fix/OK" button
- the real action is implemented in repairs.py (fix flow)
"""
try:
ir.async_create_issue(
self.hass,
DOMAIN,
RESTART_REQUIRED_ISSUE_ID,
is_fixable=True, # <-- IMPORTANT: show "Fix" button
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key=RESTART_REQUIRED_ISSUE_ID,
)
except Exception:
_LOGGER.debug("Failed to create restart required issue", exc_info=True)
def _clear_restart_required_issue(self) -> None:
"""Remove restart required issue after HA restarted."""
try:
if hasattr(ir, "async_delete_issue"):
ir.async_delete_issue(self.hass, DOMAIN, RESTART_REQUIRED_ISSUE_ID)
elif hasattr(ir, "async_remove_issue"):
ir.async_remove_issue(self.hass, DOMAIN, RESTART_REQUIRED_ISSUE_ID)
except Exception:
_LOGGER.debug("Failed to clear restart required issue", exc_info=True)
async def full_refresh(self, source: str = "manual") -> None:
"""Single refresh entry-point used by both timer and manual button."""
_LOGGER.info("BCS full refresh triggered (source=%s)", source)
await self.refresh()
self.signal_updated()
def get_repo(self, repo_id: str) -> RepoItem | None: def get_repo(self, repo_id: str) -> RepoItem | None:
return self.repos.get(repo_id) return self.repos.get(repo_id)
def get_installed(self, repo_id: str) -> dict[str, Any] | None:
"""Return cached installation info for a repo_id (no I/O)."""
data = (self._installed_cache or {}).get(repo_id)
return data if isinstance(data, dict) else None
async def refresh(self) -> None: async def refresh(self) -> None:
index_repos, refresh_seconds = await self._load_index_repos() index_repos, refresh_seconds = await self._load_index_repos()
self.refresh_seconds = refresh_seconds self.refresh_seconds = refresh_seconds
@@ -121,6 +191,13 @@ class BCSCore:
await self._enrich_and_resolve(merged) await self._enrich_and_resolve(merged)
self.repos = merged self.repos = merged
_LOGGER.info(
"BCS refresh complete: repos=%s (index=%s, custom=%s)",
len(self.repos),
len([r for r in self.repos.values() if r.source == "index"]),
len([r for r in self.repos.values() if r.source == "custom"]),
)
async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None: async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None:
sem = asyncio.Semaphore(6) sem = asyncio.Semaphore(6)
@@ -155,16 +232,72 @@ class BCSCore:
await asyncio.gather(*(process_one(r) for r in merged.values()), return_exceptions=True) await asyncio.gather(*(process_one(r) for r in merged.values()), return_exceptions=True)
async def _load_index_repos(self) -> tuple[list[RepoItem], int]: def _add_cache_buster(self, url: str) -> str:
parts = urlsplit(url)
q = dict(parse_qsl(parts.query, keep_blank_values=True))
q["t"] = str(int(time.time()))
new_query = urlencode(q)
return urlunsplit((parts.scheme, parts.netloc, parts.path, new_query, parts.fragment))
def _gitea_src_to_raw(self, url: str) -> str:
parts = urlsplit(url)
path = parts.path
path2 = path.replace("/src/branch/", "/raw/branch/")
if path2 == path:
return url
return urlunsplit((parts.scheme, parts.netloc, path2, parts.query, parts.fragment))
async def _fetch_store_text(self, url: str) -> str:
session = async_get_clientsession(self.hass) session = async_get_clientsession(self.hass)
try:
async with session.get(self.config.store_url, timeout=20) as resp: headers = {
"User-Agent": "BahmcloudStore (Home Assistant)",
"Cache-Control": "no-cache, no-store, max-age=0",
"Pragma": "no-cache",
"Expires": "0",
}
async with session.get(url, timeout=30, headers=headers) as resp:
if resp.status != 200: if resp.status != 200:
raise BCSError(f"store_url returned {resp.status}") raise BCSError(f"store_url returned {resp.status}")
raw = await resp.text() return await resp.text()
async def _load_index_repos(self) -> tuple[list[RepoItem], int]:
store_url = (self.config.store_url or "").strip()
if not store_url:
raise BCSError("store_url is empty")
url = self._add_cache_buster(store_url)
try:
raw = await self._fetch_store_text(url)
# If we fetched a HTML page (wrong endpoint), attempt raw conversion.
if "<html" in raw.lower() or "<!doctype html" in raw.lower():
fallback = self._add_cache_buster(self._gitea_src_to_raw(store_url))
if fallback != url:
_LOGGER.warning("BCS store index looked like HTML, retrying raw URL")
raw = await self._fetch_store_text(fallback)
url = fallback
except Exception as e: except Exception as e:
raise BCSError(f"Failed fetching store index: {e}") from e raise BCSError(f"Failed fetching store index: {e}") from e
# Diagnostics
b = raw.encode("utf-8", errors="replace")
h = hashlib.sha256(b).hexdigest()[:12]
self.last_index_url = url
self.last_index_bytes = len(b)
self.last_index_hash = h
self.last_index_loaded_at = time.time()
_LOGGER.info(
"BCS index loaded: url=%s bytes=%s sha=%s",
self.last_index_url,
self.last_index_bytes,
self.last_index_hash,
)
try: try:
data = ha_yaml.parse_yaml(raw) data = ha_yaml.parse_yaml(raw)
if not isinstance(data, dict): if not isinstance(data, dict):
@@ -179,42 +312,66 @@ class BCSCore:
for i, r in enumerate(repos): for i, r in enumerate(repos):
if not isinstance(r, dict): if not isinstance(r, dict):
continue continue
url = str(r.get("url", "")).strip() repo_url = str(r.get("url", "")).strip()
if not url: if not repo_url:
continue continue
name = str(r.get("name") or url).strip() name = str(r.get("name") or repo_url).strip()
items.append( items.append(
RepoItem( RepoItem(
id=f"index:{i}", id=f"index:{i}",
name=name, name=name,
url=url, url=repo_url,
source="index", source="index",
) )
) )
_LOGGER.info("BCS index parsed: repos=%s refresh_seconds=%s", len(items), refresh_seconds)
return items, refresh_seconds return items, refresh_seconds
except Exception as e: except Exception as e:
raise BCSError(f"Invalid store.yaml: {e}") from e raise BCSError(f"Invalid store.yaml: {e}") from e
async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo: async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
repo = await self.storage.add_custom_repo(url=url, name=name) url = str(url or "").strip()
await self.refresh() if not url:
self.signal_updated() raise BCSError("Missing url")
return repo
c = await self.storage.add_custom_repo(url, name)
await self.full_refresh(source="custom_repo_add")
return c
async def remove_custom_repo(self, repo_id: str) -> None: async def remove_custom_repo(self, repo_id: str) -> None:
await self.storage.remove_custom_repo(repo_id) await self.storage.remove_custom_repo(repo_id)
await self.refresh() await self.full_refresh(source="custom_repo_remove")
self.signal_updated()
async def list_custom_repos(self) -> list[CustomRepo]: async def list_custom_repos(self) -> list[CustomRepo]:
return await self.storage.list_custom_repos() return await self.storage.list_custom_repos()
def list_repos_public(self) -> list[dict[str, Any]]: def list_repos_public(self) -> list[dict[str, Any]]:
out: list[dict[str, Any]] = [] out: list[dict[str, Any]] = []
installed_map: dict[str, Any] = getattr(self, "_installed_cache", {}) or {}
if not isinstance(installed_map, dict):
installed_map = {}
for r in self.repos.values(): for r in self.repos.values():
resolved_description = r.meta_description or r.provider_description inst = installed_map.get(r.id)
installed = bool(inst)
installed_domains: list[str] = []
installed_version: str | None = None
installed_manifest_version: str | None = None
if isinstance(inst, dict):
d = inst.get("domains") or []
if isinstance(d, list):
installed_domains = [str(x) for x in d if str(x).strip()]
v = inst.get("installed_version")
installed_version = str(v) if v is not None else None
mv = inst.get("installed_manifest_version")
installed_manifest_version = str(mv) if mv is not None else None
out.append( out.append(
{ {
"id": r.id, "id": r.id,
@@ -223,101 +380,302 @@ class BCSCore:
"source": r.source, "source": r.source,
"owner": r.owner, "owner": r.owner,
"provider": r.provider, "provider": r.provider,
"repo_name": r.provider_repo_name,
"meta_source": r.meta_source, "description": r.provider_description or r.meta_description,
"meta_name": r.meta_name, "default_branch": r.default_branch,
"meta_description": r.meta_description,
"meta_category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"provider_repo_name": r.provider_repo_name,
"provider_description": r.provider_description,
"description": resolved_description,
"category": r.meta_category,
"latest_version": r.latest_version, "latest_version": r.latest_version,
"latest_version_source": r.latest_version_source, "latest_version_source": r.latest_version_source,
"category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"meta_source": r.meta_source,
"installed": installed,
"installed_version": installed_version,
"installed_manifest_version": installed_manifest_version,
"installed_domains": installed_domains,
} }
) )
return out return out
# ----------------------------
# README fetching
# ----------------------------
def _normalize_repo_name(self, name: str | None) -> str | None:
if not name:
return None
n = name.strip()
if n.endswith(".git"):
n = n[:-4]
return n or None
def _split_owner_repo(self, repo_url: str) -> tuple[str | None, str | None]:
u = urlparse(repo_url.rstrip("/"))
parts = [p for p in u.path.strip("/").split("/") if p]
if len(parts) < 2:
return None, None
owner = parts[0].strip() or None
repo = self._normalize_repo_name(parts[1])
return owner, repo
def _is_github(self, repo_url: str) -> bool:
return "github.com" in urlparse(repo_url).netloc.lower()
def _is_gitea(self, repo_url: str) -> bool:
host = urlparse(repo_url).netloc.lower()
return host and "github.com" not in host and "gitlab.com" not in host
async def _fetch_text(self, url: str) -> str | None:
session = async_get_clientsession(self.hass)
try:
async with session.get(url, timeout=20) as resp:
if resp.status != 200:
return None
return await resp.text()
except Exception:
return None
async def fetch_readme_markdown(self, repo_id: str) -> str | None: async def fetch_readme_markdown(self, repo_id: str) -> str | None:
repo = self.get_repo(repo_id) repo = self.get_repo(repo_id)
if not repo: if not repo:
return None return None
owner, name = self._split_owner_repo(repo.url) return await fetch_readme_markdown(
if not owner or not name: self.hass,
repo.url,
provider=repo.provider,
default_branch=repo.default_branch,
)
def _pick_ref_for_install(self, repo: RepoItem) -> str:
if repo.latest_version and str(repo.latest_version).strip():
return str(repo.latest_version).strip()
if repo.default_branch and str(repo.default_branch).strip():
return str(repo.default_branch).strip()
return "main"
def _build_zip_url(self, repo_url: str, ref: str) -> str:
ref = (ref or "").strip()
if not ref:
raise BCSInstallError("Missing ref for ZIP download")
u = urlparse(repo_url.rstrip("/"))
host = (u.netloc or "").lower()
parts = [p for p in u.path.strip("/").split("/") if p]
if len(parts) < 2:
raise BCSInstallError("Invalid repository URL (missing owner/repo)")
owner = parts[0]
repo = parts[1]
if repo.endswith(".git"):
repo = repo[:-4]
if "github.com" in host:
return f"https://codeload.github.com/{owner}/{repo}/zip/{ref}"
if "gitlab" in host:
base = f"{u.scheme}://{u.netloc}"
path = u.path.strip("/")
if path.endswith(".git"):
path = path[:-4]
return f"{base}/{path}/-/archive/{ref}/{repo}-{ref}.zip"
base = f"{u.scheme}://{u.netloc}"
path = u.path.strip("/")
if path.endswith(".git"):
path = path[:-4]
return f"{base}/{path}/archive/{ref}.zip"
async def _download_zip(self, url: str, dest: Path) -> None:
session = async_get_clientsession(self.hass)
headers = {
"User-Agent": "BahmcloudStore (Home Assistant)",
"Cache-Control": "no-cache, no-store, max-age=0",
"Pragma": "no-cache",
}
async with session.get(url, timeout=120, headers=headers) as resp:
if resp.status != 200:
raise BCSInstallError(f"zip_url returned {resp.status}")
data = await resp.read()
await self.hass.async_add_executor_job(dest.write_bytes, data)
async def _extract_zip(self, zip_path: Path, extract_dir: Path) -> None:
def _extract() -> None:
with zipfile.ZipFile(zip_path, "r") as zf:
zf.extractall(extract_dir)
await self.hass.async_add_executor_job(_extract)
@staticmethod
def _find_custom_components_root(extract_root: Path) -> Path | None:
direct = extract_root / "custom_components"
if direct.exists() and direct.is_dir():
return direct
for child in extract_root.iterdir():
candidate = child / "custom_components"
if candidate.exists() and candidate.is_dir():
return candidate
return None return None
async def _copy_domain_dir(self, src_domain_dir: Path, domain: str) -> None:
    """Atomically replace /config/custom_components/<domain> with *src_domain_dir*.

    Reconstructed from merge-mangled diff lines: copies into a temporary
    sibling directory first, removes any existing target, then renames the
    temp directory into place. All blocking work runs in the executor.
    """
    dest_root = Path(self.hass.config.path("custom_components"))
    target = dest_root / domain
    # Unique temp name so a crashed earlier install cannot collide.
    tmp_target = dest_root / f".bcs_tmp_{domain}_{int(time.time())}"

    def _copy() -> None:
        if tmp_target.exists():
            shutil.rmtree(tmp_target, ignore_errors=True)
        shutil.copytree(src_domain_dir, tmp_target, dirs_exist_ok=True)
        if target.exists():
            shutil.rmtree(target, ignore_errors=True)
        tmp_target.rename(target)

    await self.hass.async_add_executor_job(_copy)
async def _read_installed_manifest_version(self, domain: str) -> str | None:
    """Read the ``version`` field from an installed domain's manifest.json.

    Returns None when the manifest is missing, unreadable, or has no
    version. (Removes duplicated ``return None`` merge artifacts and dead
    left-column diff remnants.)
    """
    def _read() -> str | None:
        try:
            p = Path(self.hass.config.path("custom_components", domain, "manifest.json"))
            if not p.exists():
                return None
            data = json.loads(p.read_text(encoding="utf-8"))
            v = data.get("version")
            return str(v) if v else None
        except Exception:
            # Best effort only: the manifest version is informational.
            return None

    return await self.hass.async_add_executor_job(_read)
async def _refresh_installed_cache(self) -> None:
    """Refresh the installed cache and reconcile it with the filesystem.

    If a user manually deletes a domain folder under /config/custom_components,
    the installed flag is removed from storage so the Store UI does not show
    a stale "installed" state. (Removes a stray ``return None`` merge
    artifact that was fused onto the def line.)
    """
    try:
        items = await self.storage.list_installed_repos()
        cache: dict[str, Any] = {}
        # Determine which installed repos still exist on disk.
        cc_root = Path(self.hass.config.path("custom_components"))
        to_remove: list[str] = []
        for it in items:
            domains = [str(d) for d in (it.domains or []) if str(d).strip()]
            # A repo is considered "present" if at least one of its domains
            # exists and contains a manifest.json.
            present = False
            for d in domains:
                p = cc_root / d
                if p.is_dir() and (p / "manifest.json").exists():
                    present = True
                    break
            if not present:
                to_remove.append(it.repo_id)
                continue
            cache[it.repo_id] = {
                "installed": True,
                "domains": domains,
                "installed_version": it.installed_version,
                "installed_manifest_version": it.installed_manifest_version,
                "ref": it.ref,
                "installed_at": it.installed_at,
            }
        # Remove stale installed entries from storage.
        for rid in to_remove:
            try:
                await self.storage.remove_installed_repo(rid)
                _LOGGER.info("BCS reconcile: removed stale installed repo_id=%s", rid)
            except Exception:
                _LOGGER.debug("BCS reconcile: failed removing stale repo_id=%s", rid, exc_info=True)
        self._installed_cache = cache
    except Exception:
        # Never let reconciliation break the store; fall back to empty cache.
        self._installed_cache = {}
async def uninstall_repo(self, repo_id: str) -> dict[str, Any]:
    """Uninstall a repository by deleting its installed domains and clearing storage."""
    # Serialize with installs/updates so concurrent operations cannot race
    # on the custom_components tree.
    async with self._install_lock:
        inst = await self.storage.get_installed_repo(repo_id)
        if not inst:
            # Already uninstalled.
            await self._refresh_installed_cache()
            self.signal_updated()
            return {"ok": True, "repo_id": repo_id, "removed": [], "restart_required": False}
        cc_root = Path(self.hass.config.path("custom_components"))
        removed: list[str] = []

        def _remove_dir(path: Path) -> None:
            # Blocking filesystem work; executed in the executor below.
            if path.exists() and path.is_dir():
                shutil.rmtree(path, ignore_errors=True)

        for domain in inst.domains:
            d = str(domain).strip()
            if not d:
                continue
            target = cc_root / d
            await self.hass.async_add_executor_job(_remove_dir, target)
            removed.append(d)
        await self.storage.remove_installed_repo(repo_id)
        await self._refresh_installed_cache()
        # Show restart required in Settings.
        if removed:
            self._mark_restart_required()
        _LOGGER.info("BCS uninstall complete: repo_id=%s removed_domains=%s", repo_id, removed)
        self.signal_updated()
        return {"ok": True, "repo_id": repo_id, "removed": removed, "restart_required": bool(removed)}
async def install_repo(self, repo_id: str) -> dict[str, Any]:
    """Download, extract and install a repository's custom_components.

    Raises BCSInstallError when the repo_id is unknown, the ZIP has no
    custom_components folder, or no domain carries a manifest.json.
    """
    repo = self.get_repo(repo_id)
    if not repo:
        raise BCSInstallError(f"repo_id not found: {repo_id}")
    # One install/update/uninstall at a time.
    async with self._install_lock:
        ref = self._pick_ref_for_install(repo)
        zip_url = self._build_zip_url(repo.url, ref)
        _LOGGER.info("BCS install started: repo_id=%s ref=%s zip_url=%s", repo_id, ref, zip_url)
        with tempfile.TemporaryDirectory(prefix="bcs_install_") as td:
            tmp = Path(td)
            zip_path = tmp / "repo.zip"
            extract_dir = tmp / "extract"
            extract_dir.mkdir(parents=True, exist_ok=True)
            await self._download_zip(zip_url, zip_path)
            await self._extract_zip(zip_path, extract_dir)
            cc_root = self._find_custom_components_root(extract_dir)
            if not cc_root:
                raise BCSInstallError("custom_components folder not found in repository ZIP")
            installed_domains: list[str] = []
            # Install every domain directory that carries a manifest.json.
            for domain_dir in cc_root.iterdir():
                if not domain_dir.is_dir():
                    continue
                manifest = domain_dir / "manifest.json"
                if not manifest.exists():
                    continue
                domain = domain_dir.name
                await self._copy_domain_dir(domain_dir, domain)
                installed_domains.append(domain)
            if not installed_domains:
                raise BCSInstallError("No integrations found under custom_components/ (missing manifest.json)")
            # The first domain's manifest version is recorded as informational only;
            # the authoritative "installed version" is the ref that was downloaded.
            installed_manifest_version = await self._read_installed_manifest_version(installed_domains[0])
            installed_version = ref
            await self.storage.set_installed_repo(
                repo_id=repo_id,
                url=repo.url,
                domains=installed_domains,
                installed_version=installed_version,
                installed_manifest_version=installed_manifest_version,
                ref=ref,
            )
            await self._refresh_installed_cache()
            self._mark_restart_required()
            _LOGGER.info(
                "BCS install complete: repo_id=%s domains=%s installed_ref=%s manifest_version=%s",
                repo_id,
                installed_domains,
                installed_version,
                installed_manifest_version,
            )
            self.signal_updated()
            return {
                "ok": True,
                "repo_id": repo_id,
                "domains": installed_domains,
                "installed_version": installed_version,
                "installed_manifest_version": installed_manifest_version,
                "restart_required": True,
            }
async def update_repo(self, repo_id: str) -> dict[str, Any]:
    """Update a repository by re-running the install flow (same result dict)."""
    _LOGGER.info("BCS update started: repo_id=%s", repo_id)
    return await self.install_repo(repo_id)
async def request_restart(self) -> None:
    """Ask Home Assistant Core to restart (fire-and-forget)."""
    await self.hass.services.async_call(
        "homeassistant",
        "restart",
        {},
        blocking=False,
    )

View File

@@ -1,9 +1,10 @@
{ {
"domain": "bahmcloud_store", "domain": "bahmcloud_store",
"name": "Bahmcloud Store", "name": "Bahmcloud Store",
"version": "0.4.0", "version": "0.5.6",
"documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store", "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
"platforms": ["update"],
"requirements": [], "requirements": [],
"codeowners": [], "codeowners": ["@bahmcloud"],
"iot_class": "local_polling" "iot_class": "local_polling"
} }

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,8 @@ import xml.etree.ElementTree as ET
from dataclasses import dataclass from dataclasses import dataclass
from urllib.parse import quote_plus, urlparse from urllib.parse import quote_plus, urlparse
from packaging.version import InvalidVersion, Version
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -51,13 +53,8 @@ def detect_provider(repo_url: str) -> str:
return "github" return "github"
if "gitlab" in host: if "gitlab" in host:
return "gitlab" return "gitlab"
owner, repo = _split_owner_repo(repo_url)
if owner and repo:
return "gitea" return "gitea"
return "generic"
async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20): async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20):
try: try:
@@ -82,130 +79,113 @@ async def _safe_text(session, url: str, *, headers: dict | None = None, timeout:
def _extract_tag_from_github_url(url: str) -> str | None: def _extract_tag_from_github_url(url: str) -> str | None:
m = re.search(r"/releases/tag/([^/?#]+)", url) m = re.search(r"/releases/tag/([^/?#]+)", url or "")
if m: if not m:
return m.group(1)
m = re.search(r"/tag/([^/?#]+)", url)
if m:
return m.group(1)
return None return None
return m.group(1).strip() or None
def _strip_html(s: str) -> str:
# minimal HTML entity cleanup for meta descriptions
out = (
s.replace("&amp;", "&")
.replace("&quot;", '"')
.replace("&#39;", "'")
.replace("&lt;", "<")
.replace("&gt;", ">")
)
return re.sub(r"\s+", " ", out).strip()
def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None: def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None:
# Extract <meta property="og:description" content="..."> if not html:
# or <meta name="description" content="..."> return None
if prop: if prop:
# property="..." content="..." m = re.search(rf'<meta\s+property="{re.escape(prop)}"\s+content="([^"]+)"', html)
m = re.search(
r'<meta[^>]+property=["\']' + re.escape(prop) + r'["\'][^>]+content=["\']([^"\']+)["\']',
html,
flags=re.IGNORECASE,
)
if m: if m:
return _strip_html(m.group(1)) return m.group(1).strip()
m = re.search(
r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+property=["\']' + re.escape(prop) + r'["\']',
html,
flags=re.IGNORECASE,
)
if m:
return _strip_html(m.group(1))
if name: if name:
m = re.search( m = re.search(rf'<meta\s+name="{re.escape(name)}"\s+content="([^"]+)"', html)
r'<meta[^>]+name=["\']' + re.escape(name) + r'["\'][^>]+content=["\']([^"\']+)["\']',
html,
flags=re.IGNORECASE,
)
if m: if m:
return _strip_html(m.group(1)) return m.group(1).strip()
m = re.search(
r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+name=["\']' + re.escape(name) + r'["\']',
html,
flags=re.IGNORECASE,
)
if m:
return _strip_html(m.group(1))
return None return None
async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None: def _semver_key(tag: str) -> Version | None:
""" t = (tag or "").strip()
GitHub API may be rate-limited; fetch public HTML and read meta description. if not t:
""" return None
session = async_get_clientsession(hass) if t.startswith(("v", "V")):
headers = { t = t[1:]
"User-Agent": UA, try:
"Accept": "text/html,application/xhtml+xml", return Version(t)
} except InvalidVersion:
return None
html, status = await _safe_text(session, f"https://github.com/{owner}/{repo}", headers=headers)
if not html or status != 200: def _pick_highest_semver(tags: list[str]) -> str | None:
parsed: list[tuple[Version, str]] = []
for t in tags:
if not isinstance(t, str):
continue
ts = t.strip()
if not ts:
continue
v = _semver_key(ts)
if v is not None:
parsed.append((v, ts))
if not parsed:
return None
parsed.sort(key=lambda x: x[0], reverse=True)
return parsed[0][1]
async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None:
session = async_get_clientsession(hass)
url = f"https://github.com/{owner}/{repo}"
html, status = await _safe_text(session, url, headers={"User-Agent": UA})
if status != 200 or not html:
return None return None
desc = _extract_meta(html, prop="og:description") desc = _extract_meta(html, prop="og:description")
if desc: if desc:
return desc return desc
desc = _extract_meta(html, name="description") return _extract_meta(html, name="description")
if desc:
return desc
return None
async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]: async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
headers = {"User-Agent": UA, "Accept": "application/atom+xml,text/xml;q=0.9,*/*;q=0.8"} url = f"https://github.com/{owner}/{repo}/releases.atom"
atom, status = await _safe_text(session, url, headers={"User-Agent": UA})
xml_text, _ = await _safe_text(session, f"https://github.com/{owner}/{repo}/releases.atom", headers=headers) if status != 200 or not atom:
if not xml_text:
return None, None return None, None
try: try:
root = ET.fromstring(xml_text) root = ET.fromstring(atom)
except Exception: ns = {"a": "http://www.w3.org/2005/Atom"}
entry = root.find("a:entry", ns)
if entry is None:
return None, None return None, None
link = entry.find("a:link", ns)
for entry in root.findall(".//{*}entry"): if link is not None and link.attrib.get("href"):
for link in entry.findall(".//{*}link"): tag = _extract_tag_from_github_url(link.attrib["href"])
href = link.attrib.get("href")
if not href:
continue
tag = _extract_tag_from_github_url(href)
if tag: if tag:
return tag, "atom" return tag, "atom"
title = entry.find("a:title", ns)
if title is not None and title.text:
t = title.text.strip()
if t:
return t, "atom"
except Exception:
return None, None
return None, None return None, None
async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]: async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
headers = {"User-Agent": UA}
url = f"https://github.com/{owner}/{repo}/releases/latest" url = f"https://github.com/{owner}/{repo}/releases/latest"
try: try:
async with session.head(url, allow_redirects=False, timeout=15, headers=headers) as resp: async with session.get(url, timeout=20, headers={"User-Agent": UA}, allow_redirects=True) as resp:
if resp.status in (301, 302, 303, 307, 308): if resp.status != 200:
loc = resp.headers.get("Location") return None, None
if loc: final = str(resp.url)
tag = _extract_tag_from_github_url(loc) tag = _extract_tag_from_github_url(final)
if tag: if tag:
return tag, "release" return tag, "release"
except Exception: except Exception:
pass return None, None
return None, None return None, None
@@ -213,75 +193,125 @@ async def _github_latest_version_api(hass: HomeAssistant, owner: str, repo: str)
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
headers = {"Accept": "application/vnd.github+json", "User-Agent": UA} headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers) data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
if isinstance(data, dict): if isinstance(data, dict) and data.get("tag_name"):
tag = data.get("tag_name") or data.get("name") return str(data["tag_name"]), "release"
if isinstance(tag, str) and tag.strip():
return tag.strip(), "release"
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers) # No releases -> pick highest semver from many tags (instead of per_page=1)
if isinstance(data, list) and data: if status == 404:
tag = data[0].get("name") data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", headers=headers)
if isinstance(tag, str) and tag.strip(): tags: list[str] = []
return tag.strip(), "tag" if isinstance(data, list):
for t in data:
if isinstance(t, dict) and t.get("name"):
tags.append(str(t["name"]))
best = _pick_highest_semver(tags)
if best:
return best, "tag"
# fallback: keep old behavior (first tag)
if tags:
return tags[0], "tag"
return None, None return None, None
async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]: async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
tag, src = await _github_latest_version_atom(hass, owner, repo)
if tag:
return tag, src
tag, src = await _github_latest_version_redirect(hass, owner, repo) tag, src = await _github_latest_version_redirect(hass, owner, repo)
if tag: if tag:
return tag, src return tag, src
return await _github_latest_version_api(hass, owner, repo) tag, src = await _github_latest_version_api(hass, owner, repo)
if tag:
return tag, src
return await _github_latest_version_atom(hass, owner, repo)
async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]: async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=1") # releases: fetch multiple, pick highest semver (instead of limit=1)
if isinstance(data, list) and data: data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=50")
tag = data[0].get("tag_name") or data[0].get("name") rel_tags: list[str] = []
if isinstance(tag, str) and tag.strip(): if isinstance(data, list):
return tag.strip(), "release" for r in data:
if isinstance(r, dict) and r.get("tag_name"):
rel_tags.append(str(r["tag_name"]))
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=1") best_rel = _pick_highest_semver(rel_tags)
if isinstance(data, list) and data: if best_rel:
tag = data[0].get("name") return best_rel, "release"
if isinstance(tag, str) and tag.strip(): if rel_tags:
return tag.strip(), "tag" return rel_tags[0], "release"
# tags: fetch multiple, pick highest semver (instead of limit=1)
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=50")
tags: list[str] = []
if isinstance(data, list):
for t in data:
if isinstance(t, dict) and t.get("name"):
tags.append(str(t["name"]))
best = _pick_highest_semver(tags)
if best:
return best, "tag"
if tags:
return tags[0], "tag"
return None, None return None, None
async def _gitlab_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]: async def _gitlab_latest_version(
hass: HomeAssistant, base: str, owner: str, repo: str
) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass) session = async_get_clientsession(hass)
headers = {"User-Agent": UA} headers = {"User-Agent": UA}
project = quote_plus(f"{owner}/{repo}") project = quote_plus(f"{owner}/{repo}")
data, _ = await _safe_json( # releases: fetch multiple, pick highest semver (instead of per_page=1)
session, data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/releases?per_page=50", headers=headers)
f"{base}/api/v4/projects/{project}/releases?per_page=1&order_by=released_at&sort=desc", rel_tags: list[str] = []
headers=headers, if isinstance(data, list):
) for r in data:
if isinstance(data, list) and data: if isinstance(r, dict) and r.get("tag_name"):
tag = data[0].get("tag_name") or data[0].get("name") rel_tags.append(str(r["tag_name"]))
if isinstance(tag, str) and tag.strip():
return tag.strip(), "release"
data, _ = await _safe_json( best_rel = _pick_highest_semver(rel_tags)
session, if best_rel:
f"{base}/api/v4/projects/{project}/repository/tags?per_page=1&order_by=updated&sort=desc", return best_rel, "release"
headers=headers, if rel_tags:
) return rel_tags[0], "release"
if isinstance(data, list) and data:
tag = data[0].get("name") # tags: fetch multiple, pick highest semver (instead of per_page=1)
if isinstance(tag, str) and tag.strip(): data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/repository/tags?per_page=50", headers=headers)
return tag.strip(), "tag" tags: list[str] = []
if isinstance(data, list):
for t in data:
if isinstance(t, dict) and t.get("name"):
tags.append(str(t["name"]))
best = _pick_highest_semver(tags)
if best:
return best, "tag"
if tags:
return tags[0], "tag"
# atom fallback
atom, status = await _safe_text(session, f"{base}/{owner}/{repo}/-/tags?format=atom", headers=headers)
if status == 200 and atom:
try:
root = ET.fromstring(atom)
ns = {"a": "http://www.w3.org/2005/Atom"}
entry = root.find("a:entry", ns)
if entry is not None:
title = entry.find("a:title", ns)
if title is not None and title.text:
return title.text.strip(), "atom"
except Exception:
pass
return None, None return None, None
@@ -307,7 +337,6 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
try: try:
if provider == "github": if provider == "github":
# Try API repo details (may be rate-limited)
headers = {"Accept": "application/vnd.github+json", "User-Agent": UA} headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers) data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers)
@@ -318,12 +347,10 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
if isinstance(data.get("owner"), dict) and data["owner"].get("login"): if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
info.owner = data["owner"]["login"] info.owner = data["owner"]["login"]
else: else:
# If API blocked, still set reasonable defaults
if status == 403: if status == 403:
_LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo) _LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo)
info.default_branch = "main" info.default_branch = "main"
# If description missing, fetch from GitHub HTML
if not info.description: if not info.description:
desc = await _github_description_html(hass, owner, repo) desc = await _github_description_html(hass, owner, repo)
if desc: if desc:
@@ -371,8 +398,110 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
info.latest_version_source = src info.latest_version_source = src
return info return info
except Exception as e:
_LOGGER.debug("fetch_repo_info failed for %s: %s", repo_url, e)
return info return info
except Exception as e:
_LOGGER.debug("Provider fetch failed for %s: %s", repo_url, e) async def fetch_readme_markdown(
return info hass: HomeAssistant,
repo_url: str,
*,
provider: str | None = None,
default_branch: str | None = None,
) -> str | None:
"""Fetch README Markdown for public repositories (GitHub/GitLab/Gitea).
Defensive behavior:
- tries multiple common README filenames
- tries multiple branches (default, main, master)
- uses public raw endpoints (no tokens required for public repositories)
"""
repo_url = (repo_url or "").strip()
if not repo_url:
return None
prov = (provider or "").strip().lower() if provider else ""
if not prov:
prov = detect_provider(repo_url)
branch_candidates: list[str] = []
if default_branch and str(default_branch).strip():
branch_candidates.append(str(default_branch).strip())
for b in ("main", "master"):
if b not in branch_candidates:
branch_candidates.append(b)
filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]
session = async_get_clientsession(hass)
headers = {"User-Agent": UA}
def _normalize_gitlab_path(path: str) -> str | None:
p = (path or "").strip().strip("/")
if not p:
return None
parts = [x for x in p.split("/") if x]
if len(parts) < 2:
return None
if parts[-1].endswith(".git"):
parts[-1] = parts[-1][:-4]
return "/".join(parts)
candidates: list[str] = []
if prov == "github":
owner, repo = _split_owner_repo(repo_url)
if not owner or not repo:
return None
for branch in branch_candidates:
base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
for fn in filenames:
candidates.append(f"{base}/{fn}")
elif prov == "gitea":
owner, repo = _split_owner_repo(repo_url)
if not owner or not repo:
return None
u = urlparse(repo_url.rstrip("/"))
root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
for branch in branch_candidates:
bases = [
f"{root}/raw/branch/{branch}",
f"{root}/raw/{branch}",
]
for b in bases:
for fn in filenames:
candidates.append(f"{b}/{fn}")
elif prov == "gitlab":
u = urlparse(repo_url.rstrip("/"))
path_repo = _normalize_gitlab_path(u.path)
if not path_repo:
return None
root = f"{u.scheme}://{u.netloc}/{path_repo}"
for branch in branch_candidates:
bases = [
f"{root}/-/raw/{branch}",
f"{root}/raw/{branch}",
]
for b in bases:
for fn in filenames:
candidates.append(f"{b}/{fn}")
else:
return None
for url in candidates:
try:
async with session.get(url, timeout=20, headers=headers) as resp:
if resp.status != 200:
continue
txt = await resp.text()
if txt and txt.strip():
return txt
except Exception:
continue
return None

View File

@@ -0,0 +1,55 @@
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.components.repairs import RepairsFlow
from homeassistant.core import HomeAssistant
from homeassistant import data_entry_flow
from .core import RESTART_REQUIRED_ISSUE_ID
_LOGGER = logging.getLogger(__name__)
class BCSRestartRequiredFlow(RepairsFlow):
    """Repairs flow to restart Home Assistant after BCS install/update."""

    def __init__(self, hass: HomeAssistant) -> None:
        # Stored so the confirm step can call the core restart service.
        self.hass = hass

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        # Single-step flow: go straight to the confirmation form.
        return await self.async_step_confirm(user_input)

    async def async_step_confirm(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Show an empty confirm form; on submit, restart Home Assistant."""
        if user_input is not None:
            _LOGGER.info("BCS repairs flow: restarting Home Assistant (user confirmed)")
            await self.hass.services.async_call(
                "homeassistant",
                "restart",
                {},
                blocking=False,
            )
            return self.async_create_entry(title="", data={})
        # No input yet: render the confirmation form (schema has no fields).
        return self.async_show_form(
            step_id="confirm",
            data_schema=vol.Schema({}),
        )
async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create a repairs flow for BCS fixable issues."""
    # Only the restart-required issue is fixable; anything else is unknown.
    if issue_id != RESTART_REQUIRED_ISSUE_ID:
        raise data_entry_flow.UnknownHandler
    return BCSRestartRequiredFlow(hass)

View File

@@ -1,5 +1,6 @@
from __future__ import annotations from __future__ import annotations
import time
import uuid import uuid
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any from typing import Any
@@ -18,19 +19,40 @@ class CustomRepo:
name: str | None = None name: str | None = None
@dataclass
class InstalledRepo:
    """Metadata persisted for a repository installed by Bahmcloud Store."""
    repo_id: str
    url: str
    domains: list[str]
    installed_at: int
    installed_version: str | None = None  # BCS "installed ref" (tag/release/branch)
    installed_manifest_version: str | None = None  # informational only
    ref: str | None = None  # kept for backward compatibility / diagnostics
class BCSStorage: class BCSStorage:
"""Persistent storage for manually added repositories.""" """Persistent storage for Bahmcloud Store.
Keys:
- custom_repos: list of manually added repositories
- installed_repos: mapping repo_id -> installed metadata
"""
def __init__(self, hass: HomeAssistant) -> None: def __init__(self, hass: HomeAssistant) -> None:
self.hass = hass self.hass = hass
self._store = Store(hass, _STORAGE_VERSION, _STORAGE_KEY) self._store: Store[dict[str, Any]] = Store(hass, _STORAGE_VERSION, _STORAGE_KEY)
async def _load(self) -> dict[str, Any]: async def _load(self) -> dict[str, Any]:
data = await self._store.async_load() data = await self._store.async_load() or {}
if not data: if not isinstance(data, dict):
return {"custom_repos": []} data = {}
if "custom_repos" not in data:
if "custom_repos" not in data or not isinstance(data.get("custom_repos"), list):
data["custom_repos"] = [] data["custom_repos"] = []
if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict):
data["installed_repos"] = {}
return data return data
async def _save(self, data: dict[str, Any]) -> None: async def _save(self, data: dict[str, Any]) -> None:
@@ -43,24 +65,20 @@ class BCSStorage:
for r in repos: for r in repos:
if not isinstance(r, dict): if not isinstance(r, dict):
continue continue
rid = str(r.get("id") or "") rid = r.get("id")
url = str(r.get("url") or "") url = r.get("url")
name = r.get("name") if not rid or not url:
if rid and url: continue
out.append(CustomRepo(id=rid, url=url, name=str(name) if name else None)) out.append(CustomRepo(id=str(rid), url=str(url), name=r.get("name")))
return out return out
async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo: async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
data = await self._load() data = await self._load()
repos = data.get("custom_repos", []) repos = data.get("custom_repos", [])
# Deduplicate by URL # De-duplicate by URL
for r in repos: for r in repos:
if isinstance(r, dict) and str(r.get("url", "")).strip() == url.strip(): if isinstance(r, dict) and str(r.get("url") or "").strip() == url.strip():
# Update name if provided
if name:
r["name"] = name
await self._save(data)
return CustomRepo(id=str(r["id"]), url=str(r["url"]), name=r.get("name")) return CustomRepo(id=str(r["id"]), url=str(r["url"]), name=r.get("name"))
rid = f"custom:{uuid.uuid4().hex[:10]}" rid = f"custom:{uuid.uuid4().hex[:10]}"
@@ -73,6 +91,94 @@ class BCSStorage:
async def remove_custom_repo(self, repo_id: str) -> None: async def remove_custom_repo(self, repo_id: str) -> None:
data = await self._load() data = await self._load()
repos = data.get("custom_repos", []) repos = data.get("custom_repos", [])
data["custom_repos"] = [r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)] data["custom_repos"] = [
r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)
]
await self._save(data) await self._save(data)
async def get_installed_repo(self, repo_id: str) -> InstalledRepo | None:
    """Return installed metadata for *repo_id*, or None when absent or malformed."""
    data = await self._load()
    installed = data.get("installed_repos", {})
    if not isinstance(installed, dict):
        return None
    entry = installed.get(repo_id)
    if not isinstance(entry, dict):
        return None
    try:
        domains = entry.get("domains") or []
        if not isinstance(domains, list):
            domains = []
        domains = [str(d) for d in domains if str(d).strip()]
        installed_version = entry.get("installed_version")
        ref = entry.get("ref")
        # Backward compatibility:
        # If installed_version wasn't stored, fall back to ref.
        if (not installed_version) and ref:
            installed_version = ref
        installed_manifest_version = entry.get("installed_manifest_version")
        return InstalledRepo(
            repo_id=str(entry.get("repo_id") or repo_id),
            url=str(entry.get("url") or ""),
            domains=domains,
            installed_at=int(entry.get("installed_at") or 0),
            installed_version=str(installed_version) if installed_version else None,
            installed_manifest_version=str(installed_manifest_version) if installed_manifest_version else None,
            ref=str(ref) if ref else None,
        )
    except Exception:
        # Malformed storage entry: treat as not installed.
        return None
async def list_installed_repos(self) -> list[InstalledRepo]:
    """Return every valid installed-repo record from storage."""
    data = await self._load()
    installed = data.get("installed_repos", {})
    if not isinstance(installed, dict):
        return []
    # Reuse get_installed_repo so malformed entries are filtered consistently.
    loaded = [await self.get_installed_repo(str(rid)) for rid in list(installed)]
    return [item for item in loaded if item]
async def set_installed_repo(
    self,
    *,
    repo_id: str,
    url: str,
    domains: list[str],
    installed_version: str | None,
    installed_manifest_version: str | None = None,
    ref: str | None,
) -> None:
    """Create or replace the installed-metadata entry for *repo_id*."""
    data = await self._load()
    installed = data.get("installed_repos", {})
    if not isinstance(installed, dict):
        # Self-heal a corrupted storage value.
        installed = {}
    data["installed_repos"] = installed
    installed[str(repo_id)] = {
        "repo_id": str(repo_id),
        "url": str(url),
        "domains": [str(d) for d in (domains or []) if str(d).strip()],
        "installed_at": int(time.time()),
        # IMPORTANT: this is what BCS uses as "installed version" (ref/tag/branch)
        "installed_version": installed_version,
        # informational only
        "installed_manifest_version": installed_manifest_version,
        # keep ref too (debug/backward compatibility)
        "ref": ref,
    }
    await self._save(data)
async def remove_installed_repo(self, repo_id: str) -> None:
    """Delete the persisted record for ``repo_id``, if one exists.

    Storage is only written back when an entry was actually removed,
    so a no-op removal does not trigger a redundant save.
    """
    data = await self._load()
    bucket = data.get("installed_repos", {})
    if not isinstance(bucket, dict) or repo_id not in bucket:
        return
    del bucket[repo_id]
    data["installed_repos"] = bucket
    await self._save(data)

View File

@@ -0,0 +1,18 @@
{
"issues": {
"restart_required": {
"title": "Restart required",
"description": "One or more integrations were installed or updated by Bahmcloud Store. Restart Home Assistant to load the changes."
}
},
"repair_flow": {
"restart_required": {
"step": {
"confirm": {
"title": "Restart Home Assistant",
"description": "Bahmcloud Store installed or updated integrations. Restart Home Assistant now to apply the changes."
}
}
}
}
}

View File

@@ -1,11 +1,142 @@
from __future__ import annotations from __future__ import annotations
# NOTE: import logging
# Update entities will be implemented once installation/provider resolution is in place. from dataclasses import dataclass
# This stub prevents platform load errors and keeps the integration stable in 0.3.0. from typing import Any
from homeassistant.core import HomeAssistant from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity import EntityCategory
from .core import DOMAIN, SIGNAL_UPDATED, BCSCore
_LOGGER = logging.getLogger(__name__)
def _pretty_repo_name(core: BCSCore, repo_id: str) -> str:
"""Return a human-friendly name for a repo update entity."""
try:
repo = core.get_repo(repo_id)
if repo and getattr(repo, "name", None):
name = str(repo.name).strip()
if name:
return name
except Exception:
pass
if repo_id.startswith("index:"):
return f"BCS Index {repo_id.split(':', 1)[1]}"
if repo_id.startswith("custom:"):
return f"BCS Custom {repo_id.split(':', 1)[1]}"
return f"BCS {repo_id}"
@dataclass(frozen=True)
class _RepoKey:
    """Immutable, hashable wrapper around a repository id.

    NOTE(review): not referenced anywhere in this module's visible code —
    presumably kept for future keying/deduplication; confirm before removing.
    """

    # Unique repository identifier, e.g. "index:<name>" or "custom:<name>".
    repo_id: str
class BCSRepoUpdateEntity(UpdateEntity):
    """Update entity representing a BCS-managed repository.

    Exposes installed/latest version info from the BCS core and triggers
    repo updates through Home Assistant's update-entity "Install" action.
    """

    # Shown under "Diagnostic" in the HA device page rather than as a control.
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    # Only INSTALL is supported (no progress %, no release notes, no backup).
    _attr_supported_features = UpdateEntityFeature.INSTALL

    def __init__(self, core: BCSCore, repo_id: str) -> None:
        """Store the core reference and derive a stable unique id.

        Args:
            core: BCS core used for repo/installed lookups and updates.
            repo_id: Repository identifier this entity represents.
        """
        self._core = core
        self._repo_id = repo_id
        # Tracks a running install/update so `in_progress` reflects it in the UI.
        self._in_progress = False
        # Stable unique id (do NOT change)
        self._attr_unique_id = f"{DOMAIN}:{repo_id}"
        self._refresh_display_name()

    def _refresh_display_name(self) -> None:
        """Recompute name/title from the core; called again after refreshes
        once the repo's real name becomes available."""
        pretty = _pretty_repo_name(self._core, self._repo_id)
        self._attr_name = pretty
        self._attr_title = pretty

    @property
    def available(self) -> bool:
        """Entity is available only while the repo is both known and installed."""
        repo = self._core.get_repo(self._repo_id)
        installed = self._core.get_installed(self._repo_id)
        return repo is not None and installed is not None

    @property
    def in_progress(self) -> bool | None:
        """Whether an install/update triggered by this entity is running."""
        return self._in_progress

    @property
    def installed_version(self) -> str | None:
        """Installed version from the core's record.

        Falls back to the stored ``ref`` for records written before
        ``installed_version`` existed (backward compatibility).
        """
        installed = self._core.get_installed(self._repo_id) or {}
        v = installed.get("installed_version") or installed.get("ref")
        return str(v) if v else None

    @property
    def latest_version(self) -> str | None:
        """Latest known version reported by the repo object, if any."""
        repo = self._core.get_repo(self._repo_id)
        if not repo:
            return None
        v = getattr(repo, "latest_version", None)
        return str(v) if v else None

    @property
    def update_available(self) -> bool:
        """True when both versions are known and differ.

        Versions are refs/tags, not semver, so inequality (not ordering)
        is the comparison.
        """
        latest = self.latest_version
        installed = self.installed_version
        if not latest or not installed:
            return False
        return latest != installed

    def version_is_newer(self, latest_version: str, installed_version: str) -> bool:
        """Treat any differing version string as "newer" (refs are not ordered)."""
        return latest_version != installed_version

    @property
    def release_url(self) -> str | None:
        """Repository URL, used by HA as the release link."""
        repo = self._core.get_repo(self._repo_id)
        return getattr(repo, "url", None) if repo else None

    async def async_install(self, version: str | None, backup: bool, **kwargs: Any) -> None:
        """Handle the HA "Install" action by updating the repo via the core.

        A specific ``version`` request is ignored — the core always updates
        to its latest known ref. ``backup`` is accepted but unused.
        """
        if version is not None:
            _LOGGER.debug("BCS update entity requested specific version=%s (ignored)", version)
        self._in_progress = True
        self.async_write_ha_state()
        try:
            await self._core.update_repo(self._repo_id)
        finally:
            # Always clear the spinner, even when the update raises.
            self._in_progress = False
            self.async_write_ha_state()
@callback
def _sync_entities(core: BCSCore, existing: dict[str, BCSRepoUpdateEntity], async_add_entities: AddEntitiesCallback) -> None:
    """Reconcile update entities with the core's installed-repo cache.

    Creates one entity per installed repo that has none yet, refreshes the
    display name of entities that already exist, and pushes state for all.
    """
    installed = getattr(core, "_installed_cache", {}) or {}
    created: list[BCSRepoUpdateEntity] = []
    for repo_id, entry in installed.items():
        if not isinstance(entry, dict):
            continue
        known = existing.get(repo_id)
        if known is not None:
            # IMPORTANT: Update display name after refresh, when repo.name becomes available.
            known._refresh_display_name()
        else:
            entity = BCSRepoUpdateEntity(core, repo_id)
            existing[repo_id] = entity
            created.append(entity)
    if created:
        async_add_entities(created)
    for entity in existing.values():
        entity.async_write_ha_state()
async def async_setup_platform( async def async_setup_platform(
@@ -14,4 +145,18 @@ async def async_setup_platform(
async_add_entities: AddEntitiesCallback, async_add_entities: AddEntitiesCallback,
discovery_info=None, discovery_info=None,
): ):
"""Set up BCS update entities."""
core: BCSCore | None = hass.data.get(DOMAIN)
if not core:
_LOGGER.debug("BCS core not available, skipping update platform setup")
return return
entities: dict[str, BCSRepoUpdateEntity] = {}
_sync_entities(core, entities, async_add_entities)
@callback
def _handle_update() -> None:
_sync_entities(core, entities, async_add_entities)
async_dispatcher_connect(hass, SIGNAL_UPDATED, _handle_update)

View File

@@ -16,14 +16,12 @@ _LOGGER = logging.getLogger(__name__)
def _render_markdown_server_side(md: str) -> str | None: def _render_markdown_server_side(md: str) -> str | None:
"""Render Markdown -> sanitized HTML (server-side)."""
text = (md or "").strip() text = (md or "").strip()
if not text: if not text:
return None return None
html: str | None = None html: str | None = None
# 1) python-markdown
try: try:
import markdown as mdlib # type: ignore import markdown as mdlib # type: ignore
@@ -39,7 +37,6 @@ def _render_markdown_server_side(md: str) -> str | None:
if not html: if not html:
return None return None
# 2) Sanitize via bleach
try: try:
import bleach # type: ignore import bleach # type: ignore
@@ -124,16 +121,6 @@ def _maybe_decode_base64(content: str, encoding: Any) -> str | None:
def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None: def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
"""
Robust extraction for README markdown.
Handles:
- str / bytes
- dict with:
- {content: "...", encoding: "base64"} (possibly nested)
- {readme: "..."} etc.
- list of dicts (pick first matching)
"""
if obj is None: if obj is None:
return None return None
@@ -150,21 +137,16 @@ def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
return None return None
if isinstance(obj, dict): if isinstance(obj, dict):
# 1) If it looks like "file content"
content = obj.get("content") content = obj.get("content")
encoding = obj.get("encoding") encoding = obj.get("encoding")
# Base64 decode if possible
decoded = _maybe_decode_base64(content, encoding) decoded = _maybe_decode_base64(content, encoding)
if decoded: if decoded:
return decoded return decoded
# content may already be plain text
if isinstance(content, str) and (not isinstance(encoding, str) or not encoding.strip()): if isinstance(content, str) and (not isinstance(encoding, str) or not encoding.strip()):
# Heuristic: treat as markdown if it has typical markdown chars, otherwise still return
return content return content
# 2) direct text keys (readme/markdown/text/body/data)
for k in _TEXT_KEYS: for k in _TEXT_KEYS:
v = obj.get(k) v = obj.get(k)
if isinstance(v, str): if isinstance(v, str):
@@ -175,7 +157,6 @@ def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
except Exception: except Exception:
pass pass
# 3) Sometimes nested under "file" / "result" / "payload" etc.
for v in obj.values(): for v in obj.values():
out = _extract_text_recursive(v, depth + 1) out = _extract_text_recursive(v, depth + 1)
if out: if out:
@@ -198,7 +179,7 @@ class StaticAssetsView(HomeAssistantView):
name = "api:bahmcloud_store_static" name = "api:bahmcloud_store_static"
requires_auth = False requires_auth = False
async def get(self, request: web.Request, path: str) -> web.Response: async def get(self, request: web.Request, path: str) -> web.StreamResponse:
base = Path(__file__).resolve().parent / "panel" base = Path(__file__).resolve().parent / "panel"
base_resolved = base.resolve() base_resolved = base.resolve()
@@ -218,24 +199,7 @@ class StaticAssetsView(HomeAssistantView):
_LOGGER.error("BCS static asset not found: %s", target) _LOGGER.error("BCS static asset not found: %s", target)
return web.Response(status=404) return web.Response(status=404)
content_type = "text/plain" resp = web.FileResponse(path=target)
charset = None
if target.suffix == ".js":
content_type = "application/javascript"
charset = "utf-8"
elif target.suffix == ".html":
content_type = "text/html"
charset = "utf-8"
elif target.suffix == ".css":
content_type = "text/css"
charset = "utf-8"
elif target.suffix == ".svg":
content_type = "image/svg+xml"
elif target.suffix == ".png":
content_type = "image/png"
resp = web.Response(body=target.read_bytes(), content_type=content_type, charset=charset)
resp.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0" resp.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0"
resp.headers["Pragma"] = "no-cache" resp.headers["Pragma"] = "no-cache"
return resp return resp
@@ -247,7 +211,7 @@ class BCSApiView(HomeAssistantView):
requires_auth = True requires_auth = True
def __init__(self, core: Any) -> None: def __init__(self, core: Any) -> None:
self.core = core self.core: BCSCore = core
async def get(self, request: web.Request) -> web.Response: async def get(self, request: web.Request) -> web.Response:
return web.json_response( return web.json_response(
@@ -255,7 +219,21 @@ class BCSApiView(HomeAssistantView):
) )
async def post(self, request: web.Request) -> web.Response: async def post(self, request: web.Request) -> web.Response:
action = request.query.get("action")
if action == "refresh":
_LOGGER.info("BCS manual refresh triggered via API")
try:
await self.core.full_refresh(source="manual")
return web.json_response({"ok": True})
except Exception as e:
_LOGGER.error("BCS manual refresh failed: %s", e)
return web.json_response({"ok": False, "message": "Refresh failed"}, status=500)
try:
data = await request.json() data = await request.json()
except Exception:
data = {}
op = data.get("op") op = data.get("op")
if op == "add_custom_repo": if op == "add_custom_repo":
@@ -276,7 +254,7 @@ class BCSCustomRepoView(HomeAssistantView):
requires_auth = True requires_auth = True
def __init__(self, core: Any) -> None: def __init__(self, core: Any) -> None:
self.core = core self.core: BCSCore = core
async def delete(self, request: web.Request) -> web.Response: async def delete(self, request: web.Request) -> web.Response:
repo_id = request.query.get("id") repo_id = request.query.get("id")
@@ -292,7 +270,7 @@ class BCSReadmeView(HomeAssistantView):
requires_auth = True requires_auth = True
def __init__(self, core: Any) -> None: def __init__(self, core: Any) -> None:
self.core = core self.core: BCSCore = core
async def get(self, request: web.Request) -> web.Response: async def get(self, request: web.Request) -> web.Response:
repo_id = request.query.get("repo_id") repo_id = request.query.get("repo_id")
@@ -309,8 +287,86 @@ class BCSReadmeView(HomeAssistantView):
status=404, status=404,
) )
# Ensure strict JSON string output (avoid accidental objects)
md_str = str(md) md_str = str(md)
html = _render_markdown_server_side(md_str) html = _render_markdown_server_side(md_str)
return web.json_response({"ok": True, "readme": md_str, "html": html}) return web.json_response({"ok": True, "readme": md_str, "html": html})
class BCSInstallView(HomeAssistantView):
    """POST endpoint that installs a repository identified by ``repo_id``."""

    url = "/api/bcs/install"
    name = "api:bcs_install"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Typed reference to the BCS core that performs the actual install.
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Install the repo from the ``repo_id`` query parameter.

        Returns the core's result payload as JSON, 400 on a missing id,
        and a 500 JSON error payload on failure.
        """
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
        try:
            payload = await self.core.install_repo(repo_id)
        except Exception as err:
            _LOGGER.exception("BCS install failed: %s", err)
            return web.json_response({"ok": False, "message": str(err) or "Install failed"}, status=500)
        return web.json_response(payload, status=200)
class BCSUpdateView(HomeAssistantView):
    """POST endpoint that updates an installed repository by ``repo_id``."""

    url = "/api/bcs/update"
    name = "api:bcs_update"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Typed reference to the BCS core that performs the actual update.
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Update the repo from the ``repo_id`` query parameter.

        Returns the core's result payload as JSON, 400 on a missing id,
        and a 500 JSON error payload on failure.
        """
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
        try:
            payload = await self.core.update_repo(repo_id)
        except Exception as err:
            _LOGGER.exception("BCS update failed: %s", err)
            return web.json_response({"ok": False, "message": str(err) or "Update failed"}, status=500)
        return web.json_response(payload, status=200)
class BCSUninstallView(HomeAssistantView):
    """POST endpoint that uninstalls a repository identified by ``repo_id``."""

    url = "/api/bcs/uninstall"
    name = "api:bcs_uninstall"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Typed reference to the BCS core that performs the actual uninstall.
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Uninstall the repo from the ``repo_id`` query parameter.

        Returns the core's result payload as JSON, 400 on a missing id,
        and a 500 JSON error payload on failure.
        """
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
        try:
            payload = await self.core.uninstall_repo(repo_id)
        except Exception as err:
            _LOGGER.exception("BCS uninstall failed: %s", err)
            return web.json_response({"ok": False, "message": str(err) or "Uninstall failed"}, status=500)
        return web.json_response(payload, status=200)
class BCSRestartView(HomeAssistantView):
    """POST endpoint asking Home Assistant to restart via the BCS core."""

    url = "/api/bcs/restart"
    name = "api:bcs_restart"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Typed reference to the BCS core that issues the restart request.
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Trigger a restart request; failures surface as a 500 JSON payload."""
        try:
            await self.core.request_restart()
        except Exception as err:
            _LOGGER.exception("BCS restart failed: %s", err)
            return web.json_response({"ok": False, "message": str(err) or "Restart failed"}, status=500)
        return web.json_response({"ok": True})