Compare commits
179 Commits
CHANGELOG.md (new file, 138 lines)

@@ -0,0 +1,138 @@
# Changelog

All notable changes to this repository will be documented in this file.

Sections:

- Added
- Changed
- Fixed
- Removed
- Security

---

## [0.5.0] - 2026-01-15

### Added

- Manual refresh button that triggers a full backend refresh (store index + provider data).
- Unified refresh pipeline: startup, timer and UI now use the same refresh logic.
- Cache-busting for store index requests, so the latest store.yaml is always fetched (see the sketch below).
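A minimal sketch of the cache-busting step, matching `_add_cache_buster` in `core.py` further down in this diff: a changing `t` query parameter is appended so proxies and CDNs cannot answer with a stale index.

```python
import time
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit

def add_cache_buster(url: str) -> str:
    """Append a timestamp query parameter so the store index is fetched fresh."""
    parts = urlsplit(url)
    query = dict(parse_qsl(parts.query, keep_blank_values=True))
    query["t"] = str(int(time.time()))
    return urlunsplit((parts.scheme, parts.netloc, parts.path, urlencode(query), parts.fragment))
```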
### Improved

- Logging for store index loading and parsing.
- Refresh behavior is now deterministic and verifiable via logs.

### Fixed

- The refresh button previously only reloaded cached data.
- The store index was not always reloaded immediately on user action.

## [0.4.1] - 2026-01-15

### Fixed

- Fixed GitLab README loading by using robust raw file endpoints.
- Added support for nested GitLab groups when resolving README paths.
- Added fallback handling for multiple README filenames (`README.md`, `README`, `README.rst`, etc.).
- Added branch fallback logic for README loading (`default`, `main`, `master`); see the sketch below.
- Improved error resilience so README loading failures never break the store core.
- No behavior change for the GitHub and Gitea providers.
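The fallback order is easiest to see as the ordered list of candidate raw URLs that get tried in sequence. This is a condensed sketch of what `fetch_readme_markdown()` in `providers.py` builds (the real function additionally handles per-provider raw URL layouts):

```python
FILENAMES = ["README.md", "readme.md", "README.MD", "README.rst", "README"]

def readme_candidates(raw_base: str, default_branch: str | None) -> list[str]:
    """Return raw URLs in fallback order: default branch first, then main/master."""
    branches = [b for b in (default_branch, "main", "master") if b]
    branches = list(dict.fromkeys(branches))  # de-duplicate, keep order
    return [f"{raw_base}/{branch}/{fn}" for branch in branches for fn in FILENAMES]
```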
## [0.4.0] - 2026-01-15

### Added

- Initial public release of the Bahmcloud Store integration.
- Sidebar panel with repository browser UI.
- Support for loading repositories from a central `store.yaml` index.
- Support for custom repositories added by the user.
- Provider abstraction for GitHub, GitLab and Gitea:
  - Fetch repository information (name, description, default branch).
  - Resolve the latest version from releases, then tags, then fallback mechanisms (see the sketch after this list).
- Repository metadata support via:
  - `bcs.yaml`
  - `hacs.yaml`
  - `hacs.json`
- README loading and rendering pipeline:
  - Fetch raw README files.
  - Server-side Markdown rendering.
  - Sanitized HTML output for the panel UI.
- Auto-refresh mechanism for the store index and repository metadata.
- API endpoints:
  - List repositories
  - Add custom repository
  - Remove repository

  Persisted via Home Assistant storage (`.storage/bcs_store`).
- Public static asset endpoint for the panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
- Initial API namespace:
  - `GET /api/bcs`: list merged repositories (index + custom)
  - `POST /api/bcs`: add custom repository
  - `DELETE /api/bcs/custom_repo`: remove custom repository
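The resolution chain for GitHub, condensed from `_github_latest_version` in `providers.py` below: the `releases/latest` redirect is tried first, then the REST API (which itself falls back to tags), then the releases Atom feed.

```python
async def github_latest_version(hass, owner: str, repo: str):
    # Each resolver returns (tag, source) or (None, None); the first hit wins.
    for resolver in (
        _github_latest_version_redirect,  # follow the /releases/latest redirect
        _github_latest_version_api,       # REST API: releases/latest, then tags
        _github_latest_version_atom,      # releases.atom feed
    ):
        tag, source = await resolver(hass, owner, repo)
        if tag:
            return tag, source
    return None, None
```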
### Changed

- Repository cards are now clickable to open the detail view.

## [0.3.2] - 2026-01-15

### Added

- Metadata resolver:
  - Reads `bcs.yaml` (preferred), then `hacs.yaml`, then `hacs.json` from the repository root.
  - Extracts `name`, `description`, `category`, `author`, `maintainer` (best-effort).
- The UI now prefers the metadata description over the provider description.
- The provider repository name is now only used as a fallback if no metadata name is provided.

### Changed

- Repo display name priority:
  1) metadata (`bcs.yaml` / `hacs.*`)
  2) store index name (`store.yaml`)
  3) provider repo name
  4) repository URL
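A simplified sketch of that fallback chain; the full logic in `BCSCore._enrich_and_resolve` (`core.py`) additionally refuses to treat a URL-shaped name as a real index name:

```python
def resolve_display_name(meta_name, index_name, provider_repo_name, url) -> str:
    # Priority: metadata name > store index name > provider repo name > URL.
    return meta_name or index_name or provider_repo_name or url
```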
## [0.3.1] - 2026-01-15

### Fixed

- The panel header version is now derived from `manifest.json` via the backend API (no more hardcoded version strings).
- Mobile navigation/header visibility improved by explicitly disabling iframe embedding for the custom panel.
- When adding a custom repository without a display name, the name is now fetched from the git provider (GitHub/Gitea) and shown automatically.

## [0.3.0] - 2026-01-15

### Added

- Repository enrichment for the Store UI:
  - GitHub: fetch owner and description via the GitHub REST API.
  - Gitea: fetch owner and description via the Gitea REST API (`/api/v1`).
- Provider detection for GitHub/GitLab/Gitea (best-effort).
- Automatic UI description line populated from provider data (when available).

### Changed

- Panel module URL cache-busting updated to avoid stale frontend assets.

### Fixed

- The store "Refresh" button now triggers an immediate backend refresh (from 0.2.0).
- Avoided circular imports by using `TYPE_CHECKING` for type references.

### Notes

- Installation, README details view and update entities will be added in later versions.

## [0.2.0] - 2026-01-15

### Added

- Foundation architecture for BCS (Bahmcloud Component Store) inside a Home Assistant custom component.
- Custom panel (no iframe) using `hass.callApi()` to avoid authentication issues.
- Store index loader (`store.yaml`) with periodic refresh (data only).
- Manual repository management:
  - Add repository
  - List repositories
  - Remove repository

  Persisted via Home Assistant storage (`.storage/bcs_store`).
- Public static asset endpoint for the panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
- Initial API namespace (see the usage sketch below):
  - `GET /api/bcs`: list merged repositories (index + custom)
  - `POST /api/bcs`: add custom repository
  - `DELETE /api/bcs/custom_repo`: remove custom repository
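The panel reaches these endpoints through `hass.callApi()`; outside Home Assistant they can be exercised with a long-lived access token. A hypothetical sketch (`HA_URL`, `TOKEN` and the repository values are placeholders, and the POST field names are assumed from `BCSCore.add_custom_repo(url, name)`, not confirmed by the view code shown here):

```python
import requests

HA_URL = "http://homeassistant.local:8123"  # placeholder
TOKEN = "..."  # long-lived access token (placeholder)
headers = {"Authorization": f"Bearer {TOKEN}"}

# GET /api/bcs: list merged repositories (index + custom)
repos = requests.get(f"{HA_URL}/api/bcs", headers=headers).json()

# POST /api/bcs: add a custom repository (field names assumed)
requests.post(f"{HA_URL}/api/bcs", headers=headers,
              json={"url": "https://github.com/owner/repo", "name": "Example"})

# DELETE /api/bcs/custom_repo?id=...: remove a custom repository
requests.delete(f"{HA_URL}/api/bcs/custom_repo", headers=headers,
                params={"id": "<repo_id>"})
```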
### Changed

- Store API/UI terminology standardized to "BCS" (Bahmcloud Component Store), while the integration domain remains `bahmcloud_store` for compatibility.

### Notes

- Installation, README rendering, provider enrichment (GitHub/Gitea/GitLab), and update entities will be implemented in later versions.
README.md

@@ -1,3 +1,3 @@
 # bahmcloud_store
 
-Bahmcloud Store für installing costum_components to Homeassistant
+Bahmcloud Store for installing custom_components to Home Assistant
custom_components/bahmcloud_store/__init__.py

@@ -4,12 +4,10 @@ import logging
 from datetime import timedelta
 
 from homeassistant.core import HomeAssistant
-from homeassistant.const import Platform
-from homeassistant.helpers.event import async_track_time_interval
-from homeassistant.helpers.discovery import async_load_platform
 from homeassistant.components.panel_custom import async_register_panel
+from homeassistant.helpers.event import async_track_time_interval
 
-from .store import BahmcloudStore, StoreConfig, StoreError
+from .core import BCSCore, BCSConfig, BCSError
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -20,46 +18,52 @@ CONF_STORE_URL = "store_url"
 
 
 async def async_setup(hass: HomeAssistant, config: dict) -> bool:
-    cfg = config.get(DOMAIN, {})
+    cfg = config.get(DOMAIN, {}) or {}
     store_url = cfg.get(CONF_STORE_URL, DEFAULT_STORE_URL)
 
-    store = BahmcloudStore(hass, StoreConfig(store_url=store_url))
-    hass.data[DOMAIN] = store
+    core = BCSCore(hass, BCSConfig(store_url=store_url))
+    hass.data[DOMAIN] = core
 
-    # HTTP Views (Panel static + JSON API)
-    await store.register_http_views()
+    # Avoid blocking IO during setup
+    await core.async_initialize()
+
+    # Register HTTP views and panel
+    from .views import StaticAssetsView, BCSApiView, BCSReadmeView, BCSCustomRepoView, BCSInstallView, BCSUpdateView, BCSRestartView
+
+    hass.http.register_view(StaticAssetsView())
+    hass.http.register_view(BCSApiView(core))
+    hass.http.register_view(BCSReadmeView(core))
+    hass.http.register_view(BCSCustomRepoView(core))
+    hass.http.register_view(BCSInstallView(core))
+    hass.http.register_view(BCSUpdateView(core))
+    hass.http.register_view(BCSRestartView(core))
 
-    # Sidebar panel (custom panel + JS module)
     await async_register_panel(
         hass,
         frontend_url_path="bahmcloud-store",
         webcomponent_name="bahmcloud-store-panel",
-        module_url="/api/bahmcloud_store_static/panel.js",
+        module_url="/api/bahmcloud_store_static/panel.js?v=99",
         sidebar_title="Bahmcloud Store",
         sidebar_icon="mdi:store",
         require_admin=True,
         config={},
     )
 
-    # Initial index load
+    # Initial refresh
     try:
-        await store.refresh()
-    except StoreError as e:
-        _LOGGER.error("Initial store refresh failed: %s", e)
+        await core.full_refresh(source="startup")
+    except BCSError as e:
+        _LOGGER.error("Initial refresh failed: %s", e)
 
-    # Periodically refresh only the list + latest versions (no auto-install)
     async def periodic(_now) -> None:
         try:
-            await store.refresh()
-            store.signal_entities_updated()
-        except StoreError as e:
+            await core.full_refresh(source="timer")
+        except BCSError as e:
             _LOGGER.warning("Periodic refresh failed: %s", e)
+        except Exception as e:
+            _LOGGER.exception("Unexpected error during periodic refresh: %s", e)
 
-    # If store.yaml provides refresh_seconds, use it; otherwise 300s
-    interval_seconds = store.refresh_seconds if getattr(store, "refresh_seconds", None) else 300
-    async_track_time_interval(hass, periodic, timedelta(seconds=int(interval_seconds)))
+    interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
+    async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds))
 
-    # Load the update platform (so updates show up in Settings)
-    await async_load_platform(hass, Platform.UPDATE, DOMAIN, {}, config)
-
     return True
custom_components/bahmcloud_store/core.py (new file, 561 lines)

@@ -0,0 +1,561 @@
from __future__ import annotations

import asyncio
import hashlib
import json
import logging
import time
import shutil
import tempfile
import zipfile
from dataclasses import dataclass
from pathlib import Path
from typing import Any
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit, urlparse

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.components import persistent_notification
from homeassistant.util import yaml as ha_yaml

from .storage import BCSStorage, CustomRepo
from .providers import fetch_repo_info, detect_provider, RepoInfo, fetch_readme_markdown
from .metadata import fetch_repo_metadata, RepoMetadata

_LOGGER = logging.getLogger(__name__)

DOMAIN = "bahmcloud_store"


class BCSError(Exception):
    """BCS core error."""


class BCSInstallError(BCSError):
    """BCS installation/update error."""


@dataclass
class BCSConfig:
    store_url: str


@dataclass
class RepoItem:
    id: str
    name: str
    url: str
    source: str  # "index" | "custom"

    owner: str | None = None
    provider: str | None = None
    provider_repo_name: str | None = None
    provider_description: str | None = None
    default_branch: str | None = None

    latest_version: str | None = None
    latest_version_source: str | None = None  # "release" | "tag" | "atom" | None

    meta_source: str | None = None
    meta_name: str | None = None
    meta_description: str | None = None
    meta_category: str | None = None
    meta_author: str | None = None
    meta_maintainer: str | None = None


class BCSCore:
    def __init__(self, hass: HomeAssistant, config: BCSConfig) -> None:
        self.hass = hass
        self.config = config
        self.storage = BCSStorage(hass)

        self.refresh_seconds: int = 300
        self.repos: dict[str, RepoItem] = {}
        self._listeners: list[callable] = []

        # Will be loaded asynchronously (no blocking IO in event loop)
        self.version: str = "unknown"

        # Diagnostics (helps verify refresh behavior)
        self.last_index_url: str | None = None
        self.last_index_bytes: int | None = None
        self.last_index_hash: str | None = None
        self.last_index_loaded_at: float | None = None

        self._install_lock = asyncio.Lock()
        self._installed_cache: dict[str, Any] = {}

    async def async_initialize(self) -> None:
        """Async initialization that avoids blocking file IO."""
        self.version = await self._read_manifest_version_async()
        await self._refresh_installed_cache()

    async def _read_manifest_version_async(self) -> str:
        def _read() -> str:
            try:
                manifest_path = Path(__file__).resolve().parent / "manifest.json"
                data = json.loads(manifest_path.read_text(encoding="utf-8"))
                v = data.get("version")
                return str(v) if v else "unknown"
            except Exception:
                return "unknown"

        return await self.hass.async_add_executor_job(_read)

    def add_listener(self, cb) -> None:
        self._listeners.append(cb)

    def signal_updated(self) -> None:
        for cb in list(self._listeners):
            try:
                cb()
            except Exception:
                pass

    async def full_refresh(self, source: str = "manual") -> None:
        """Single refresh entry-point used by both timer and manual button."""
        _LOGGER.info("BCS full refresh triggered (source=%s)", source)
        await self.refresh()
        self.signal_updated()

    def get_repo(self, repo_id: str) -> RepoItem | None:
        return self.repos.get(repo_id)

    async def refresh(self) -> None:
        index_repos, refresh_seconds = await self._load_index_repos()
        self.refresh_seconds = refresh_seconds

        custom_repos = await self.storage.list_custom_repos()

        merged: dict[str, RepoItem] = {}

        for item in index_repos:
            merged[item.id] = item

        for c in custom_repos:
            merged[c.id] = RepoItem(
                id=c.id,
                name=(c.name or c.url),
                url=c.url,
                source="custom",
            )

        for r in merged.values():
            r.provider = detect_provider(r.url)

        await self._enrich_and_resolve(merged)
        self.repos = merged

        _LOGGER.info(
            "BCS refresh complete: repos=%s (index=%s, custom=%s)",
            len(self.repos),
            len([r for r in self.repos.values() if r.source == "index"]),
            len([r for r in self.repos.values() if r.source == "custom"]),
        )

    async def _enrich_and_resolve(self, merged: dict[str, RepoItem]) -> None:
        sem = asyncio.Semaphore(6)

        async def process_one(r: RepoItem) -> None:
            async with sem:
                info: RepoInfo = await fetch_repo_info(self.hass, r.url)

                r.provider = info.provider or r.provider
                r.owner = info.owner or r.owner
                r.provider_repo_name = info.repo_name
                r.provider_description = info.description
                r.default_branch = info.default_branch or r.default_branch

                r.latest_version = info.latest_version
                r.latest_version_source = info.latest_version_source

                md: RepoMetadata = await fetch_repo_metadata(self.hass, r.url, r.default_branch)
                r.meta_source = md.source
                r.meta_name = md.name
                r.meta_description = md.description
                r.meta_category = md.category
                r.meta_author = md.author
                r.meta_maintainer = md.maintainer

                has_user_or_index_name = bool(r.name) and (r.name != r.url) and (not str(r.name).startswith("http"))
                if r.meta_name:
                    r.name = r.meta_name
                elif not has_user_or_index_name and r.provider_repo_name:
                    r.name = r.provider_repo_name
                elif not r.name:
                    r.name = r.url

        await asyncio.gather(*(process_one(r) for r in merged.values()), return_exceptions=True)

    def _add_cache_buster(self, url: str) -> str:
        parts = urlsplit(url)
        q = dict(parse_qsl(parts.query, keep_blank_values=True))
        q["t"] = str(int(time.time()))
        new_query = urlencode(q)
        return urlunsplit((parts.scheme, parts.netloc, parts.path, new_query, parts.fragment))

    def _gitea_src_to_raw(self, url: str) -> str:
        parts = urlsplit(url)
        path = parts.path
        path2 = path.replace("/src/branch/", "/raw/branch/")
        if path2 == path:
            return url
        return urlunsplit((parts.scheme, parts.netloc, path2, parts.query, parts.fragment))

    async def _fetch_store_text(self, url: str) -> str:
        session = async_get_clientsession(self.hass)

        headers = {
            "User-Agent": "BahmcloudStore (Home Assistant)",
            "Cache-Control": "no-cache, no-store, max-age=0",
            "Pragma": "no-cache",
            "Expires": "0",
        }

        async with session.get(url, timeout=30, headers=headers) as resp:
            if resp.status != 200:
                raise BCSError(f"store_url returned {resp.status}")
            return await resp.text()

    async def _load_index_repos(self) -> tuple[list[RepoItem], int]:
        store_url = (self.config.store_url or "").strip()
        if not store_url:
            raise BCSError("store_url is empty")

        url = self._add_cache_buster(store_url)

        try:
            raw = await self._fetch_store_text(url)

            # If we fetched a HTML page (wrong endpoint), attempt raw conversion.
            if "<html" in raw.lower() or "<!doctype html" in raw.lower():
                fallback = self._add_cache_buster(self._gitea_src_to_raw(store_url))
                if fallback != url:
                    _LOGGER.warning("BCS store index looked like HTML, retrying raw URL")
                    raw = await self._fetch_store_text(fallback)
                    url = fallback

        except Exception as e:
            raise BCSError(f"Failed fetching store index: {e}") from e

        # Diagnostics
        b = raw.encode("utf-8", errors="replace")
        h = hashlib.sha256(b).hexdigest()[:12]
        self.last_index_url = url
        self.last_index_bytes = len(b)
        self.last_index_hash = h
        self.last_index_loaded_at = time.time()

        _LOGGER.info(
            "BCS index loaded: url=%s bytes=%s sha=%s",
            self.last_index_url,
            self.last_index_bytes,
            self.last_index_hash,
        )

        try:
            data = ha_yaml.parse_yaml(raw)
            if not isinstance(data, dict):
                raise BCSError("store.yaml must be a mapping")

            refresh_seconds = int(data.get("refresh_seconds", 300))
            repos = data.get("repos", [])
            if not isinstance(repos, list):
                raise BCSError("store.yaml 'repos' must be a list")

            items: list[RepoItem] = []
            for i, r in enumerate(repos):
                if not isinstance(r, dict):
                    continue
                repo_url = str(r.get("url", "")).strip()
                if not repo_url:
                    continue
                name = str(r.get("name") or repo_url).strip()

                items.append(
                    RepoItem(
                        id=f"index:{i}",
                        name=name,
                        url=repo_url,
                        source="index",
                    )
                )

            _LOGGER.info("BCS index parsed: repos=%s refresh_seconds=%s", len(items), refresh_seconds)
            return items, refresh_seconds
        except Exception as e:
            raise BCSError(f"Invalid store.yaml: {e}") from e

    async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
        url = str(url or "").strip()
        if not url:
            raise BCSError("Missing url")

        c = await self.storage.add_custom_repo(url, name)
        await self.full_refresh(source="custom_repo_add")
        return c

    async def remove_custom_repo(self, repo_id: str) -> None:
        await self.storage.remove_custom_repo(repo_id)
        await self.full_refresh(source="custom_repo_remove")

    async def list_custom_repos(self) -> list[CustomRepo]:
        return await self.storage.list_custom_repos()

    def list_repos_public(self) -> list[dict[str, Any]]:
        out: list[dict[str, Any]] = []

        installed_map: dict[str, Any] = getattr(self, '_installed_cache', {}) or {}
        if not isinstance(installed_map, dict):
            installed_map = {}

        for r in self.repos.values():
            inst = installed_map.get(r.id)
            installed = bool(inst)
            installed_domains: list[str] = []
            installed_version: str | None = None
            if isinstance(inst, dict):
                d = inst.get('domains') or []
                if isinstance(d, list):
                    installed_domains = [str(x) for x in d if str(x).strip()]
                v = inst.get('installed_version')
                installed_version = str(v) if v is not None else None

            out.append(
                {
                    'id': r.id,
                    'name': r.name,
                    'url': r.url,
                    'source': r.source,
                    'owner': r.owner,
                    'provider': r.provider,
                    'repo_name': r.provider_repo_name,
                    'description': r.provider_description or r.meta_description,
                    'default_branch': r.default_branch,
                    'latest_version': r.latest_version,
                    'latest_version_source': r.latest_version_source,
                    'category': r.meta_category,
                    'meta_author': r.meta_author,
                    'meta_maintainer': r.meta_maintainer,
                    'meta_source': r.meta_source,
                    'installed': installed,
                    'installed_version': installed_version,
                    'installed_domains': installed_domains,
                }
            )
        return out

    async def fetch_readme_markdown(self, repo_id: str) -> str | None:
        repo = self.get_repo(repo_id)
        if not repo:
            return None

        return await fetch_readme_markdown(
            self.hass,
            repo.url,
            provider=repo.provider,
            default_branch=repo.default_branch,
        )

    def _pick_ref_for_install(self, repo: RepoItem) -> str:
        # Prefer latest_version (release/tag/atom-derived), fallback to default branch, then main.
        if repo.latest_version and str(repo.latest_version).strip():
            return str(repo.latest_version).strip()
        if repo.default_branch and str(repo.default_branch).strip():
            return str(repo.default_branch).strip()
        return "main"

    def _build_zip_url(self, repo_url: str, ref: str) -> str:
        """Build a public ZIP download URL (provider-neutral, no tokens).

        Supports:
        - GitHub: codeload
        - GitLab: /-/archive/
        - Gitea (incl. Bahmcloud): /archive/<ref>.zip
        """
        ref = (ref or "").strip()
        if not ref:
            raise BCSInstallError("Missing ref for ZIP download")

        u = urlparse(repo_url.rstrip("/"))
        host = (u.netloc or "").lower()
        parts = [p for p in u.path.strip("/").split("/") if p]
        if len(parts) < 2:
            raise BCSInstallError("Invalid repository URL (missing owner/repo)")

        owner = parts[0]
        repo = parts[1]
        if repo.endswith(".git"):
            repo = repo[:-4]

        if "github.com" in host:
            return f"https://codeload.github.com/{owner}/{repo}/zip/{ref}"

        if "gitlab" in host:
            base = f"{u.scheme}://{u.netloc}"
            path = u.path.strip("/")
            if path.endswith(".git"):
                path = path[:-4]
            return f"{base}/{path}/-/archive/{ref}/{repo}-{ref}.zip"

        base = f"{u.scheme}://{u.netloc}"
        path = u.path.strip("/")
        if path.endswith(".git"):
            path = path[:-4]
        return f"{base}/{path}/archive/{ref}.zip"

    async def _download_zip(self, url: str, dest: Path) -> None:
        session = async_get_clientsession(self.hass)
        headers = {
            "User-Agent": "BahmcloudStore (Home Assistant)",
            "Cache-Control": "no-cache, no-store, max-age=0",
            "Pragma": "no-cache",
        }

        async with session.get(url, timeout=120, headers=headers) as resp:
            if resp.status != 200:
                raise BCSInstallError(f"zip_url returned {resp.status}")
            data = await resp.read()

        await self.hass.async_add_executor_job(dest.write_bytes, data)

    async def _extract_zip(self, zip_path: Path, extract_dir: Path) -> None:
        def _extract() -> None:
            with zipfile.ZipFile(zip_path, "r") as zf:
                zf.extractall(extract_dir)

        await self.hass.async_add_executor_job(_extract)

    @staticmethod
    def _find_custom_components_root(extract_root: Path) -> Path | None:
        direct = extract_root / "custom_components"
        if direct.exists() and direct.is_dir():
            return direct

        for child in extract_root.iterdir():
            candidate = child / "custom_components"
            if candidate.exists() and candidate.is_dir():
                return candidate
        return None

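    # The copy below is deliberately staged: the integration is copied into a
    # hidden temporary directory next to the target first, and only then
    # renamed into place, so an interrupted copy never leaves a half-written
    # directory where a working install used to be.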
    async def _copy_domain_dir(self, src_domain_dir: Path, domain: str) -> None:
        dest_root = Path(self.hass.config.path("custom_components"))
        target = dest_root / domain
        tmp_target = dest_root / f".bcs_tmp_{domain}_{int(time.time())}"

        def _copy() -> None:
            if tmp_target.exists():
                shutil.rmtree(tmp_target, ignore_errors=True)

            shutil.copytree(src_domain_dir, tmp_target, dirs_exist_ok=True)

            if target.exists():
                shutil.rmtree(target, ignore_errors=True)

            tmp_target.rename(target)

        await self.hass.async_add_executor_job(_copy)

    async def _read_installed_version(self, domain: str) -> str | None:
        def _read() -> str | None:
            try:
                p = Path(self.hass.config.path("custom_components", domain, "manifest.json"))
                if not p.exists():
                    return None
                data = json.loads(p.read_text(encoding="utf-8"))
                v = data.get("version")
                return str(v) if v else None
            except Exception:
                return None

        return await self.hass.async_add_executor_job(_read)

    async def _refresh_installed_cache(self) -> None:
        try:
            items = await self.storage.list_installed_repos()
            cache: dict[str, Any] = {}
            for it in items:
                cache[it.repo_id] = {
                    "domains": it.domains,
                    "installed_version": it.installed_version,
                    "ref": it.ref,
                    "installed_at": it.installed_at,
                }
            self._installed_cache = cache
        except Exception:
            self._installed_cache = {}

    async def install_repo(self, repo_id: str) -> dict[str, Any]:
        repo = self.get_repo(repo_id)
        if not repo:
            raise BCSInstallError(f"repo_id not found: {repo_id}")

        async with self._install_lock:
            ref = self._pick_ref_for_install(repo)
            zip_url = self._build_zip_url(repo.url, ref)

            _LOGGER.info("BCS install started: repo_id=%s ref=%s zip_url=%s", repo_id, ref, zip_url)

            with tempfile.TemporaryDirectory(prefix="bcs_install_") as td:
                tmp = Path(td)
                zip_path = tmp / "repo.zip"
                extract_dir = tmp / "extract"
                extract_dir.mkdir(parents=True, exist_ok=True)

                await self._download_zip(zip_url, zip_path)
                await self._extract_zip(zip_path, extract_dir)

                cc_root = self._find_custom_components_root(extract_dir)
                if not cc_root:
                    raise BCSInstallError("custom_components folder not found in repository ZIP")

                installed_domains: list[str] = []
                for domain_dir in cc_root.iterdir():
                    if not domain_dir.is_dir():
                        continue
                    manifest = domain_dir / "manifest.json"
                    if not manifest.exists():
                        continue

                    domain = domain_dir.name
                    await self._copy_domain_dir(domain_dir, domain)
                    installed_domains.append(domain)

            if not installed_domains:
                raise BCSInstallError("No integrations found under custom_components/ (missing manifest.json)")

            installed_version = await self._read_installed_version(installed_domains[0])

            await self.storage.set_installed_repo(
                repo_id=repo_id,
                url=repo.url,
                domains=installed_domains,
                installed_version=installed_version,
                ref=ref,
            )
            await self._refresh_installed_cache()

            persistent_notification.async_create(
                self.hass,
                "Bahmcloud Store installation finished. A Home Assistant restart is required to load the integration.",
                title="Bahmcloud Store",
                notification_id="bcs_restart_required",
            )

            _LOGGER.info("BCS install complete: repo_id=%s domains=%s", repo_id, installed_domains)
            self.signal_updated()
            return {
                "ok": True,
                "repo_id": repo_id,
                "domains": installed_domains,
                "installed_version": installed_version,
                "restart_required": True,
            }

    async def update_repo(self, repo_id: str) -> dict[str, Any]:
        _LOGGER.info("BCS update started: repo_id=%s", repo_id)
        return await self.install_repo(repo_id)

    async def request_restart(self) -> None:
        await self.hass.services.async_call("homeassistant", "restart", {}, blocking=False)
custom_components/bahmcloud_store/custom_repo_view.py (new file, 28 lines)

@@ -0,0 +1,28 @@
from __future__ import annotations

from typing import TYPE_CHECKING

from homeassistant.components.http import HomeAssistantView

if TYPE_CHECKING:
    from .core import BCSCore


class BCSCustomRepoView(HomeAssistantView):
    """
    DELETE /api/bcs/custom_repo?id=...
    """
    requires_auth = True
    name = "bcs_custom_repo_api"
    url = "/api/bcs/custom_repo"

    def __init__(self, core: "BCSCore") -> None:
        self.core = core

    async def delete(self, request):
        repo_id = request.query.get("id", "").strip()
        if not repo_id:
            return self.json({"error": "id missing"}, status_code=400)

        await self.core.remove_custom_repo(repo_id)
        return self.json({"ok": True})
custom_components/bahmcloud_store/manifest.json

@@ -1,9 +1,9 @@
 {
     "domain": "bahmcloud_store",
     "name": "Bahmcloud Store",
-    "version": "0.1.0",
-    "documentation": "https://git.bahmcloud.de/bahmcloud/ha_store",
+    "version": "0.5.0",
+    "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
     "requirements": [],
-    "codeowners": [],
+    "codeowners": ["@bahmcloud"],
     "iot_class": "local_polling"
 }
custom_components/bahmcloud_store/metadata.py (new file, 168 lines)

@@ -0,0 +1,168 @@
from __future__ import annotations

import json
import logging
from dataclasses import dataclass
from urllib.parse import urlparse

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util import yaml as ha_yaml

_LOGGER = logging.getLogger(__name__)


@dataclass
class RepoMetadata:
    source: str | None = None  # "bcs.yaml" | "hacs.yaml" | "hacs.json" | None
    name: str | None = None
    description: str | None = None
    category: str | None = None
    author: str | None = None
    maintainer: str | None = None


def _normalize_repo_name(name: str | None) -> str | None:
    if not name:
        return None
    n = name.strip()
    if n.endswith(".git"):
        n = n[:-4]
    return n or None


def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
    u = urlparse(repo_url.rstrip("/"))
    parts = [p for p in u.path.strip("/").split("/") if p]
    if len(parts) < 2:
        return None, None
    owner = parts[0].strip() or None
    repo = _normalize_repo_name(parts[1])
    return owner, repo


def _is_github(repo_url: str) -> bool:
    return "github.com" in urlparse(repo_url).netloc.lower()


def _is_gitlab(repo_url: str) -> bool:
    return "gitlab" in urlparse(repo_url).netloc.lower()


def _is_gitea(repo_url: str) -> bool:
    host = urlparse(repo_url).netloc.lower()
    return host and ("github.com" not in host) and ("gitlab" not in host)


async def _fetch_text(hass: HomeAssistant, url: str) -> str | None:
    session = async_get_clientsession(hass)
    try:
        async with session.get(url, timeout=20) as resp:
            if resp.status != 200:
                return None
            return await resp.text()
    except Exception:
        return None


def _parse_meta_yaml(raw: str, source: str) -> RepoMetadata:
    try:
        data = ha_yaml.parse_yaml(raw)
        if not isinstance(data, dict):
            return RepoMetadata(source=source)

        return RepoMetadata(
            source=source,
            name=data.get("name"),
            description=data.get("description"),
            category=data.get("category"),
            author=data.get("author"),
            maintainer=data.get("maintainer"),
        )
    except Exception:
        return RepoMetadata(source=source)


def _parse_meta_hacs_json(raw: str) -> RepoMetadata:
    try:
        data = json.loads(raw)
        if not isinstance(data, dict):
            return RepoMetadata(source="hacs.json")

        name = data.get("name")
        description = data.get("description")
        author = data.get("author")
        maintainer = data.get("maintainer")
        category = data.get("category") or data.get("type")

        return RepoMetadata(
            source="hacs.json",
            name=name if isinstance(name, str) else None,
            description=description if isinstance(description, str) else None,
            category=category if isinstance(category, str) else None,
            author=author if isinstance(author, str) else None,
            maintainer=maintainer if isinstance(maintainer, str) else None,
        )
    except Exception:
        return RepoMetadata(source="hacs.json")


async def fetch_repo_metadata(hass: HomeAssistant, repo_url: str, default_branch: str | None) -> RepoMetadata:
    owner, repo = _split_owner_repo(repo_url)
    if not owner or not repo:
        return RepoMetadata()

    branch = default_branch or "main"

    # Priority:
    # 1) bcs.yaml
    # 2) hacs.yaml
    # 3) hacs.json
    filenames = ["bcs.yaml", "hacs.yaml", "hacs.json"]

    candidates: list[tuple[str, str]] = []

    if _is_github(repo_url):
        base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
        for fn in filenames:
            candidates.append((fn, f"{base}/{fn}"))

    elif _is_gitlab(repo_url):
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
        # GitLab raw format:
        # https://gitlab.com/<owner>/<repo>/-/raw/<branch>/<file>
        for fn in filenames:
            candidates.append((fn, f"{root}/-/raw/{branch}/{fn}"))

    elif _is_gitea(repo_url):
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"

        bases = [
            f"{root}/raw/branch/{branch}",
            f"{root}/raw/{branch}",
        ]
        for fn in filenames:
            for b in bases:
                candidates.append((fn, f"{b}/{fn}"))

    else:
        return RepoMetadata()

    for fn, url in candidates:
        raw = await _fetch_text(hass, url)
        if not raw:
            continue

        if fn.endswith(".json"):
            meta = _parse_meta_hacs_json(raw)
            if meta.source:
                return meta
            continue

        meta = _parse_meta_yaml(raw, fn)
        if meta.source:
            return meta

    return RepoMetadata()
File diff suppressed because it is too large
custom_components/bahmcloud_store/providers.py (new file, 432 lines)

@@ -0,0 +1,432 @@
from __future__ import annotations

import logging
import re
import xml.etree.ElementTree as ET
from dataclasses import dataclass
from urllib.parse import quote_plus, urlparse

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

_LOGGER = logging.getLogger(__name__)

UA = "BahmcloudStore (Home Assistant)"


@dataclass
class RepoInfo:
    owner: str | None = None
    repo_name: str | None = None
    description: str | None = None
    provider: str | None = None
    default_branch: str | None = None

    latest_version: str | None = None
    latest_version_source: str | None = None  # "release" | "tag" | "atom" | None


def _normalize_repo_name(name: str | None) -> str | None:
    if not name:
        return None
    n = name.strip()
    if n.endswith(".git"):
        n = n[:-4]
    return n or None


def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
    u = urlparse(repo_url.rstrip("/"))
    parts = [p for p in u.path.strip("/").split("/") if p]
    if len(parts) < 2:
        return None, None
    owner = parts[0].strip() or None
    repo = _normalize_repo_name(parts[1])
    return owner, repo


def detect_provider(repo_url: str) -> str:
    host = urlparse(repo_url).netloc.lower()
    if "github.com" in host:
        return "github"
    if "gitlab" in host:
        return "gitlab"
    return "gitea"


async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20):
    try:
        async with session.get(url, timeout=timeout, headers=headers) as resp:
            status = resp.status
            if status != 200:
                return None, status
            return await resp.json(), status
    except Exception:
        return None, None


async def _safe_text(session, url: str, *, headers: dict | None = None, timeout: int = 20):
    try:
        async with session.get(url, timeout=timeout, headers=headers) as resp:
            status = resp.status
            if status != 200:
                return None, status
            return await resp.text(), status
    except Exception:
        return None, None


def _extract_tag_from_github_url(url: str) -> str | None:
    m = re.search(r"/releases/tag/([^/?#]+)", url or "")
    if not m:
        return None
    return m.group(1).strip() or None


def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None:
    if not html:
        return None
    if prop:
        m = re.search(rf'<meta\s+property="{re.escape(prop)}"\s+content="([^"]+)"', html)
        if m:
            return m.group(1).strip()
    if name:
        m = re.search(rf'<meta\s+name="{re.escape(name)}"\s+content="([^"]+)"', html)
        if m:
            return m.group(1).strip()
    return None

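# Fallback used when the GitHub REST API is rate-limited or blocked (HTTP 403):
# scrape the repository's public HTML page and read the og:description /
# description meta tags instead of the API's description field.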
async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
url = f"https://github.com/{owner}/{repo}"
|
||||||
|
html, status = await _safe_text(session, url, headers={"User-Agent": UA})
|
||||||
|
if status != 200 or not html:
|
||||||
|
return None
|
||||||
|
|
||||||
|
desc = _extract_meta(html, prop="og:description")
|
||||||
|
if desc:
|
||||||
|
return desc
|
||||||
|
|
||||||
|
return _extract_meta(html, name="description")
|
||||||
|
|
||||||
|
|
||||||
|
async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
url = f"https://github.com/{owner}/{repo}/releases.atom"
|
||||||
|
atom, status = await _safe_text(session, url, headers={"User-Agent": UA})
|
||||||
|
if status != 200 or not atom:
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
try:
|
||||||
|
root = ET.fromstring(atom)
|
||||||
|
ns = {"a": "http://www.w3.org/2005/Atom"}
|
||||||
|
entry = root.find("a:entry", ns)
|
||||||
|
if entry is None:
|
||||||
|
return None, None
|
||||||
|
link = entry.find("a:link", ns)
|
||||||
|
if link is not None and link.attrib.get("href"):
|
||||||
|
tag = _extract_tag_from_github_url(link.attrib["href"])
|
||||||
|
if tag:
|
||||||
|
return tag, "atom"
|
||||||
|
title = entry.find("a:title", ns)
|
||||||
|
if title is not None and title.text:
|
||||||
|
t = title.text.strip()
|
||||||
|
if t:
|
||||||
|
return t, "atom"
|
||||||
|
except Exception:
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
url = f"https://github.com/{owner}/{repo}/releases/latest"
|
||||||
|
try:
|
||||||
|
async with session.get(url, timeout=20, headers={"User-Agent": UA}, allow_redirects=True) as resp:
|
||||||
|
if resp.status != 200:
|
||||||
|
return None, None
|
||||||
|
final = str(resp.url)
|
||||||
|
tag = _extract_tag_from_github_url(final)
|
||||||
|
if tag:
|
||||||
|
return tag, "release"
|
||||||
|
except Exception:
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _github_latest_version_api(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
|
||||||
|
|
||||||
|
data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
|
||||||
|
if isinstance(data, dict) and data.get("tag_name"):
|
||||||
|
return str(data["tag_name"]), "release"
|
||||||
|
|
||||||
|
if status == 404:
|
||||||
|
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers)
|
||||||
|
if isinstance(data, list) and data:
|
||||||
|
t = data[0]
|
||||||
|
if isinstance(t, dict) and t.get("name"):
|
||||||
|
return str(t["name"]), "tag"
|
||||||
|
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
|
||||||
|
tag, src = await _github_latest_version_redirect(hass, owner, repo)
|
||||||
|
if tag:
|
||||||
|
return tag, src
|
||||||
|
|
||||||
|
tag, src = await _github_latest_version_api(hass, owner, repo)
|
||||||
|
if tag:
|
||||||
|
return tag, src
|
||||||
|
|
||||||
|
return await _github_latest_version_atom(hass, owner, repo)
|
||||||
|
|
||||||
|
|
||||||
|
async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
|
||||||
|
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=1")
|
||||||
|
if isinstance(data, list) and data:
|
||||||
|
r = data[0]
|
||||||
|
if isinstance(r, dict) and r.get("tag_name"):
|
||||||
|
return str(r["tag_name"]), "release"
|
||||||
|
|
||||||
|
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=1")
|
||||||
|
if isinstance(data, list) and data:
|
||||||
|
t = data[0]
|
||||||
|
if isinstance(t, dict) and t.get("name"):
|
||||||
|
return str(t["name"]), "tag"
|
||||||
|
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _gitlab_latest_version(
|
||||||
|
hass: HomeAssistant, base: str, owner: str, repo: str
|
||||||
|
) -> tuple[str | None, str | None]:
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
headers = {"User-Agent": UA}
|
||||||
|
|
||||||
|
project = quote_plus(f"{owner}/{repo}")
|
||||||
|
|
||||||
|
data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/releases?per_page=1", headers=headers)
|
||||||
|
if isinstance(data, list) and data:
|
||||||
|
r = data[0]
|
||||||
|
if isinstance(r, dict) and r.get("tag_name"):
|
||||||
|
return str(r["tag_name"]), "release"
|
||||||
|
|
||||||
|
data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/repository/tags?per_page=1", headers=headers)
|
||||||
|
if isinstance(data, list) and data:
|
||||||
|
t = data[0]
|
||||||
|
if isinstance(t, dict) and t.get("name"):
|
||||||
|
return str(t["name"]), "tag"
|
||||||
|
|
||||||
|
atom, status = await _safe_text(session, f"{base}/{owner}/{repo}/-/tags?format=atom", headers=headers)
|
||||||
|
if status == 200 and atom:
|
||||||
|
try:
|
||||||
|
root = ET.fromstring(atom)
|
||||||
|
ns = {"a": "http://www.w3.org/2005/Atom"}
|
||||||
|
entry = root.find("a:entry", ns)
|
||||||
|
if entry is not None:
|
||||||
|
title = entry.find("a:title", ns)
|
||||||
|
if title is not None and title.text:
|
||||||
|
return title.text.strip(), "atom"
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
    provider = detect_provider(repo_url)
    owner, repo = _split_owner_repo(repo_url)

    info = RepoInfo(
        owner=owner,
        repo_name=repo,
        description=None,
        provider=provider,
        default_branch=None,
        latest_version=None,
        latest_version_source=None,
    )

    if not owner or not repo:
        return info

    session = async_get_clientsession(hass)

    try:
        if provider == "github":
            headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
            data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers)

            if isinstance(data, dict):
                info.description = data.get("description")
                info.repo_name = _normalize_repo_name(data.get("name")) or repo
                info.default_branch = data.get("default_branch") or "main"
                if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
                    info.owner = data["owner"]["login"]
            else:
                if status == 403:
                    _LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo)
                info.default_branch = "main"

            if not info.description:
                desc = await _github_description_html(hass, owner, repo)
                if desc:
                    info.description = desc

            ver, src = await _github_latest_version(hass, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

        if provider == "gitlab":
            u = urlparse(repo_url.rstrip("/"))
            base = f"{u.scheme}://{u.netloc}"
            headers = {"User-Agent": UA}
            project = quote_plus(f"{owner}/{repo}")

            data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}", headers=headers)
            if isinstance(data, dict):
                info.description = data.get("description")
                info.repo_name = _normalize_repo_name(data.get("path")) or repo
                info.default_branch = data.get("default_branch") or "main"
                ns = data.get("namespace")
                if isinstance(ns, dict) and ns.get("path"):
                    info.owner = ns.get("path")

            ver, src = await _gitlab_latest_version(hass, base, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

        if provider == "gitea":
            u = urlparse(repo_url.rstrip("/"))
            base = f"{u.scheme}://{u.netloc}"

            data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}")
            if isinstance(data, dict):
                info.description = data.get("description")
                info.repo_name = _normalize_repo_name(data.get("name")) or repo
                info.default_branch = data.get("default_branch") or "main"
                if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
                    info.owner = data["owner"]["login"]

            ver, src = await _gitea_latest_version(hass, base, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

    except Exception as e:
        _LOGGER.debug("fetch_repo_info failed for %s: %s", repo_url, e)

    return info

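`fetch_repo_info` dispatches on the detected provider and always hands back a `RepoInfo`, even when every lookup fails. A minimal usage sketch; the wrapper function and URL below are illustrative, not part of the integration:

```python
from homeassistant.core import HomeAssistant

async def log_repo_summary(hass: HomeAssistant, url: str) -> None:
    # Hypothetical helper for illustration only.
    info = await fetch_repo_info(hass, url)
    version = info.latest_version or "unknown"
    source = info.latest_version_source or "n/a"
    _LOGGER.debug("%s/%s: %s (via %s)", info.owner, info.repo_name, version, source)
```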
async def fetch_readme_markdown(
    hass: HomeAssistant,
    repo_url: str,
    *,
    provider: str | None = None,
    default_branch: str | None = None,
) -> str | None:
    """Fetch README Markdown for public repositories (GitHub/GitLab/Gitea).

    Defensive behavior:
    - tries multiple common README filenames
    - tries multiple branches (default, main, master)
    - uses public raw endpoints (no tokens required for public repositories)
    """
    repo_url = (repo_url or "").strip()
    if not repo_url:
        return None

    prov = (provider or "").strip().lower() if provider else ""
    if not prov:
        prov = detect_provider(repo_url)

    branch_candidates: list[str] = []
    if default_branch and str(default_branch).strip():
        branch_candidates.append(str(default_branch).strip())
    for b in ("main", "master"):
        if b not in branch_candidates:
            branch_candidates.append(b)

    filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]

    session = async_get_clientsession(hass)
    headers = {"User-Agent": UA}

    def _normalize_gitlab_path(path: str) -> str | None:
        p = (path or "").strip().strip("/")
        if not p:
            return None
        parts = [x for x in p.split("/") if x]
        if len(parts) < 2:
            return None
        if parts[-1].endswith(".git"):
            parts[-1] = parts[-1][:-4]
        return "/".join(parts)

    candidates: list[str] = []

    if prov == "github":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        for branch in branch_candidates:
            base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
            for fn in filenames:
                candidates.append(f"{base}/{fn}")

    elif prov == "gitea":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
        for branch in branch_candidates:
            bases = [
                f"{root}/raw/branch/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")

    elif prov == "gitlab":
        u = urlparse(repo_url.rstrip("/"))
        path_repo = _normalize_gitlab_path(u.path)
        if not path_repo:
            return None
        root = f"{u.scheme}://{u.netloc}/{path_repo}"
        for branch in branch_candidates:
            bases = [
                f"{root}/-/raw/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")

    else:
        return None

    for url in candidates:
        try:
            async with session.get(url, timeout=20, headers=headers) as resp:
                if resp.status != 200:
                    continue
                txt = await resp.text()
                if txt and txt.strip():
                    return txt
        except Exception:
            continue

    return None
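The candidate matrix is cheapest when the real default branch is tried first, so callers can feed `fetch_repo_info` results back in as hints. A sketch of that flow; the wrapper name is illustrative:

```python
async def fetch_readme_with_hints(hass, url: str) -> str | None:
    # Hypothetical convenience wrapper: reuse repo metadata as hints so the
    # README probe tries the true default branch before "main"/"master".
    info = await fetch_repo_info(hass, url)
    return await fetch_readme_markdown(
        hass, url, provider=info.provider, default_branch=info.default_branch
    )
```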
167
custom_components/bahmcloud_store/storage.py
Normal file
@@ -0,0 +1,167 @@
from __future__ import annotations

import time
import uuid
from dataclasses import dataclass
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.storage import Store

_STORAGE_VERSION = 1
_STORAGE_KEY = "bcs_store"


@dataclass
class CustomRepo:
    id: str
    url: str
    name: str | None = None


@dataclass
class InstalledRepo:
    repo_id: str
    url: str
    domains: list[str]
    installed_at: int
    installed_version: str | None = None
    ref: str | None = None


class BCSStorage:
    """Persistent storage for Bahmcloud Store.

    Keys:
    - custom_repos: list of manually added repositories
    - installed_repos: mapping repo_id -> installed metadata
    """

    def __init__(self, hass: HomeAssistant) -> None:
        self.hass = hass
        self._store: Store[dict[str, Any]] = Store(hass, _STORAGE_VERSION, _STORAGE_KEY)

    async def _load(self) -> dict[str, Any]:
        data = await self._store.async_load() or {}
        if not isinstance(data, dict):
            data = {}

        if "custom_repos" not in data or not isinstance(data.get("custom_repos"), list):
            data["custom_repos"] = []

        if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict):
            data["installed_repos"] = {}

        return data

    async def _save(self, data: dict[str, Any]) -> None:
        await self._store.async_save(data)

    async def list_custom_repos(self) -> list[CustomRepo]:
        data = await self._load()
        repos = data.get("custom_repos", [])
        out: list[CustomRepo] = []
        for r in repos:
            if not isinstance(r, dict):
                continue
            rid = r.get("id")
            url = r.get("url")
            if not rid or not url:
                continue
            out.append(CustomRepo(id=str(rid), url=str(url), name=r.get("name")))
        return out

    async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
        data = await self._load()
        repos = data.get("custom_repos", [])

        # De-duplicate by URL
        for r in repos:
            if isinstance(r, dict) and str(r.get("url") or "").strip() == url.strip():
                return CustomRepo(id=str(r["id"]), url=str(r["url"]), name=r.get("name"))

        rid = f"custom:{uuid.uuid4().hex[:10]}"
        entry = {"id": rid, "url": url.strip(), "name": name.strip() if name else None}
        repos.append(entry)
        data["custom_repos"] = repos
        await self._save(data)
        return CustomRepo(id=rid, url=entry["url"], name=entry["name"])

    async def remove_custom_repo(self, repo_id: str) -> None:
        data = await self._load()
        repos = data.get("custom_repos", [])
        data["custom_repos"] = [
            r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)
        ]
        await self._save(data)

    async def get_installed_repo(self, repo_id: str) -> InstalledRepo | None:
        data = await self._load()
        installed = data.get("installed_repos", {})
        if not isinstance(installed, dict):
            return None
        entry = installed.get(repo_id)
        if not isinstance(entry, dict):
            return None

        try:
            domains = entry.get("domains") or []
            if not isinstance(domains, list):
                domains = []
            domains = [str(d) for d in domains if str(d).strip()]

            return InstalledRepo(
                repo_id=str(entry.get("repo_id") or repo_id),
                url=str(entry.get("url") or ""),
                domains=domains,
                installed_at=int(entry.get("installed_at") or 0),
                installed_version=str(entry.get("installed_version")) if entry.get("installed_version") else None,
                ref=str(entry.get("ref")) if entry.get("ref") else None,
            )
        except Exception:
            return None

    async def list_installed_repos(self) -> list[InstalledRepo]:
        data = await self._load()
        installed = data.get("installed_repos", {})
        out: list[InstalledRepo] = []
        if not isinstance(installed, dict):
            return out
        for repo_id in list(installed.keys()):
            item = await self.get_installed_repo(str(repo_id))
            if item:
                out.append(item)
        return out

    async def set_installed_repo(
        self,
        *,
        repo_id: str,
        url: str,
        domains: list[str],
        installed_version: str | None,
        ref: str | None,
    ) -> None:
        data = await self._load()
        installed = data.get("installed_repos", {})
        if not isinstance(installed, dict):
            installed = {}
        data["installed_repos"] = installed

        installed[str(repo_id)] = {
            "repo_id": str(repo_id),
            "url": str(url),
            "domains": [str(d) for d in (domains or []) if str(d).strip()],
            "installed_at": int(time.time()),
            "installed_version": installed_version,
            "ref": ref,
        }
        await self._save(data)

    async def remove_installed_repo(self, repo_id: str) -> None:
        data = await self._load()
        installed = data.get("installed_repos", {})
        if isinstance(installed, dict) and repo_id in installed:
            installed.pop(repo_id, None)
            data["installed_repos"] = installed
            await self._save(data)
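`BCSStorage` normalizes whatever is on disk before every read, so callers never see malformed shapes. A round-trip sketch, assuming a running `hass` instance; the repository URL and name are placeholders:

```python
async def demo_storage_roundtrip(hass) -> None:
    # Illustrative only: add, list, then remove a custom repository.
    storage = BCSStorage(hass)
    repo = await storage.add_custom_repo("https://github.com/acme/widget", name="Widget")
    # CustomRepo is a dataclass, so membership compares by field values.
    assert repo in await storage.list_custom_repos()
    await storage.remove_custom_repo(repo.id)
```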
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
-from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
+# NOTE:
+# Update entities will be implemented once installation/provider resolution is in place.
+# This stub prevents platform load errors and keeps the integration stable in 0.3.0.
 
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import DOMAIN
-from .store import BahmcloudStore, Package
-
 
 async def async_setup_platform(
     hass: HomeAssistant,
@@ -14,81 +14,4 @@ async def async_setup_platform(
     async_add_entities: AddEntitiesCallback,
     discovery_info=None,
 ):
-    store: BahmcloudStore = hass.data[DOMAIN]
-    entities: dict[str, BahmcloudPackageUpdate] = {}
-
-    def should_have_update_entity(pkg: Package) -> bool:
-        # The store itself always gets an update entity
-        if pkg.type == "store":
-            return True
-        # Other packages only when they are installed
-        return store.is_installed(pkg.domain)
-
-    def rebuild_entities() -> None:
-        # Create entities for packages that qualify
-        for pkg in store.packages.values():
-            if not should_have_update_entity(pkg):
-                continue
-
-            uid = f"{DOMAIN}:{pkg.id}"
-            if uid not in entities:
-                ent = BahmcloudPackageUpdate(store, pkg.id)
-                entities[uid] = ent
-                async_add_entities([ent], update_before_add=True)
-
-        # Refresh states
-        for ent in entities.values():
-            ent.async_write_ha_state()
-
-    store.add_listener(rebuild_entities)
-    rebuild_entities()
-
-
-class BahmcloudPackageUpdate(UpdateEntity):
-    _attr_supported_features = UpdateEntityFeature.INSTALL
-
-    def __init__(self, store: BahmcloudStore, package_id: str) -> None:
-        self.store = store
-        self.package_id = package_id
-
-        self._attr_unique_id = f"{DOMAIN}_{package_id}"
-        self._attr_name = f"{package_id} update"
-
-    @property
-    def _pkg(self) -> Package | None:
-        return self.store.packages.get(self.package_id)
-
-    @property
-    def title(self) -> str | None:
-        pkg = self._pkg
-        return pkg.name if pkg else None
-
-    @property
-    def installed_version(self) -> str | None:
-        pkg = self._pkg
-        if not pkg:
-            return None
-        if not self.store.is_installed(pkg.domain):
-            return None
-        return self.store.installed_version(pkg.domain)
-
-    @property
-    def latest_version(self) -> str | None:
-        pkg = self._pkg
-        return pkg.latest_version if pkg else None
-
-    @property
-    def release_summary(self) -> str | None:
-        pkg = self._pkg
-        if not pkg:
-            return None
-        if pkg.release_url:
-            return f"Release: {pkg.release_url}"
-        return f"Repo: {pkg.repo}"
-
-    async def async_install(self, version: str | None, backup: bool, **kwargs) -> None:
-        pkg = self._pkg
-        if not pkg:
-            return
-        await self.store.install_from_zip(pkg)
-        self.async_write_ha_state()
+    return
351
custom_components/bahmcloud_store/views.py
Normal file
@@ -0,0 +1,351 @@
from __future__ import annotations

import base64
import logging
from dataclasses import asdict
from pathlib import Path
from typing import Any, TYPE_CHECKING

from aiohttp import web
from homeassistant.components.http import HomeAssistantView

if TYPE_CHECKING:
    from .core import BCSCore  # typing only

_LOGGER = logging.getLogger(__name__)


def _render_markdown_server_side(md: str) -> str | None:
    text = (md or "").strip()
    if not text:
        return None

    html: str | None = None

    try:
        import markdown as mdlib  # type: ignore

        html = mdlib.markdown(
            text,
            extensions=["fenced_code", "tables", "sane_lists", "toc"],
            output_format="html5",
        )
    except Exception as e:
        _LOGGER.debug("python-markdown render failed: %s", e)
        html = None

    if not html:
        return None

    try:
        import bleach  # type: ignore

        allowed_tags = [
            "p", "br", "hr", "div", "span", "blockquote", "pre", "code",
            "h1", "h2", "h3", "h4", "h5", "h6",
            "ul", "ol", "li",
            "strong", "em", "b", "i", "u", "s",
            "a", "img",
            "table", "thead", "tbody", "tr", "th", "td",
        ]

        allowed_attrs = {
            "a": ["href", "title", "target", "rel"],
            "img": ["src", "alt", "title"],
            "th": ["align"],
            "td": ["align"],
            "*": ["class"],
        }

        sanitized = bleach.clean(
            html,
            tags=allowed_tags,
            attributes=allowed_attrs,
            protocols=["http", "https", "mailto"],
            strip=True,
        )

        sanitized = sanitized.replace(
            '<a href="',
            '<a rel="noreferrer noopener" target="_blank" href="',
        )
        return sanitized

    except Exception as e:
        _LOGGER.debug("bleach sanitize failed/unavailable: %s", e)

    return html


_TEXT_KEYS = ("readme", "markdown", "text", "content", "data", "body")


def _maybe_decode_base64(content: str, encoding: Any) -> str | None:
    if not isinstance(content, str):
        return None
    enc = ""
    if isinstance(encoding, str):
        enc = encoding.strip().lower()
    if "base64" not in enc:
        return None
    try:
        raw = base64.b64decode(content.encode("utf-8"), validate=False)
        return raw.decode("utf-8", errors="replace")
    except Exception:
        return None


def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
    if obj is None:
        return None

    if isinstance(obj, bytes):
        try:
            return obj.decode("utf-8", errors="replace")
        except Exception:
            return None

    if isinstance(obj, str):
        return obj

    if depth > 8:
        return None

    if isinstance(obj, dict):
        content = obj.get("content")
        encoding = obj.get("encoding")

        decoded = _maybe_decode_base64(content, encoding)
        if decoded:
            return decoded

        if isinstance(content, str) and (not isinstance(encoding, str) or not encoding.strip()):
            return content

        for k in _TEXT_KEYS:
            v = obj.get(k)
            if isinstance(v, str):
                return v
            if isinstance(v, bytes):
                try:
                    return v.decode("utf-8", errors="replace")
                except Exception:
                    pass

        for v in obj.values():
            out = _extract_text_recursive(v, depth + 1)
            if out:
                return out

        return None

    if isinstance(obj, list):
        for item in obj:
            out = _extract_text_recursive(item, depth + 1)
            if out:
                return out
        return None

    return None

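The dict branch above mirrors the payload shape of GitHub's contents API, where the README body arrives base64-encoded under `"content"` with `"encoding": "base64"`. A quick self-contained check (the payload is constructed for the example):

```python
import base64

payload = {
    "name": "README.md",
    "encoding": "base64",
    "content": base64.b64encode(b"# Demo\nHello").decode("ascii"),
}
# The extractor spots the content/encoding pair and decodes it transparently.
assert _extract_text_recursive(payload) == "# Demo\nHello"
```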
class StaticAssetsView(HomeAssistantView):
    url = "/api/bahmcloud_store_static/{path:.*}"
    name = "api:bahmcloud_store_static"
    requires_auth = False

    async def get(self, request: web.Request, path: str) -> web.StreamResponse:
        base = Path(__file__).resolve().parent / "panel"
        base_resolved = base.resolve()

        req_path = (path or "").lstrip("/")
        if req_path == "":
            req_path = "index.html"

        target = (base / req_path).resolve()

        # Containment check: the resolved target must live inside the panel
        # directory. Path.relative_to() avoids the classic prefix-string
        # pitfall where a sibling directory such as ".../panelX" would slip
        # past a plain startswith() comparison.
        try:
            target.relative_to(base_resolved)
        except ValueError:
            return web.Response(status=404)

        if target.is_dir():
            target = (target / "index.html").resolve()

        if not target.exists():
            _LOGGER.error("BCS static asset not found: %s", target)
            return web.Response(status=404)

        resp = web.FileResponse(path=target)
        resp.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0"
        resp.headers["Pragma"] = "no-cache"
        return resp

class BCSApiView(HomeAssistantView):
    url = "/api/bcs"
    name = "api:bcs"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        return web.json_response(
            {"ok": True, "version": self.core.version, "repos": self.core.list_repos_public()}
        )

    async def post(self, request: web.Request) -> web.Response:
        action = request.query.get("action")
        if action == "refresh":
            _LOGGER.info("BCS manual refresh triggered via API")
            try:
                await self.core.full_refresh(source="manual")
                return web.json_response({"ok": True})
            except Exception as e:
                _LOGGER.error("BCS manual refresh failed: %s", e)
                return web.json_response({"ok": False, "message": "Refresh failed"}, status=500)

        try:
            data = await request.json()
        except Exception:
            data = {}

        op = data.get("op")

        if op == "add_custom_repo":
            url = str(data.get("url") or "").strip()
            name = data.get("name")
            name = str(name).strip() if name else None
            if not url:
                return web.json_response({"ok": False, "message": "Missing url"}, status=400)
            repo = await self.core.add_custom_repo(url=url, name=name)
            return web.json_response({"ok": True, "repo": asdict(repo)})

        return web.json_response({"ok": False, "message": "Unknown operation"}, status=400)


class BCSCustomRepoView(HomeAssistantView):
    url = "/api/bcs/custom_repo"
    name = "api:bcs_custom_repo"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def delete(self, request: web.Request) -> web.Response:
        repo_id = request.query.get("id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing id"}, status=400)
        await self.core.remove_custom_repo(repo_id)
        return web.json_response({"ok": True})


class BCSReadmeView(HomeAssistantView):
    url = "/api/bcs/readme"
    name = "api:bcs_readme"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        maybe_md = await self.core.fetch_readme_markdown(repo_id)

        md = _extract_text_recursive(maybe_md)
        if not md or not md.strip():
            t = type(maybe_md).__name__
            return web.json_response(
                {"ok": False, "message": f"README not found or unsupported format (got {t})."},
                status=404,
            )

        md_str = str(md)
        html = _render_markdown_server_side(md_str)
        return web.json_response({"ok": True, "readme": md_str, "html": html})


class BCSInstallView(HomeAssistantView):
    url = "/api/bcs/install"
    name = "api:bcs_install"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            result = await self.core.install_repo(repo_id)
            return web.json_response(result, status=200)
        except Exception as e:
            _LOGGER.exception("BCS install failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Install failed"}, status=500)


class BCSUpdateView(HomeAssistantView):
    url = "/api/bcs/update"
    name = "api:bcs_update"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            result = await self.core.update_repo(repo_id)
            return web.json_response(result, status=200)
        except Exception as e:
            _LOGGER.exception("BCS update failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Update failed"}, status=500)


class BCSRestartView(HomeAssistantView):
    url = "/api/bcs/restart"
    name = "api:bcs_restart"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        try:
            await self.core.request_restart()
            return web.json_response({"ok": True})
        except Exception as e:
            _LOGGER.exception("BCS restart failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Restart failed"}, status=500)
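These views only take effect once registered with Home Assistant's HTTP component, which this diff does not show. A minimal wiring sketch of how that registration would typically look during integration setup; `register_views` and the `core` argument are illustrative names, not part of the diff:

```python
from homeassistant.core import HomeAssistant

def register_views(hass: HomeAssistant, core) -> None:
    # Each HomeAssistantView subclass is instantiated once and registered
    # on the shared aiohttp app via hass.http.
    for view in (
        StaticAssetsView(),
        BCSApiView(core),
        BCSCustomRepoView(core),
        BCSReadmeView(core),
        BCSInstallView(core),
        BCSUpdateView(core),
        BCSRestartView(core),
    ):
        hass.http.register_view(view)
```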