Compare commits
291 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 33502a6d99 | |||
| 1306ee9518 | |||
| e37546cab1 | |||
| 88c3233fd1 | |||
| 02f3047080 | |||
| d4012589e6 | |||
| 8ac67fa60c | |||
| 981490c152 | |||
| 99b2a0f0c5 | |||
| 7ead494765 | |||
| 342b6f6c57 | |||
| 66ca63b2be | |||
| e8325f722f | |||
| 7c1a91937a | |||
| 7ac3289bb7 | |||
| 19bdbd1b9a | |||
| 24363cd2ac | |||
| e19ca5bff1 | |||
| 05897d4370 | |||
| 7a3a28d87f | |||
| 240cded8a9 | |||
| 31e241f052 | |||
| de579682a0 | |||
| 9acbd5046c | |||
| 8d63c88e69 | |||
| cffb0af60e | |||
| 857b7a127a | |||
| 66b24ece48 | |||
| 0cc3b466e0 | |||
| f1e03b31a1 | |||
| 4e12d596d6 | |||
| fa97f89afb | |||
| 0718bee185 | |||
| 1a53107450 | |||
| ab82cc6fd3 | |||
| 8e51f144e1 | |||
| f292e22301 | |||
| 2eb194c001 | |||
| f4e367987a | |||
| 08aa4b5e15 | |||
| b1676482f0 | |||
| e46cd6e488 | |||
| edd2fdd3fb | |||
| a4a0c1462b | |||
| 196e63c08e | |||
| 518ac1d59d | |||
| ad699dc69a | |||
| a8e247d288 | |||
| 318d517575 | |||
| db137be5b1 | |||
| 83cec0f75a | |||
| cda9914d50 | |||
| 3acefbfbe8 | |||
| 4d10c5c91e | |||
| 810ff6fe85 | |||
| b2d3d940f2 | |||
| 8b1d828c59 | |||
| 824a9e5cad | |||
| 1cbc204e88 | |||
| 561c323e67 | |||
| 5c604b40c6 | |||
| cc8db6a034 | |||
| e0ad133221 | |||
| 0e27a03aaf | |||
| e2dfa20789 | |||
| 8e8b58d2d2 | |||
| 76ecaabd98 | |||
| 3f14dc3bd9 | |||
| 50a78714cc | |||
| 3bf01c91f1 | |||
| 7aa14284dd | |||
| 24933e980d | |||
| e10624df6b | |||
| 1a1ebd3821 | |||
| d3d61067db | |||
| 23b605becf | |||
| c07f8615e4 | |||
| 9b209a15bf | |||
| 30258bd2c0 | |||
| 2c8ca490ea | |||
| 9e8a8e81b9 | |||
| f5b2534fdb | |||
| 8b3916c3fa | |||
| 13e71046f8 | |||
| 58e3674325 | |||
| 828d84caa3 | |||
| c18e93406a | |||
| 9af18ba090 | |||
| fff50a1580 | |||
| f8e9967c3a | |||
| 7bc493eb45 | |||
| b97b970a45 | |||
| 593e0c367d | |||
| 8e0817a64b | |||
| dfc7e44565 | |||
| c9c4f99fbf | |||
| 37cc11c9ee | |||
| 9c773c07e8 | |||
| c04612e159 | |||
| 5796012189 | |||
| 01576153d8 | |||
| 30484a08c1 | |||
| faf122aa1c | |||
| 1e86df49e9 | |||
| df631eec9e | |||
| 07240d1268 | |||
| 50587ffbbd | |||
| d6347e7e59 | |||
| 870e77ec13 | |||
| 38fb9fb073 | |||
| c20bd4dd07 | |||
| 296c816633 | |||
| 18a2b5529c | |||
| 246fab7e1e | |||
| ce5802721f | |||
| 2f46966fe2 | |||
| 132f9e27c1 | |||
| 618511be73 | |||
| 6488b434d8 | |||
| bffc594da5 | |||
| d78217100c | |||
| 09e1ef1af5 | |||
| 9ad558c9ab | |||
| 19df0eea22 | |||
| 745979b9a6 | |||
| f861b2490a | |||
| 32946c1a98 | |||
| a9a681d801 | |||
| 2ae6ac43a5 | |||
| 504c126c2c | |||
| 85cc97b557 | |||
| 4ca80a9c88 | |||
| ac5bc8a6f4 | |||
| c4361cc8bd | |||
| 1794d579d2 | |||
| bcfbf7151c | |||
| 38730cdd31 | |||
| 5d5d78d727 | |||
| 67297bfc9c | |||
| 82fda5dfc4 | |||
| 907f14b73c | |||
| 3eefd447ac | |||
| 72ce95525c | |||
| 081f277b92 | |||
| 28b86e19e1 | |||
| e863677428 | |||
| 3f6da60c0d | |||
| 8ac0ef103c | |||
| 6a0132a25c | |||
| 60c46aa121 | |||
| 1caad401f6 | |||
| 5684c3d5f1 | |||
| f3863ee227 | |||
| e867e82a2d | |||
| 2770e56219 | |||
| 25b9c79114 | |||
| 2c50765d66 | |||
| 1837ed4a13 | |||
| c4f0f94a6f | |||
| 1dbffcc27c | |||
| a2d123abbf | |||
| 0e6088070e | |||
| dc32010bf9 | |||
| 3ed6a1a18c | |||
| 8ff5ab2e31 | |||
| 2a0dc9d66c | |||
| fbfc2e3a6e | |||
| 19a5c0fecb | |||
| c0ec7b1797 | |||
| 25f966853a | |||
| 3edeab514b | |||
| d713bf779f | |||
| 2b78feeadf | |||
| a940c68e9e | |||
| 6c3cdcde61 | |||
| 1fc274bf7c | |||
| 4ff94bc185 | |||
| b95b3f5626 | |||
| 30d47b775b | |||
| bedf6b6bf8 | |||
| e2f8b4625a | |||
| bb340108e2 | |||
| a617ca6709 | |||
| 7789430d4a | |||
| 4e8116265d | |||
| 06796cf57b | |||
| dbcac9df86 | |||
| 6ca193580d | |||
| 3773b07650 | |||
| 24dcc92c00 | |||
| 596491f885 | |||
| 97c9f01a0a | |||
| c39a948c59 | |||
| 4fd0a6ec48 | |||
| b84ab944b3 | |||
| ce4bd4f4f1 | |||
| 2dce858a51 | |||
| 2c8da4a049 | |||
| 692f0b47da | |||
| 106872063a | |||
| 597d1556ff | |||
| c4d9f7b393 | |||
| f15d932d54 | |||
| ec60211339 | |||
| 1305656d10 | |||
| 1c8a83effc | |||
| 066d1ff2a4 | |||
| d1a8526d2d | |||
| f60b3a8730 | |||
| b5e98898e0 | |||
| 236099e562 | |||
| 08a59ec56e | |||
| 5cf8e6d40f | |||
| 8b21d070f3 | |||
| 7219f82e7f | |||
| c91a4ecba2 | |||
| 2b0bfb4caa | |||
| e10b23a44a | |||
| 6d273cc182 | |||
| 35839d9c65 | |||
| 6f0f588b03 | |||
| 5b56b59ae8 | |||
| 8ab487f00a | |||
| 0ea9319ba4 | |||
| 46508f1c34 | |||
| 12f4aec1f7 | |||
| 225442f549 | |||
| 714ced5d2c | |||
| 64835b719f | |||
| acb01b9768 | |||
| bf29faab04 | |||
| adb117672c | |||
| 8cee9e5e4d | |||
| 77b4522e3c | |||
| bae4d0b84f | |||
| c022b90fb5 | |||
| 97c2672119 | |||
| 47e1524aef | |||
| 0bc824fe4a | |||
| c500234e1d | |||
| d27782ea9c | |||
| 6088d0a935 | |||
| fbdc8aed0f | |||
| 3723c403c7 | |||
| 95a7a0689b | |||
| 9d04aeaa58 | |||
| f65819ffab | |||
| ad9c4ea421 | |||
| e0cecfcc68 | |||
| fbac0ac57f | |||
| daa51cd59c | |||
| 3a88d2c402 | |||
| 9ff89d18f3 | |||
| 93ace71a12 | |||
| 30a4daa884 | |||
| b40e509362 | |||
| d04bf2a3f1 | |||
| c490c7856c | |||
| bd274faf88 | |||
| 638ac9a7ec | |||
| 3eb6d24439 | |||
| 013b0baa83 | |||
| b4b6b2b987 | |||
| d226edaac8 | |||
| 0e081e8cce | |||
| bd50c487b1 | |||
| 58e62b864e | |||
| 41fc0da76c | |||
| ce52920c6d | |||
| 5c47479f45 | |||
| 80eefabbc2 | |||
| 0339ad4ecb | |||
| 3f07c09c36 | |||
| dd634fca32 | |||
| 2aebc45707 | |||
| 4f3a7fb436 | |||
| 8e01de3440 | |||
| b7ed65b49d | |||
| 15349d93a2 | |||
| 124693e545 | |||
| 3aee3886b1 | |||
| 199bda2e0f | |||
| 8d1ed31431 | |||
| c36321db43 | |||
| 806524ad33 | |||
| 2da0cfe07d | |||
| 603277d6f5 | |||
| a1bdf9dd40 | |||
| 2746c5295a | |||
| 7bac73a37f | |||
| 96cdf234db |
325
CHANGELOG.md
Normal file
325
CHANGELOG.md
Normal file
@@ -0,0 +1,325 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this repository will be documented in this file.
|
||||
|
||||
Sections:
|
||||
- Added
|
||||
- Changed
|
||||
- Fixed
|
||||
- Removed
|
||||
- Security
|
||||
|
||||
---
|
||||
|
||||
## [0.6.7] - 2026-01-19
|
||||
|
||||
### Fixed
|
||||
- Repository metadata loaded on demand is now persisted and restored after Home Assistant restart.
|
||||
- Background enrichment reliably updates latest versions and descriptions for HACS repositories.
|
||||
- Cached repository data is correctly restored on startup for non-installed repositories.
|
||||
|
||||
### Changed
|
||||
- Repository details are only considered fully enriched once latest version information is available.
|
||||
- Improved reliability of background cache synchronization without impacting startup performance.
|
||||
|
||||
## [0.6.6] - 2026-01-18
|
||||
|
||||
### Added
|
||||
- Source filter to limit repositories by origin: BCS Official, HACS, or Custom.
|
||||
- Visual source badges for repositories (BCS Official, HACS, Custom).
|
||||
- Restored HACS enable/disable toggle in the Store UI.
|
||||
|
||||
### Changed
|
||||
- HACS repositories now display human-readable names and descriptions based on official HACS metadata.
|
||||
- Improved Store usability on mobile devices by fixing back navigation from repository detail view.
|
||||
|
||||
### Fixed
|
||||
- Fixed missing HACS toggle after UI updates.
|
||||
- Fixed mobile browser back button exiting the Store instead of returning to the repository list.
|
||||
|
||||
## [0.6.5] - 2026-01-18
|
||||
|
||||
### Added
|
||||
- Separate handling of HACS official repositories with an enable/disable toggle in the Store UI.
|
||||
- HACS repositories are now loaded independently from the main store index.
|
||||
|
||||
### Changed
|
||||
- Store index can remain minimal and curated; HACS repositories are no longer required in store.yaml.
|
||||
- Improved Store performance and clarity by clearly separating repository sources.
|
||||
|
||||
### Fixed
|
||||
- Browser cache issues resolved by proper panel cache-busting for UI updates.
|
||||
|
||||
### Internal
|
||||
- No changes to install, update, backup, or restore logic.
|
||||
- Fully backward compatible with existing installations and configurations.
|
||||
|
||||
## [0.6.4] - 2026-01-18
|
||||
|
||||
### Fixed
|
||||
- Fixed long Home Assistant startup times caused by background repository enrichment running too early.
|
||||
|
||||
### Changed
|
||||
- Background repository enrichment is now started only after Home Assistant has fully started.
|
||||
- Repository cache updates now run fully asynchronous without blocking Home Assistant startup.
|
||||
|
||||
### Internal
|
||||
- Improved alignment with Home Assistant startup lifecycle.
|
||||
- No functional changes to store behavior or UI.
|
||||
|
||||
## [0.6.3] - 2026-01-18
|
||||
|
||||
### Changed
|
||||
- Improved Store performance for large indexes by avoiding full metadata enrichment during list refresh.
|
||||
- Repository details are loaded on demand, reducing initial load time and network requests.
|
||||
- Index refresh is skipped when the index content has not changed.
|
||||
|
||||
## [0.6.2] - 2026-01-18
|
||||
|
||||
### Added
|
||||
- Selectable install/update version per repository (install older releases/tags to downgrade when needed).
|
||||
- New API endpoint to list available versions for a repository: `GET /api/bcs/versions?repo_id=...`.
|
||||
|
||||
## [0.6.1] - 2026-01-18
|
||||
|
||||
### Fixed
|
||||
- Restored integrations now correctly report the restored version instead of the latest installed version.
|
||||
- Update availability is correctly recalculated after restoring a backup, allowing updates to be applied again.
|
||||
- Improved restore compatibility with backups created before version metadata was introduced.
|
||||
|
||||
## [0.6.0] - 2026-01-18
|
||||
|
||||
### Added
|
||||
- Restore feature with selection of the last available backups (up to 5 per domain).
|
||||
- New API endpoints to list and restore backups:
|
||||
- `GET /api/bcs/backups?repo_id=...`
|
||||
- `POST /api/bcs/restore?repo_id=...&backup_id=...`
|
||||
|
||||
### Safety
|
||||
- Restoring a backup triggers a “restart required” prompt to apply the recovered integration state.
|
||||
|
||||
### Notes
|
||||
- This is a major release milestone consolidating install/update/uninstall, backup/rollback, and restore workflows.
|
||||
|
||||
## [0.5.11] - 2026-01-18
|
||||
|
||||
### Added
|
||||
- Automatic backup of existing custom components before install or update.
|
||||
- Backup retention with a configurable limit per domain.
|
||||
|
||||
### Safety
|
||||
- Automatic rollback is triggered if an install or update fails after a backup was created.
|
||||
|
||||
## [0.5.10] - 2026-01-17
|
||||
|
||||
### Added
|
||||
- Store view controls: Filter and Sort dropdowns alongside the existing Category selector.
|
||||
|
||||
### Fixed
|
||||
- Search input focus is preserved while typing (cursor no longer jumps out after re-render).
|
||||
|
||||
## [0.5.9] - 2026-01-17
|
||||
|
||||
### Changed
|
||||
- README is now collapsible with a preview by default (Show more / Show less).
|
||||
- Improved mobile readability by keeping long README content contained without affecting the page layout.
|
||||
|
||||
## [0.5.8] - 2026-01-17
|
||||
|
||||
### Changed
|
||||
- Mobile UI layout stabilized to prevent horizontal shifting.
|
||||
- README rendering no longer expands the page width on mobile devices.
|
||||
- Tables and code blocks inside README now scroll within their container.
|
||||
- Floating action buttons removed to avoid UI overlap on small screens.
|
||||
- Header icon buttons improved for better visibility in light and dark mode.
|
||||
|
||||
## [0.5.7] - 2026-01-17
|
||||
|
||||
### Changed
|
||||
- Mobile UI improvements: removed floating action buttons to prevent overlay issues.
|
||||
- Improved responsive layout to avoid horizontal overflow (badges, URLs, descriptions).
|
||||
- README rendering on mobile is more stable (better wrapping and image scaling).
|
||||
- Header icon buttons are more readable in both light and dark mode.
|
||||
|
||||
## [0.5.6] - 2026-01-17
|
||||
|
||||
### Added
|
||||
- Repository uninstall support directly from the Store UI.
|
||||
- New backend API endpoint: `POST /api/bcs/uninstall`.
|
||||
- Automatic **reconcile**: repositories are marked as not installed when their `custom_components` directories are removed manually.
|
||||
|
||||
### Changed
|
||||
- Installation & Updates section extended with an Uninstall button.
|
||||
- Store state now remains consistent even after manual file system changes.
|
||||
|
||||
### Fixed
|
||||
- Repositories remained marked as installed after manual deletion of their domains.
|
||||
- UI cache issues caused by outdated static assets.
|
||||
|
||||
## [0.5.5] - 2026-01-16
|
||||
|
||||
### Fixed
|
||||
- Update entities now refresh their displayed name after store refreshes, so repository names replace fallback IDs (e.g. `index:1`) reliably.
|
||||
|
||||
## [0.5.4] - 2026-01-16
|
||||
|
||||
### Added
|
||||
- Native **Repair fix flow** for restart-required situations.
|
||||
- “Restart required” issues are now **fixable** and provide a confirmation dialog with a real restart action.
|
||||
|
||||
### Changed
|
||||
- Restart-required issues are automatically cleared after Home Assistant restarts.
|
||||
- Update entities now fully align with official Home Assistant behavior (Updates screen + Repairs integration).
|
||||
|
||||
### Fixed
|
||||
- Fixed integration startup issues caused by incorrect file placement.
|
||||
- Resolved circular import and missing setup errors during Home Assistant startup.
|
||||
- Ensured YAML-based setup remains fully supported.
|
||||
|
||||
## [0.5.3] - 2026-01-16
|
||||
|
||||
### Added
|
||||
- Native Home Assistant Update entities for installed repositories (shown under **Settings → System → Updates**).
|
||||
- Human-friendly update names based on repository name (instead of internal repo IDs like `index:1`).
|
||||
|
||||
### Changed
|
||||
- Update UI now behaves like official Home Assistant integrations (update action is triggered via the HA Updates screen).
|
||||
|
||||
## [0.5.2] - 2026-01-16
|
||||
|
||||
### Added
|
||||
- Install and update backend endpoints (`POST /api/bcs/install`, `POST /api/bcs/update`) to install repositories into `/config/custom_components`.
|
||||
- Installed version tracking based on the actually installed ref (tag/release/branch), stored persistently to support repositories with outdated/`0.0.0` manifest versions.
|
||||
- API fields `installed_version` (installed ref) and `installed_manifest_version` (informational) to improve transparency in the UI.
|
||||
|
||||
### Changed
|
||||
- Update availability is now evaluated using the stored installed ref (instead of `manifest.json` version), preventing false-positive updates when repositories do not maintain manifest versions.
|
||||
|
||||
### Fixed
|
||||
- Repositories with `manifest.json` version `0.0.0` (or stale versions) no longer appear as constantly requiring updates after installing the latest release/tag.
|
||||
|
||||
## [0.5.0] - 2026-01-15
|
||||
|
||||
### Added
|
||||
- Manual refresh button that triggers a full backend refresh (store index + provider data).
|
||||
- Unified refresh pipeline: startup, timer and UI now use the same refresh logic.
|
||||
- Cache-busting for store index requests to always fetch the latest store.yaml.
|
||||
|
||||
### Improved
|
||||
- Logging for store index loading and parsing.
|
||||
- Refresh behavior now deterministic and verifiable via logs.
|
||||
|
||||
### Fixed
|
||||
- Refresh button previously only reloaded cached data.
|
||||
- Store index was not always reloaded immediately on user action.
|
||||
|
||||
## [0.4.1] - 2026-01-15
|
||||
### Fixed
|
||||
- Fixed GitLab README loading by using robust raw file endpoints.
|
||||
- Added support for nested GitLab groups when resolving README paths.
|
||||
- Added fallback handling for multiple README filenames (`README.md`, `README`, `README.rst`, etc.).
|
||||
- Added branch fallback logic for README loading (`default`, `main`, `master`).
|
||||
- Improved error resilience so README loading failures never break the store core.
|
||||
- No behavior change for GitHub and Gitea providers.
|
||||
|
||||
## [0.4.0] - 2026-01-15
|
||||
|
||||
### Added
|
||||
- Initial public release of the Bahmcloud Store integration.
|
||||
- Sidebar panel with repository browser UI.
|
||||
- Support for loading repositories from a central `store.yaml` index.
|
||||
- Support for custom repositories added by the user.
|
||||
- Provider abstraction for GitHub, GitLab and Gitea:
|
||||
- Fetch repository information (name, description, default branch).
|
||||
- Resolve latest version from:
|
||||
- Releases
|
||||
- Tags
|
||||
- Fallback mechanisms.
|
||||
- Repository metadata support via:
|
||||
- `bcs.yaml`
|
||||
- `hacs.yaml`
|
||||
- `hacs.json`
|
||||
- README loading and rendering pipeline:
|
||||
- Fetch raw README files.
|
||||
- Server-side Markdown rendering.
|
||||
- Sanitized HTML output for the panel UI.
|
||||
- Auto refresh mechanism for store index and repository metadata.
|
||||
- API endpoints:
|
||||
- List repositories
|
||||
- Add custom repository
|
||||
- Remove repository
|
||||
Persisted via Home Assistant storage (`.storage/bcs_store`).
|
||||
- Public static asset endpoint for panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
|
||||
- Initial API namespace:
|
||||
- `GET /api/bcs` list merged repositories (index + custom)
|
||||
- `POST /api/bcs` add custom repository
|
||||
- `DELETE /api/bcs/custom_repo` remove custom repository
|
||||
|
||||
### Changed
|
||||
- Repository cards are now clickable to open the detail view.
|
||||
|
||||
## [0.3.2] - 2026-01-15
|
||||
|
||||
### Added
|
||||
- Metadata resolver:
|
||||
- Reads `bcs.yaml` (preferred), then `hacs.yaml`, then `hacs.json` from repository root.
|
||||
- Extracts `name`, `description`, `category`, `author`, `maintainer` (best-effort).
|
||||
- UI now prefers metadata description over provider description.
|
||||
- Provider repository name is now only used as a fallback if no metadata name is provided.
|
||||
|
||||
### Changed
|
||||
- Repo display name priority:
|
||||
1) metadata (`bcs.yaml` / `hacs.*`)
|
||||
2) store index name (store.yaml)
|
||||
3) provider repo name
|
||||
4) repository URL
|
||||
|
||||
## [0.3.1] - 2026-01-15
|
||||
|
||||
### Fixed
|
||||
- Panel header version is now derived from `manifest.json` via backend API (no more hardcoded version strings).
|
||||
- Mobile navigation/header visibility improved by explicitly disabling iframe embedding for the custom panel.
|
||||
- When adding a custom repository without a display name, the name is now fetched from the git provider (GitHub/Gitea) and shown automatically.
|
||||
|
||||
## [0.3.0] - 2026-01-15
|
||||
|
||||
### Added
|
||||
- Repository enrichment for the Store UI:
|
||||
- GitHub: fetch owner and description via GitHub REST API.
|
||||
- Gitea: fetch owner and description via Gitea REST API (`/api/v1`).
|
||||
- Provider detection for GitHub/GitLab/Gitea (best-effort).
|
||||
- Automatic UI description line populated from provider data (when available).
|
||||
|
||||
### Changed
|
||||
- Panel module URL cache-busting updated to avoid stale frontend assets.
|
||||
|
||||
### Fixed
|
||||
- Store "Refresh" now triggers immediate backend refresh (from 0.2.0).
|
||||
- Avoided circular imports by using TYPE_CHECKING for type references.
|
||||
|
||||
### Notes
|
||||
- Installation/README details view/update entities will be added in later versions.
|
||||
|
||||
|
||||
## [0.2.0] - 2026-01-15
|
||||
|
||||
### Added
|
||||
- Foundation architecture for BCS (Bahmcloud Component Store) inside a Home Assistant custom component.
|
||||
- Custom panel (no iframe) using `hass.callApi()` to avoid authentication issues.
|
||||
- Store index loader (`store.yaml`) with periodic refresh (data only).
|
||||
- Manual repository management:
|
||||
- Add repository
|
||||
- List repositories
|
||||
- Remove repository
|
||||
Persisted via Home Assistant storage (`.storage/bcs_store`).
|
||||
- Public static asset endpoint for panel JS (`/api/bahmcloud_store_static/...`) without auth (required for HA custom panels).
|
||||
- Initial API namespace:
|
||||
- `GET /api/bcs` list merged repositories (index + custom)
|
||||
- `POST /api/bcs` add custom repository
|
||||
- `DELETE /api/bcs/custom_repo` remove custom repository
|
||||
|
||||
### Changed
|
||||
- Store API/UI terminology standardized to "BCS" (Bahmcloud Component Store), while integration domain remains `bahmcloud_store` for compatibility.
|
||||
|
||||
### Notes
|
||||
- Installation, README rendering, provider enrichment (GitHub/Gitea/GitLab), and Update entities will be implemented in later versions.
|
||||
@@ -1,3 +1,3 @@
|
||||
# bahmcloud_store
|
||||
|
||||
Bahmcloud Store for installing custom_components to Home Assistant
|
||||
Bahmcloud Store for installing custom_components to Home Assistant
|
||||
19
bcs.yaml
Normal file
19
bcs.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Bahmcloud Store
|
||||
description: >
|
||||
Provider-neutral custom integration store for Home Assistant.
|
||||
Supports GitHub, GitLab, Gitea and Bahmcloud repositories with
|
||||
a central index, UI panel and API, similar to HACS but independent.
|
||||
|
||||
category: Integrations
|
||||
|
||||
author: Bahmcloud
|
||||
maintainer: Bahmcloud
|
||||
|
||||
domains:
|
||||
- bahmcloud_store
|
||||
|
||||
min_ha_version: "2024.1.0"
|
||||
|
||||
homepage: https://git.bahmcloud.de/bahmcloud/bahmcloud_store
|
||||
issues: https://git.bahmcloud.de/bahmcloud/bahmcloud_store/issues
|
||||
source: https://git.bahmcloud.de/bahmcloud/bahmcloud_store
|
||||
@@ -4,12 +4,12 @@ import logging
|
||||
from datetime import timedelta
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
from homeassistant.components.panel_custom import async_register_panel
|
||||
from homeassistant.helpers.event import async_track_time_interval, async_call_later
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
|
||||
from .store import BahmcloudStore, StoreConfig, StoreError
|
||||
from .core import BCSCore, BCSConfig, BCSError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -20,46 +20,81 @@ CONF_STORE_URL = "store_url"
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
|
||||
cfg = config.get(DOMAIN, {})
|
||||
cfg = config.get(DOMAIN, {}) or {}
|
||||
store_url = cfg.get(CONF_STORE_URL, DEFAULT_STORE_URL)
|
||||
|
||||
store = BahmcloudStore(hass, StoreConfig(store_url=store_url))
|
||||
hass.data[DOMAIN] = store
|
||||
core = BCSCore(hass, BCSConfig(store_url=store_url))
|
||||
hass.data[DOMAIN] = core
|
||||
|
||||
# HTTP Views (Panel static + JSON API)
|
||||
await store.register_http_views()
|
||||
await core.async_initialize()
|
||||
|
||||
# Provide native Update entities in Settings -> System -> Updates.
|
||||
# This integration is YAML-based (async_setup), therefore we load the platform manually.
|
||||
await async_load_platform(hass, "update", DOMAIN, {}, config)
|
||||
|
||||
from .views import (
|
||||
StaticAssetsView,
|
||||
BCSApiView,
|
||||
BCSSettingsView,
|
||||
BCSReadmeView,
|
||||
BCSVersionsView,
|
||||
BCSRepoDetailView,
|
||||
BCSCustomRepoView,
|
||||
BCSInstallView,
|
||||
BCSUpdateView,
|
||||
BCSUninstallView,
|
||||
BCSBackupsView,
|
||||
BCSRestoreView,
|
||||
BCSRestartView,
|
||||
)
|
||||
|
||||
hass.http.register_view(StaticAssetsView())
|
||||
hass.http.register_view(BCSApiView(core))
|
||||
hass.http.register_view(BCSSettingsView(core))
|
||||
hass.http.register_view(BCSReadmeView(core))
|
||||
hass.http.register_view(BCSVersionsView(core))
|
||||
hass.http.register_view(BCSRepoDetailView(core))
|
||||
hass.http.register_view(BCSCustomRepoView(core))
|
||||
hass.http.register_view(BCSInstallView(core))
|
||||
hass.http.register_view(BCSUpdateView(core))
|
||||
hass.http.register_view(BCSUninstallView(core))
|
||||
hass.http.register_view(BCSBackupsView(core))
|
||||
hass.http.register_view(BCSRestoreView(core))
|
||||
hass.http.register_view(BCSRestartView(core))
|
||||
|
||||
# Sidebar Panel (Custom Panel + JS module)
|
||||
await async_register_panel(
|
||||
hass,
|
||||
frontend_url_path="bahmcloud-store",
|
||||
webcomponent_name="bahmcloud-store-panel",
|
||||
module_url="/api/bahmcloud_store_static/panel.js",
|
||||
# IMPORTANT: bump v to avoid caching old JS
|
||||
module_url="/api/bahmcloud_store_static/panel.js?v=108",
|
||||
sidebar_title="Bahmcloud Store",
|
||||
sidebar_icon="mdi:store",
|
||||
require_admin=True,
|
||||
config={},
|
||||
)
|
||||
|
||||
# Initial index load
|
||||
async def _do_startup_refresh(_now=None) -> None:
|
||||
try:
|
||||
await store.refresh()
|
||||
except StoreError as e:
|
||||
_LOGGER.error("Initial store refresh failed: %s", e)
|
||||
await core.full_refresh(source="startup")
|
||||
except BCSError as e:
|
||||
_LOGGER.error("Initial refresh failed: %s", e)
|
||||
|
||||
# Do not block Home Assistant startup. Schedule the initial refresh after HA started.
|
||||
def _on_ha_started(_event) -> None:
|
||||
async_call_later(hass, 30, _do_startup_refresh)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _on_ha_started)
|
||||
|
||||
# Periodically refresh only the repository list + latest versions (no auto-install)
|
||||
async def periodic(_now) -> None:
|
||||
try:
|
||||
await store.refresh()
|
||||
store.signal_entities_updated()
|
||||
except StoreError as e:
|
||||
await core.full_refresh(source="timer")
|
||||
except BCSError as e:
|
||||
_LOGGER.warning("Periodic refresh failed: %s", e)
|
||||
except Exception as e: # pylint: disable=broad-exception-caught
|
||||
_LOGGER.exception("Unexpected error during periodic refresh: %s", e)
|
||||
|
||||
# Use refresh_seconds from store.yaml when present, otherwise fall back to 300s
|
||||
interval_seconds = store.refresh_seconds if getattr(store, "refresh_seconds", None) else 300
|
||||
async_track_time_interval(hass, periodic, timedelta(seconds=int(interval_seconds)))
|
||||
|
||||
# Load the update platform (so updates show up in Settings)
|
||||
await async_load_platform(hass, Platform.UPDATE, DOMAIN, {}, config)
|
||||
interval_seconds = int(getattr(core, "refresh_seconds", 300) or 300)
|
||||
async_track_time_interval(hass, periodic, timedelta(seconds=interval_seconds))
|
||||
|
||||
return True
|
||||
1622
custom_components/bahmcloud_store/core.py
Normal file
1622
custom_components/bahmcloud_store/core.py
Normal file
File diff suppressed because it is too large
Load Diff
28
custom_components/bahmcloud_store/custom_repo_view.py
Normal file
28
custom_components/bahmcloud_store/custom_repo_view.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .core import BCSCore
|
||||
|
||||
|
||||
class BCSCustomRepoView(HomeAssistantView):
    """HTTP view handling ``DELETE /api/bcs/custom_repo?id=...``.

    Removes a user-added (custom) repository from the store core.
    Requires an authenticated (admin-capable) Home Assistant session.
    """

    requires_auth = True
    name = "bcs_custom_repo_api"
    url = "/api/bcs/custom_repo"

    def __init__(self, core: "BCSCore") -> None:
        """Keep a reference to the store core used to mutate repositories."""
        self.core = core

    async def delete(self, request):
        """Delete the custom repository identified by the ``id`` query parameter.

        Responds with 400 when the parameter is missing or blank, otherwise
        removes the repository and returns ``{"ok": true}``.
        """
        target = request.query.get("id", "").strip()
        if target:
            await self.core.remove_custom_repo(target)
            return self.json({"ok": True})
        return self.json({"error": "id missing"}, status_code=400)
||||
@@ -1,9 +1,10 @@
|
||||
{
|
||||
"domain": "bahmcloud_store",
|
||||
"name": "Bahmcloud Store",
|
||||
"version": "0.1.0",
|
||||
"documentation": "https://git.bahmcloud.de/bahmcloud/ha_store",
|
||||
"version": "0.6.7",
|
||||
"documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
|
||||
"platforms": ["update"],
|
||||
"requirements": [],
|
||||
"codeowners": [],
|
||||
"codeowners": ["@bahmcloud"],
|
||||
"iot_class": "local_polling"
|
||||
}
|
||||
168
custom_components/bahmcloud_store/metadata.py
Normal file
168
custom_components/bahmcloud_store/metadata.py
Normal file
@@ -0,0 +1,168 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util import yaml as ha_yaml
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class RepoMetadata:
    """Best-effort metadata extracted from a repository's metadata file.

    All fields default to ``None`` so an empty instance can signal
    "no metadata found" without special-casing by callers.
    """

    # Which file the metadata came from: "bcs.yaml" | "hacs.yaml" | "hacs.json" | None
    source: str | None = None
    name: str | None = None
    description: str | None = None
    category: str | None = None
    author: str | None = None
    maintainer: str | None = None
|
||||
|
||||
def _normalize_repo_name(name: str | None) -> str | None:
|
||||
if not name:
|
||||
return None
|
||||
n = name.strip()
|
||||
if n.endswith(".git"):
|
||||
n = n[:-4]
|
||||
return n or None
|
||||
|
||||
|
||||
def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
    """Extract ``(owner, repo)`` from the path of *repo_url*.

    Returns ``(None, None)`` when the URL path has fewer than two
    segments. The repo segment is stripped and loses any ``.git`` suffix.
    """
    parsed = urlparse(repo_url.rstrip("/"))
    segments = [seg for seg in parsed.path.strip("/").split("/") if seg]
    if len(segments) < 2:
        return None, None
    owner = segments[0].strip() or None
    # Inlined repo-name normalization: strip + drop trailing ".git".
    repo_part = segments[1].strip()
    if repo_part.endswith(".git"):
        repo_part = repo_part[:-4]
    return owner, repo_part or None
|
||||
|
||||
def _is_github(repo_url: str) -> bool:
|
||||
return "github.com" in urlparse(repo_url).netloc.lower()
|
||||
|
||||
|
||||
def _is_gitlab(repo_url: str) -> bool:
|
||||
return "gitlab" in urlparse(repo_url).netloc.lower()
|
||||
|
||||
|
||||
def _is_gitea(repo_url: str) -> bool:
|
||||
host = urlparse(repo_url).netloc.lower()
|
||||
return host and ("github.com" not in host) and ("gitlab" not in host)
|
||||
|
||||
|
||||
async def _fetch_text(hass: HomeAssistant, url: str) -> str | None:
    """Best-effort HTTP GET of *url*.

    Returns the response body as text on HTTP 200, otherwise ``None``.
    Any network/timeout error is swallowed deliberately: metadata
    fetching must never break the store core.
    """
    session = async_get_clientsession(hass)
    try:
        async with session.get(url, timeout=20) as resp:
            if resp.status == 200:
                return await resp.text()
            return None
    except Exception:  # deliberate best-effort: never raise to callers
        return None
|
||||
|
||||
def _parse_meta_yaml(raw: str, source: str) -> RepoMetadata:
    """Parse a ``bcs.yaml`` / ``hacs.yaml`` payload into RepoMetadata.

    Malformed YAML or a non-mapping document yields an otherwise-empty
    RepoMetadata carrying only *source* (best-effort contract).
    """
    try:
        parsed = ha_yaml.parse_yaml(raw)
    except Exception:  # malformed YAML must never break enrichment
        return RepoMetadata(source=source)

    if not isinstance(parsed, dict):
        return RepoMetadata(source=source)

    return RepoMetadata(
        source=source,
        name=parsed.get("name"),
        description=parsed.get("description"),
        category=parsed.get("category"),
        author=parsed.get("author"),
        maintainer=parsed.get("maintainer"),
    )
|
||||
|
||||
def _parse_meta_hacs_json(raw: str) -> RepoMetadata:
    """Parse a ``hacs.json`` payload into RepoMetadata.

    Non-string field values are discarded. HACS historically uses either
    ``category`` or the older ``type`` key for the category. Any parse
    failure yields an empty RepoMetadata tagged with source="hacs.json".
    """

    def _text(value) -> str | None:
        # Keep only genuine strings; drop lists/dicts/numbers silently.
        return value if isinstance(value, str) else None

    try:
        payload = json.loads(raw)
    except Exception:
        return RepoMetadata(source="hacs.json")

    if not isinstance(payload, dict):
        return RepoMetadata(source="hacs.json")

    return RepoMetadata(
        source="hacs.json",
        name=_text(payload.get("name")),
        description=_text(payload.get("description")),
        category=_text(payload.get("category") or payload.get("type")),
        author=_text(payload.get("author")),
        maintainer=_text(payload.get("maintainer")),
    )
|
||||
|
||||
async def fetch_repo_metadata(hass: HomeAssistant, repo_url: str, default_branch: str | None) -> RepoMetadata:
    """Fetch store metadata (bcs.yaml / hacs.yaml / hacs.json) for a repository.

    Builds raw-file candidate URLs for the detected provider and returns the
    first candidate that downloads and parses. Returns an empty RepoMetadata
    when the URL is unusable, the provider is unknown, or nothing parses.
    """
    owner, repo = _split_owner_repo(repo_url)
    if not owner or not repo:
        return RepoMetadata()

    branch = default_branch or "main"

    # Priority:
    # 1) bcs.yaml
    # 2) hacs.yaml
    # 3) hacs.json
    filenames = ["bcs.yaml", "hacs.yaml", "hacs.json"]

    # (filename, raw URL) pairs, tried strictly in order.
    candidates: list[tuple[str, str]] = []

    if _is_github(repo_url):
        base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
        for fn in filenames:
            candidates.append((fn, f"{base}/{fn}"))

    elif _is_gitlab(repo_url):
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
        # GitLab raw format
        # https://gitlab.com/<owner>/<repo>/-/raw/<branch>/<file>
        for fn in filenames:
            candidates.append((fn, f"{root}/-/raw/{branch}/{fn}"))

    elif _is_gitea(repo_url):
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"

        # Two raw-URL layouts are tried (presumably for different Gitea
        # versions/configurations — TODO confirm which versions need which).
        bases = [
            f"{root}/raw/branch/{branch}",
            f"{root}/raw/{branch}",
        ]
        for fn in filenames:
            for b in bases:
                candidates.append((fn, f"{b}/{fn}"))

    else:
        # Unknown provider: nothing to try.
        return RepoMetadata()

    for fn, url in candidates:
        raw = await _fetch_text(hass, url)
        if not raw:
            continue

        if fn.endswith(".json"):
            meta = _parse_meta_hacs_json(raw)
            if meta.source:
                return meta
            continue

        meta = _parse_meta_yaml(raw, fn)
        if meta.source:
            return meta

    return RepoMetadata()
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,24 @@
|
||||
body { font-family: system-ui, sans-serif; margin:0; }
|
||||
.wrap { padding: 16px; max-width: 1000px; margin: 0 auto; }
|
||||
.card { border: 1px solid #ddd; border-radius: 10px; padding: 12px; margin: 10px 0; }
|
||||
.row { display:flex; justify-content:space-between; gap: 12px; align-items: flex-start; }
|
||||
.row { display:flex; justify-content:space-between; gap: 12px; align-items: flex-start; flex-wrap: wrap; min-width:0; }
|
||||
.badge { border: 1px solid #bbb; border-radius: 999px; padding: 2px 8px; font-size: 12px; height: fit-content; }
|
||||
.muted { color: #666; font-size: 13px; margin-top: 4px; }
|
||||
.actions { display:flex; gap: 8px; margin-top: 10px; }
|
||||
button { padding: 8px 12px; cursor:pointer; }
|
||||
button[disabled] { opacity: 0.6; cursor: not-allowed; }
|
||||
a { color: inherit; }
|
||||
|
||||
/* Basic markdown safety (in case styles.css is used by older panels) */
|
||||
.md { max-width: 100%; overflow-x: auto; }
|
||||
.md table { display:block; max-width:100%; overflow-x:auto; }
|
||||
.md img { max-width: 100%; height: auto; }
|
||||
|
||||
/* README UX (E2): collapsible preview (standalone page only) */
|
||||
.readmeWrap{ border:1px solid #ddd; border-radius:10px; padding:12px; background: #f7f7f7; max-width:100%; }
|
||||
.readmeWrap.collapsed{ max-height:260px; overflow:hidden; position:relative; }
|
||||
.readmeWrap.collapsed::after{ content:""; position:absolute; left:0; right:0; bottom:0; height:56px; background: linear-gradient(to bottom, rgba(247,247,247,0), #f7f7f7); pointer-events:none; }
|
||||
.readmeWrap.expanded{ max-height:70vh; overflow:auto; }
|
||||
.readmeActions{ display:flex; justify-content:flex-end; margin-top:10px; }
|
||||
button.link{ border:none; background:transparent; padding:6px 10px; color:#1E88E5; }
|
||||
button.link:hover{ text-decoration:underline; }
|
||||
|
||||
663
custom_components/bahmcloud_store/providers.py
Normal file
663
custom_components/bahmcloud_store/providers.py
Normal file
@@ -0,0 +1,663 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import quote_plus, urlparse
|
||||
|
||||
from packaging.version import InvalidVersion, Version
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
UA = "BahmcloudStore (Home Assistant)"
|
||||
|
||||
|
||||
@dataclass
class RepoInfo:
    """Provider-agnostic snapshot of a repository, resolved via public endpoints."""

    owner: str | None = None
    repo_name: str | None = None
    description: str | None = None
    provider: str | None = None  # "github" | "gitlab" | "gitea" (see detect_provider)
    default_branch: str | None = None

    latest_version: str | None = None
    latest_version_source: str | None = None  # "release" | "tag" | "atom" | None
|
||||
|
||||
|
||||
def _normalize_repo_name(name: str | None) -> str | None:
|
||||
if not name:
|
||||
return None
|
||||
n = name.strip()
|
||||
if n.endswith(".git"):
|
||||
n = n[:-4]
|
||||
return n or None
|
||||
|
||||
|
||||
def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
    """Extract ``(owner, repo)`` from a repository URL.

    Returns ``(None, None)`` when the URL path has fewer than two segments.
    The repo segment is normalized (``.git`` suffix dropped).
    """
    path = urlparse(repo_url.rstrip("/")).path
    segments = [segment for segment in path.strip("/").split("/") if segment]
    if len(segments) < 2:
        return None, None
    return (segments[0].strip() or None), _normalize_repo_name(segments[1])
|
||||
|
||||
|
||||
def detect_provider(repo_url: str) -> str:
    """Classify a repository URL by host: github, gitlab, or gitea (fallback)."""
    host = urlparse(repo_url).netloc.lower()
    for needle, provider in (("github.com", "github"), ("gitlab", "gitlab")):
        if needle in host:
            return provider
    # Everything else is treated as a Gitea-compatible forge.
    return "gitea"
|
||||
|
||||
|
||||
async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20):
    """GET *url* and decode JSON.

    Returns ``(payload, status)`` on HTTP 200, ``(None, status)`` on any other
    status, and ``(None, None)`` when the request itself fails.
    """
    try:
        async with session.get(url, timeout=timeout, headers=headers) as response:
            code = response.status
            if code == 200:
                return await response.json(), code
            return None, code
    except Exception:
        return None, None
|
||||
|
||||
|
||||
async def _safe_text(session, url: str, *, headers: dict | None = None, timeout: int = 20):
    """GET *url* and return the body as text.

    Returns ``(text, status)`` on HTTP 200, ``(None, status)`` on any other
    status, and ``(None, None)`` when the request itself fails.
    """
    try:
        async with session.get(url, timeout=timeout, headers=headers) as response:
            code = response.status
            if code == 200:
                return await response.text(), code
            return None, code
    except Exception:
        return None, None
|
||||
|
||||
|
||||
def _extract_tag_from_github_url(url: str) -> str | None:
|
||||
m = re.search(r"/releases/tag/([^/?#]+)", url or "")
|
||||
if not m:
|
||||
return None
|
||||
return m.group(1).strip() or None
|
||||
|
||||
|
||||
def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None:
|
||||
if not html:
|
||||
return None
|
||||
if prop:
|
||||
m = re.search(rf'<meta\s+property="{re.escape(prop)}"\s+content="([^"]+)"', html)
|
||||
if m:
|
||||
return m.group(1).strip()
|
||||
if name:
|
||||
m = re.search(rf'<meta\s+name="{re.escape(name)}"\s+content="([^"]+)"', html)
|
||||
if m:
|
||||
return m.group(1).strip()
|
||||
return None
|
||||
|
||||
|
||||
def _semver_key(tag: str) -> Version | None:
    """Parse a tag such as ``v1.2.3`` into a Version; None if empty/unparsable.

    Exactly one leading ``v``/``V`` is stripped before parsing.
    """
    candidate = (tag or "").strip()
    if not candidate:
        return None
    if candidate[:1] in ("v", "V"):
        candidate = candidate[1:]
    try:
        return Version(candidate)
    except InvalidVersion:
        return None
|
||||
|
||||
|
||||
def _pick_highest_semver(tags: list[str]) -> str | None:
    """Return the tag string whose semver parse is highest, or None.

    Non-strings, blanks, and unparsable tags are skipped. Ties keep the
    earliest tag in the input.
    """
    best: tuple[Version, str] | None = None
    for raw in tags:
        if not isinstance(raw, str):
            continue
        text = raw.strip()
        if not text:
            continue
        version = _semver_key(text)
        if version is None:
            continue
        # Strict > keeps the first tag seen among equal versions.
        if best is None or version > best[0]:
            best = (version, text)
    return None if best is None else best[1]
|
||||
|
||||
|
||||
async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None:
    """Scrape the repo description from the public GitHub HTML page.

    Token-free fallback for when the REST API yields no description. Prefers
    the OpenGraph description, then the plain ``<meta name="description">``.
    """
    session = async_get_clientsession(hass)
    page, status = await _safe_text(
        session, f"https://github.com/{owner}/{repo}", headers={"User-Agent": UA}
    )
    if status != 200 or not page:
        return None

    return _extract_meta(page, prop="og:description") or _extract_meta(page, name="description")
|
||||
|
||||
|
||||
async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
    """Resolve a release tag from GitHub's public releases Atom feed.

    Token-free fallback path. Only the first feed entry is inspected; the tag
    is taken from the entry link (".../releases/tag/<tag>") when possible,
    otherwise from the entry title. Returns (tag, "atom") or (None, None).
    """
    session = async_get_clientsession(hass)
    url = f"https://github.com/{owner}/{repo}/releases.atom"
    atom, status = await _safe_text(session, url, headers={"User-Agent": UA})
    if status != 200 or not atom:
        return None, None

    try:
        root = ET.fromstring(atom)
        ns = {"a": "http://www.w3.org/2005/Atom"}
        entry = root.find("a:entry", ns)
        if entry is None:
            # Feed parsed but has no entries (no releases).
            return None, None
        # Preferred: the link href carries the raw tag in its path.
        link = entry.find("a:link", ns)
        if link is not None and link.attrib.get("href"):
            tag = _extract_tag_from_github_url(link.attrib["href"])
            if tag:
                return tag, "atom"
        # Fallback: the entry title (typically the release/tag name).
        title = entry.find("a:title", ns)
        if title is not None and title.text:
            t = title.text.strip()
            if t:
                return t, "atom"
    except Exception:
        # Malformed XML or unexpected feed shape -> "no version found".
        return None, None

    return None, None
|
||||
|
||||
|
||||
async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
    """Resolve the latest release tag by following the /releases/latest redirect.

    GitHub redirects .../releases/latest to .../releases/tag/<tag>; the tag is
    parsed out of the final URL after redirects. Returns (tag, "release") or
    (None, None) on any failure.
    """
    session = async_get_clientsession(hass)
    url = f"https://github.com/{owner}/{repo}/releases/latest"
    try:
        async with session.get(url, timeout=20, headers={"User-Agent": UA}, allow_redirects=True) as resp:
            if resp.status != 200:
                return None, None
            # resp.url is the post-redirect URL.
            final = str(resp.url)
            tag = _extract_tag_from_github_url(final)
            if tag:
                return tag, "release"
    except Exception:
        return None, None

    return None, None
|
||||
|
||||
|
||||
async def _github_latest_version_api(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
    """Resolve the latest version via the GitHub REST API.

    Tries /releases/latest first. When the repository has no releases (the
    endpoint answers 404), falls back to listing tags and picking the highest
    semver-parseable one, then the first tag as a last resort.
    Returns (version, "release" | "tag") or (None, None).
    """
    session = async_get_clientsession(hass)
    headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}

    data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
    if isinstance(data, dict) and data.get("tag_name"):
        return str(data["tag_name"]), "release"

    # No releases -> pick highest semver from many tags (instead of per_page=1)
    if status == 404:
        data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", headers=headers)
        tags: list[str] = []
        if isinstance(data, list):
            for t in data:
                if isinstance(t, dict) and t.get("name"):
                    tags.append(str(t["name"]))

        best = _pick_highest_semver(tags)
        if best:
            return best, "tag"

        # fallback: keep old behavior (first tag)
        if tags:
            return tags[0], "tag"

    return None, None
|
||||
|
||||
|
||||
async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
    """Resolve the latest GitHub version, trying the cheapest strategy first.

    Order: /releases/latest redirect, then the REST API, then the Atom feed.
    """
    for resolver in (_github_latest_version_redirect, _github_latest_version_api):
        tag, source = await resolver(hass, owner, repo)
        if tag:
            return tag, source
    return await _github_latest_version_atom(hass, owner, repo)
|
||||
|
||||
|
||||
async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
    """Resolve the latest version via the Gitea API.

    Releases are consulted before tags; within each list the highest
    semver-parseable entry wins, falling back to the first entry. Returns
    (version, "release" | "tag") or (None, None).
    """
    session = async_get_clientsession(hass)

    async def _collect(url: str, key: str) -> list[str]:
        # Fetch a list endpoint and pull out the non-empty *key* values.
        payload, _ = await _safe_json(session, url)
        if not isinstance(payload, list):
            return []
        return [str(item[key]) for item in payload if isinstance(item, dict) and item.get(key)]

    # releases: fetch multiple, pick highest semver (instead of limit=1)
    releases = await _collect(f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=50", "tag_name")
    chosen = _pick_highest_semver(releases)
    if chosen:
        return chosen, "release"
    if releases:
        return releases[0], "release"

    # tags: fetch multiple, pick highest semver (instead of limit=1)
    tags = await _collect(f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=50", "name")
    chosen = _pick_highest_semver(tags)
    if chosen:
        return chosen, "tag"
    if tags:
        return tags[0], "tag"

    return None, None
|
||||
|
||||
|
||||
async def _gitlab_latest_version(
    hass: HomeAssistant, base: str, owner: str, repo: str
) -> tuple[str | None, str | None]:
    """Resolve the latest version of a GitLab project.

    Order: releases API, then repository tags API (highest semver preferred,
    otherwise the first entry), then the public tags Atom feed as a token-free
    last resort. Returns (version, "release" | "tag" | "atom") or (None, None).
    """
    session = async_get_clientsession(hass)
    headers = {"User-Agent": UA}

    # GitLab addresses projects by URL-encoded "<owner>/<repo>".
    project = quote_plus(f"{owner}/{repo}")

    # releases: fetch multiple, pick highest semver (instead of per_page=1)
    data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/releases?per_page=50", headers=headers)
    rel_tags: list[str] = []
    if isinstance(data, list):
        for r in data:
            if isinstance(r, dict) and r.get("tag_name"):
                rel_tags.append(str(r["tag_name"]))

    best_rel = _pick_highest_semver(rel_tags)
    if best_rel:
        return best_rel, "release"
    if rel_tags:
        # No semver-parseable release tag: fall back to the first one returned.
        return rel_tags[0], "release"

    # tags: fetch multiple, pick highest semver (instead of per_page=1)
    data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/repository/tags?per_page=50", headers=headers)
    tags: list[str] = []
    if isinstance(data, list):
        for t in data:
            if isinstance(t, dict) and t.get("name"):
                tags.append(str(t["name"]))

    best = _pick_highest_semver(tags)
    if best:
        return best, "tag"
    if tags:
        return tags[0], "tag"

    # atom fallback
    atom, status = await _safe_text(session, f"{base}/{owner}/{repo}/-/tags?format=atom", headers=headers)
    if status == 200 and atom:
        try:
            root = ET.fromstring(atom)
            ns = {"a": "http://www.w3.org/2005/Atom"}
            entry = root.find("a:entry", ns)
            if entry is not None:
                title = entry.find("a:title", ns)
                if title is not None and title.text:
                    return title.text.strip(), "atom"
        except Exception:
            # Malformed feed: ignore and report no version below.
            pass

    return None, None
|
||||
|
||||
|
||||
async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
    """Resolve repository info (owner, name, description, default branch,
    latest version) from the detected provider's public API, without tokens.

    Best effort: on any failure the RepoInfo collected so far is returned and
    the error is only logged at debug level.
    """
    provider = detect_provider(repo_url)
    owner, repo = _split_owner_repo(repo_url)

    # Seed with what can be derived from the URL alone.
    info = RepoInfo(
        owner=owner,
        repo_name=repo,
        description=None,
        provider=provider,
        default_branch=None,
        latest_version=None,
        latest_version_source=None,
    )

    if not owner or not repo:
        return info

    session = async_get_clientsession(hass)

    try:
        if provider == "github":
            headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
            data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers)

            if isinstance(data, dict):
                info.description = data.get("description")
                # Prefer the name/owner casing reported by the API over the
                # URL-derived values.
                info.repo_name = _normalize_repo_name(data.get("name")) or repo
                info.default_branch = data.get("default_branch") or "main"
                if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
                    info.owner = data["owner"]["login"]
            else:
                if status == 403:
                    _LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo)
                info.default_branch = "main"

            if not info.description:
                # API gave nothing usable; scrape the HTML page's meta tags.
                desc = await _github_description_html(hass, owner, repo)
                if desc:
                    info.description = desc

            ver, src = await _github_latest_version(hass, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

        if provider == "gitlab":
            u = urlparse(repo_url.rstrip("/"))
            base = f"{u.scheme}://{u.netloc}"
            headers = {"User-Agent": UA}
            project = quote_plus(f"{owner}/{repo}")

            data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}", headers=headers)
            if isinstance(data, dict):
                info.description = data.get("description")
                info.repo_name = _normalize_repo_name(data.get("path")) or repo
                info.default_branch = data.get("default_branch") or "main"
                ns = data.get("namespace")
                if isinstance(ns, dict) and ns.get("path"):
                    info.owner = ns.get("path")

            ver, src = await _gitlab_latest_version(hass, base, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

        if provider == "gitea":
            u = urlparse(repo_url.rstrip("/"))
            base = f"{u.scheme}://{u.netloc}"

            data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}")
            if isinstance(data, dict):
                info.description = data.get("description")
                info.repo_name = _normalize_repo_name(data.get("name")) or repo
                info.default_branch = data.get("default_branch") or "main"
                if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
                    info.owner = data["owner"]["login"]

            ver, src = await _gitea_latest_version(hass, base, owner, repo)
            info.latest_version = ver
            info.latest_version_source = src
            return info

    except Exception as e:
        _LOGGER.debug("fetch_repo_info failed for %s: %s", repo_url, e)

    return info
|
||||
|
||||
|
||||
async def fetch_readme_markdown(
    hass: HomeAssistant,
    repo_url: str,
    *,
    provider: str | None = None,
    default_branch: str | None = None,
) -> str | None:
    """Fetch README Markdown for public repositories (GitHub/GitLab/Gitea).

    Defensive behavior:
    - tries multiple common README filenames
    - tries multiple branches (default, main, master)
    - uses public raw endpoints (no tokens required for public repositories)

    Returns the first non-blank body found, or None.
    """
    repo_url = (repo_url or "").strip()
    if not repo_url:
        return None

    # Caller-supplied provider wins; otherwise derive it from the URL host.
    prov = (provider or "").strip().lower() if provider else ""
    if not prov:
        prov = detect_provider(repo_url)

    # Branch order: explicit default branch first, then main/master.
    branch_candidates: list[str] = []
    if default_branch and str(default_branch).strip():
        branch_candidates.append(str(default_branch).strip())
    for b in ("main", "master"):
        if b not in branch_candidates:
            branch_candidates.append(b)

    filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]

    session = async_get_clientsession(hass)
    headers = {"User-Agent": UA}

    def _normalize_gitlab_path(path: str) -> str | None:
        # Keeps the full project path (so nested group paths survive) and
        # strips a trailing ".git"; None when fewer than two segments.
        p = (path or "").strip().strip("/")
        if not p:
            return None
        parts = [x for x in p.split("/") if x]
        if len(parts) < 2:
            return None
        if parts[-1].endswith(".git"):
            parts[-1] = parts[-1][:-4]
        return "/".join(parts)

    # Raw URLs tried strictly in order.
    candidates: list[str] = []

    if prov == "github":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        for branch in branch_candidates:
            base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
            for fn in filenames:
                candidates.append(f"{base}/{fn}")

    elif prov == "gitea":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
        for branch in branch_candidates:
            # Two raw-URL layouts tried (presumably version-dependent).
            bases = [
                f"{root}/raw/branch/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")

    elif prov == "gitlab":
        u = urlparse(repo_url.rstrip("/"))
        path_repo = _normalize_gitlab_path(u.path)
        if not path_repo:
            return None
        root = f"{u.scheme}://{u.netloc}/{path_repo}"
        for branch in branch_candidates:
            # Modern "/-/raw/" layout first, legacy "/raw/" second.
            bases = [
                f"{root}/-/raw/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")

    else:
        return None

    for url in candidates:
        try:
            async with session.get(url, timeout=20, headers=headers) as resp:
                if resp.status != 200:
                    continue
                txt = await resp.text()
                # Skip empty/whitespace-only files and keep trying.
                if txt and txt.strip():
                    return txt
        except Exception:
            continue

    return None
|
||||
|
||||
|
||||
async def fetch_repo_versions(
    hass: HomeAssistant,
    repo_url: str,
    *,
    provider: str | None = None,
    default_branch: str | None = None,
    limit: int = 20,
) -> list[dict[str, str]]:
    """List available versions/refs for a repository.

    Returns a list of dicts with keys:
    - ref: the ref to install (tag/release/branch)
    - label: human-friendly label
    - source: release|tag|branch

    Notes:
    - Uses public endpoints (no tokens) for public repositories.
    - We prefer releases first (if available), then tags.
    - On any error, whatever was collected so far is returned.
    """

    repo_url = (repo_url or "").strip()
    if not repo_url:
        return []

    prov = (provider or "").strip().lower() if provider else ""
    if not prov:
        prov = detect_provider(repo_url)

    owner, repo = _split_owner_repo(repo_url)
    if not owner or not repo:
        return []

    session = async_get_clientsession(hass)
    headers = {"User-Agent": UA}

    out: list[dict[str, str]] = []
    seen: set[str] = set()

    def _add(ref: str | None, label: str, source: str) -> None:
        # De-duplicate by ref so a release and its backing tag appear once.
        r = (ref or "").strip()
        if not r or r in seen:
            return
        seen.add(r)
        out.append({"ref": r, "label": label, "source": source})

    # Always offer default branch as an explicit option.
    if default_branch and str(default_branch).strip():
        b = str(default_branch).strip()
        _add(b, f"Branch: {b}", "branch")

    try:
        if prov == "github":
            # Releases
            gh_headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
            data, _ = await _safe_json(
                session,
                f"https://api.github.com/repos/{owner}/{repo}/releases?per_page={int(limit)}",
                headers=gh_headers,
            )
            if isinstance(data, list):
                for r in data:
                    if not isinstance(r, dict):
                        continue
                    tag = r.get("tag_name")
                    name = r.get("name")
                    if tag:
                        lbl = str(tag)
                        # Append the release title when it differs from the tag.
                        if isinstance(name, str) and name.strip() and name.strip() != str(tag):
                            lbl = f"{tag} — {name.strip()}"
                        _add(str(tag), lbl, "release")

            # Tags
            data, _ = await _safe_json(
                session,
                f"https://api.github.com/repos/{owner}/{repo}/tags?per_page={int(limit)}",
                headers=gh_headers,
            )
            if isinstance(data, list):
                for t in data:
                    if isinstance(t, dict) and t.get("name"):
                        _add(str(t["name"]), str(t["name"]), "tag")

            return out

        if prov == "gitlab":
            u = urlparse(repo_url.rstrip("/"))
            base = f"{u.scheme}://{u.netloc}"
            project = quote_plus(f"{owner}/{repo}")

            data, _ = await _safe_json(
                session,
                f"{base}/api/v4/projects/{project}/releases?per_page={int(limit)}",
                headers=headers,
            )
            if isinstance(data, list):
                for r in data:
                    if not isinstance(r, dict):
                        continue
                    tag = r.get("tag_name")
                    name = r.get("name")
                    if tag:
                        lbl = str(tag)
                        if isinstance(name, str) and name.strip() and name.strip() != str(tag):
                            lbl = f"{tag} — {name.strip()}"
                        _add(str(tag), lbl, "release")

            data, _ = await _safe_json(
                session,
                f"{base}/api/v4/projects/{project}/repository/tags?per_page={int(limit)}",
                headers=headers,
            )
            if isinstance(data, list):
                for t in data:
                    if isinstance(t, dict) and t.get("name"):
                        _add(str(t["name"]), str(t["name"]), "tag")

            return out

        # gitea (incl. Bahmcloud)
        u = urlparse(repo_url.rstrip("/"))
        base = f"{u.scheme}://{u.netloc}"

        data, _ = await _safe_json(
            session,
            f"{base}/api/v1/repos/{owner}/{repo}/releases?limit={int(limit)}",
            headers=headers,
        )
        if isinstance(data, list):
            for r in data:
                if not isinstance(r, dict):
                    continue
                tag = r.get("tag_name")
                name = r.get("name")
                if tag:
                    lbl = str(tag)
                    if isinstance(name, str) and name.strip() and name.strip() != str(tag):
                        lbl = f"{tag} — {name.strip()}"
                    _add(str(tag), lbl, "release")

        data, _ = await _safe_json(
            session,
            f"{base}/api/v1/repos/{owner}/{repo}/tags?limit={int(limit)}",
            headers=headers,
        )
        if isinstance(data, list):
            for t in data:
                if isinstance(t, dict) and t.get("name"):
                    _add(str(t["name"]), str(t["name"]), "tag")

        return out

    except Exception:
        _LOGGER.debug("fetch_repo_versions failed for %s", repo_url, exc_info=True)

    return out
|
||||
55
custom_components/bahmcloud_store/repairs.py
Normal file
55
custom_components/bahmcloud_store/repairs.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant import data_entry_flow
|
||||
|
||||
from .core import RESTART_REQUIRED_ISSUE_ID
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BCSRestartRequiredFlow(RepairsFlow):
    """Repairs flow to restart Home Assistant after BCS install/update."""

    def __init__(self, hass: HomeAssistant) -> None:
        # Kept so the confirm step can call the restart service.
        self.hass = hass

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Entry point; this flow has a single confirmation step."""
        return await self.async_step_confirm(user_input)

    async def async_step_confirm(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Show a confirm form; on submit, trigger a Home Assistant restart."""
        if user_input is not None:
            _LOGGER.info("BCS repairs flow: restarting Home Assistant (user confirmed)")
            # blocking=False: fire-and-forget so the flow can complete before
            # the restart tears everything down.
            await self.hass.services.async_call(
                "homeassistant",
                "restart",
                {},
                blocking=False,
            )
            return self.async_create_entry(title="", data={})

        # Empty schema -> a plain confirmation dialog with no inputs.
        return self.async_show_form(
            step_id="confirm",
            data_schema=vol.Schema({}),
        )
|
||||
|
||||
|
||||
async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create a repairs flow for BCS fixable issues.

    Only the restart-required issue is fixable; any other id raises
    UnknownHandler so Home Assistant reports it as unhandled.
    """
    if issue_id == RESTART_REQUIRED_ISSUE_ID:
        return BCSRestartRequiredFlow(hass)

    raise data_entry_flow.UnknownHandler
|
||||
271
custom_components/bahmcloud_store/storage.py
Normal file
271
custom_components/bahmcloud_store/storage.py
Normal file
@@ -0,0 +1,271 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.storage import Store
|
||||
|
||||
_STORAGE_VERSION = 1
|
||||
_STORAGE_KEY = "bcs_store"
|
||||
|
||||
|
||||
@dataclass
class CustomRepo:
    """A manually added repository entry as persisted in storage."""

    id: str  # stable id, "custom:<hex>" (see add_custom_repo)
    url: str
    name: str | None = None  # optional display name
|
||||
|
||||
|
||||
@dataclass
class InstalledRepo:
    """Record of an installed repository as persisted in storage."""

    repo_id: str
    url: str
    domains: list[str]
    installed_at: int  # presumably a unix timestamp — confirm against writer
    installed_version: str | None = None  # BCS "installed ref" (tag/release/branch)
    installed_manifest_version: str | None = None  # informational only
    ref: str | None = None  # kept for backward compatibility / diagnostics
|
||||
|
||||
|
||||
class BCSStorage:
|
||||
"""Persistent storage for Bahmcloud Store.
|
||||
|
||||
Keys:
|
||||
- custom_repos: list of manually added repositories
|
||||
- installed_repos: mapping repo_id -> installed metadata
|
||||
- settings: persistent user settings (e.g. toggles in the UI)
|
||||
- hacs_cache: cached HACS metadata to improve UX (display names/descriptions)
|
||||
- repo_cache: cached per-repo enrichment (names/descriptions/versions) to keep the UI populated after restart
|
||||
"""
|
||||
|
||||
    def __init__(self, hass: HomeAssistant) -> None:
        """Wrap Home Assistant's versioned Store under key ``bcs_store``."""
        self.hass = hass
        self._store: Store[dict[str, Any]] = Store(hass, _STORAGE_VERSION, _STORAGE_KEY)
|
||||
|
||||
async def _load(self) -> dict[str, Any]:
|
||||
data = await self._store.async_load() or {}
|
||||
if not isinstance(data, dict):
|
||||
data = {}
|
||||
|
||||
if "custom_repos" not in data or not isinstance(data.get("custom_repos"), list):
|
||||
data["custom_repos"] = []
|
||||
|
||||
if "installed_repos" not in data or not isinstance(data.get("installed_repos"), dict):
|
||||
data["installed_repos"] = {}
|
||||
|
||||
if "settings" not in data or not isinstance(data.get("settings"), dict):
|
||||
data["settings"] = {}
|
||||
|
||||
if "hacs_cache" not in data or not isinstance(data.get("hacs_cache"), dict):
|
||||
data["hacs_cache"] = {}
|
||||
|
||||
if "repo_cache" not in data or not isinstance(data.get("repo_cache"), dict):
|
||||
data["repo_cache"] = {}
|
||||
|
||||
return data
|
||||
|
||||
    async def get_repo_cache(self) -> dict[str, Any]:
        """Return cached per-repo enrichment data.

        Shape:
        {
            "fetched_at": <unix_ts>,
            "repos": {
                "<repo_id>": {
                    "ts": <unix_ts>,
                    "url": "...",
                    "name": "...",
                    "provider_description": "...",
                    "meta_name": "...",
                    "meta_description": "...",
                    "meta_category": "...",
                    "meta_source": "...",
                    "latest_version": "...",
                    "latest_version_source": "...",
                    "default_branch": "...",
                    "owner": "...",
                    "provider_repo_name": "..."
                }
            }
        }
        """
        data = await self._load()
        cache = data.get("repo_cache", {})
        # Defensive: never hand a non-dict back to callers.
        return cache if isinstance(cache, dict) else {}
|
||||
|
||||
async def set_repo_cache(self, cache: dict[str, Any]) -> None:
|
||||
"""Persist cached per-repo enrichment data."""
|
||||
data = await self._load()
|
||||
data["repo_cache"] = cache if isinstance(cache, dict) else {}
|
||||
await self._save(data)
|
||||
|
||||
    async def get_hacs_cache(self) -> dict[str, Any]:
        """Return cached HACS metadata.

        Shape:
        {
            "fetched_at": <unix_ts>,
            "repos": {"owner/repo": {"name": "...", "description": "...", "domain": "..."}}
        }
        """
        data = await self._load()
        cache = data.get("hacs_cache", {})
        # Defensive: never hand a non-dict back to callers.
        return cache if isinstance(cache, dict) else {}
|
||||
|
||||
async def set_hacs_cache(self, cache: dict[str, Any]) -> None:
|
||||
"""Persist cached HACS metadata."""
|
||||
data = await self._load()
|
||||
data["hacs_cache"] = cache if isinstance(cache, dict) else {}
|
||||
await self._save(data)
|
||||
|
||||
async def get_settings(self) -> dict[str, Any]:
    """Return persisted settings (currently UI/behavior toggles).

    A corrupt (non-dict) stored value yields an empty dict.
    """
    stored = (await self._load()).get("settings", {})
    if isinstance(stored, dict):
        return stored
    return {}
|
||||
|
||||
async def set_settings(self, updates: dict[str, Any]) -> dict[str, Any]:
    """Apply *updates* to persisted settings and return the merged mapping.

    Keys are coerced to str; a corrupt (non-dict) stored value is reset.
    """
    data = await self._load()
    settings = data.get("settings", {})
    if not isinstance(settings, dict):
        settings = {}
    # Merge in place so the stored mapping stays the same object.
    settings.update({str(key): value for key, value in (updates or {}).items()})
    data["settings"] = settings
    await self._save(data)
    return settings
|
||||
|
||||
async def _save(self, data: dict[str, Any]) -> None:
    """Persist the full storage payload via the HA Store helper."""
    await self._store.async_save(data)
|
||||
|
||||
async def list_custom_repos(self) -> list[CustomRepo]:
    """Return every well-formed custom repo entry from storage.

    Entries that are not dicts, or lack an id or url, are skipped.
    """
    data = await self._load()
    result: list[CustomRepo] = []
    for entry in data.get("custom_repos", []):
        if not isinstance(entry, dict):
            continue
        entry_id = entry.get("id")
        entry_url = entry.get("url")
        if entry_id and entry_url:
            result.append(
                CustomRepo(id=str(entry_id), url=str(entry_url), name=entry.get("name"))
            )
    return result
|
||||
|
||||
async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
    """Add a custom repository by URL, de-duplicating on the trimmed URL.

    Returns the existing entry when the URL is already stored with a valid
    id; otherwise persists and returns a new entry with a generated
    ``custom:<hex>`` id.
    """
    data = await self._load()
    repos = data.get("custom_repos", [])
    if not isinstance(repos, list):
        # Corrupt storage: reset instead of crashing on iteration/append,
        # consistent with how the other accessors guard non-dict values.
        repos = []

    target = url.strip()

    # De-duplicate by URL. Malformed entries (non-dict, or missing "id")
    # are skipped rather than raising KeyError on r["id"] below.
    for r in repos:
        if not isinstance(r, dict):
            continue
        if str(r.get("url") or "").strip() == target and r.get("id"):
            return CustomRepo(id=str(r["id"]), url=str(r["url"]), name=r.get("name"))

    rid = f"custom:{uuid.uuid4().hex[:10]}"
    entry = {"id": rid, "url": target, "name": name.strip() if name else None}
    repos.append(entry)
    data["custom_repos"] = repos
    await self._save(data)
    return CustomRepo(id=rid, url=entry["url"], name=entry["name"])
|
||||
|
||||
async def remove_custom_repo(self, repo_id: str) -> None:
    """Remove the custom repo with *repo_id*; a no-op when it is absent."""
    data = await self._load()
    repos = data.get("custom_repos", [])
    if not isinstance(repos, list):
        # Corrupt storage: normalize instead of raising while iterating.
        repos = []
    data["custom_repos"] = [
        r for r in repos if not (isinstance(r, dict) and r.get("id") == repo_id)
    ]
    await self._save(data)
|
||||
|
||||
async def get_installed_repo(self, repo_id: str) -> InstalledRepo | None:
    """Return the installed-repo record for *repo_id*, or None.

    None means "not installed": the registry is missing/corrupt, the entry
    is absent or not a dict, or the entry cannot be coerced into an
    ``InstalledRepo`` (see the deliberate broad except at the bottom).
    """
    data = await self._load()
    installed = data.get("installed_repos", {})
    if not isinstance(installed, dict):
        return None
    entry = installed.get(repo_id)
    if not isinstance(entry, dict):
        return None

    try:
        # Normalize domains: non-list values are dropped, blank names removed.
        domains = entry.get("domains") or []
        if not isinstance(domains, list):
            domains = []
        domains = [str(d) for d in domains if str(d).strip()]

        installed_version = entry.get("installed_version")
        ref = entry.get("ref")

        # Backward compatibility:
        # If installed_version wasn't stored, fall back to ref.
        if (not installed_version) and ref:
            installed_version = ref

        installed_manifest_version = entry.get("installed_manifest_version")

        return InstalledRepo(
            repo_id=str(entry.get("repo_id") or repo_id),
            url=str(entry.get("url") or ""),
            domains=domains,
            installed_at=int(entry.get("installed_at") or 0),
            installed_version=str(installed_version) if installed_version else None,
            installed_manifest_version=str(installed_manifest_version) if installed_manifest_version else None,
            ref=str(ref) if ref else None,
        )
    except Exception:
        # Any coercion failure (e.g. a non-numeric installed_at) is treated
        # as "not installed" rather than propagating to callers.
        return None
|
||||
|
||||
async def list_installed_repos(self) -> list[InstalledRepo]:
    """Return every installed-repo record that parses successfully."""
    data = await self._load()
    registry = data.get("installed_repos", {})
    if not isinstance(registry, dict):
        return []
    result: list[InstalledRepo] = []
    # Snapshot the keys so concurrent mutation of the registry is harmless.
    for repo_id in list(registry):
        record = await self.get_installed_repo(str(repo_id))
        if record is not None:
            result.append(record)
    return result
|
||||
|
||||
async def set_installed_repo(
    self,
    *,
    repo_id: str,
    url: str,
    domains: list[str],
    installed_version: str | None,
    installed_manifest_version: str | None = None,
    ref: str | None,
) -> None:
    """Create or replace the installed-repo record for *repo_id*."""
    data = await self._load()
    registry = data.get("installed_repos", {})
    if not isinstance(registry, dict):
        registry = {}
    data["installed_repos"] = registry

    key = str(repo_id)
    registry[key] = {
        "repo_id": key,
        "url": str(url),
        "domains": [str(d) for d in (domains or []) if str(d).strip()],
        "installed_at": int(time.time()),
        # IMPORTANT: BCS treats this as the installed version (ref/tag/branch).
        "installed_version": installed_version,
        # Informational only.
        "installed_manifest_version": installed_manifest_version,
        # Kept for debugging / backward compatibility.
        "ref": ref,
    }
    await self._save(data)
|
||||
|
||||
async def remove_installed_repo(self, repo_id: str) -> None:
    """Delete the installed-repo record for *repo_id*, if present.

    Storage is only rewritten when the record actually existed.
    """
    data = await self._load()
    registry = data.get("installed_repos", {})
    if not (isinstance(registry, dict) and repo_id in registry):
        return
    registry.pop(repo_id, None)
    data["installed_repos"] = registry
    await self._save(data)
|
||||
18
custom_components/bahmcloud_store/strings.json
Normal file
18
custom_components/bahmcloud_store/strings.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"issues": {
|
||||
"restart_required": {
|
||||
"title": "Restart required",
|
||||
"description": "One or more integrations were installed or updated by Bahmcloud Store. Restart Home Assistant to load the changes."
|
||||
}
|
||||
},
|
||||
"repair_flow": {
|
||||
"restart_required": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"title": "Restart Home Assistant",
|
||||
"description": "Bahmcloud Store installed or updated integrations. Restart Home Assistant now to apply the changes."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,142 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from . import DOMAIN
|
||||
from .store import BahmcloudStore, Package
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.entity import EntityCategory
|
||||
|
||||
from .core import DOMAIN, SIGNAL_UPDATED, BCSCore
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _pretty_repo_name(core: BCSCore, repo_id: str) -> str:
|
||||
"""Return a human-friendly name for a repo update entity."""
|
||||
try:
|
||||
repo = core.get_repo(repo_id)
|
||||
if repo and getattr(repo, "name", None):
|
||||
name = str(repo.name).strip()
|
||||
if name:
|
||||
return name
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if repo_id.startswith("index:"):
|
||||
return f"BCS Index {repo_id.split(':', 1)[1]}"
|
||||
if repo_id.startswith("custom:"):
|
||||
return f"BCS Custom {repo_id.split(':', 1)[1]}"
|
||||
return f"BCS {repo_id}"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class _RepoKey:
    """Immutable (hashable) wrapper around a repo id."""

    # BCS repository id, e.g. "index:..." or "custom:...".
    repo_id: str
|
||||
|
||||
|
||||
class BCSRepoUpdateEntity(UpdateEntity):
    """Update entity representing a BCS-managed repository."""

    # Listed under "Diagnostic" in the HA UI rather than as a primary control.
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_supported_features = UpdateEntityFeature.INSTALL

    def __init__(self, core: BCSCore, repo_id: str) -> None:
        """Bind this entity to the repo *repo_id* managed by *core*."""
        self._core = core
        self._repo_id = repo_id
        # True while an install triggered from this entity is running.
        self._in_progress = False

        # Stable unique id (do NOT change)
        self._attr_unique_id = f"{DOMAIN}:{repo_id}"

        self._refresh_display_name()

    def _refresh_display_name(self) -> None:
        """Recompute name/title; the repo's name may only become available later."""
        pretty = _pretty_repo_name(self._core, self._repo_id)
        self._attr_name = pretty
        self._attr_title = pretty

    @property
    def available(self) -> bool:
        """Available only while the repo is known to core AND installed."""
        repo = self._core.get_repo(self._repo_id)
        installed = self._core.get_installed(self._repo_id)
        return repo is not None and installed is not None

    @property
    def in_progress(self) -> bool | None:
        """Whether an install started from this entity is still running."""
        return self._in_progress

    @property
    def installed_version(self) -> str | None:
        """Installed version; falls back to the stored ref for older records."""
        installed = self._core.get_installed(self._repo_id) or {}
        v = installed.get("installed_version") or installed.get("ref")
        return str(v) if v else None

    @property
    def latest_version(self) -> str | None:
        """Latest known version from core, or None when the repo is unknown."""
        repo = self._core.get_repo(self._repo_id)
        if not repo:
            return None
        v = getattr(repo, "latest_version", None)
        return str(v) if v else None

    @property
    def update_available(self) -> bool:
        """True when latest and installed versions are both known and differ.

        Versions here are refs/tags/branches, not guaranteed semver, so a
        plain inequality is used instead of an ordering comparison.
        """
        latest = self.latest_version
        installed = self.installed_version
        if not latest or not installed:
            return False
        return latest != installed

    def version_is_newer(self, latest_version: str, installed_version: str) -> bool:
        # Same rationale as update_available: any difference counts as newer.
        return latest_version != installed_version

    @property
    def release_url(self) -> str | None:
        """Link shown by the update entity: the repo URL itself."""
        repo = self._core.get_repo(self._repo_id)
        return getattr(repo, "url", None) if repo else None

    async def async_install(self, version: str | None, backup: bool, **kwargs: Any) -> None:
        """Run the core update for this repo; a specific *version* is ignored."""
        if version is not None:
            _LOGGER.debug("BCS update entity requested specific version=%s (ignored)", version)

        self._in_progress = True
        self.async_write_ha_state()

        try:
            await self._core.update_repo(self._repo_id)
        finally:
            # Always clear the progress flag, even when the update raises.
            self._in_progress = False
            self.async_write_ha_state()
|
||||
|
||||
|
||||
@callback
def _sync_entities(core: BCSCore, existing: dict[str, BCSRepoUpdateEntity], async_add_entities: AddEntitiesCallback) -> None:
    """Ensure there is one update entity per installed repo AND keep names in sync."""
    # NOTE(review): reads core's private _installed_cache — appears to map
    # repo_id -> stored install record (dict); confirm against BCSCore.
    installed_map = getattr(core, "_installed_cache", {}) or {}
    new_entities: list[BCSRepoUpdateEntity] = []

    for repo_id, data in installed_map.items():
        if not isinstance(data, dict):
            continue

        if repo_id in existing:
            # IMPORTANT: Update display name after refresh, when repo.name becomes available.
            existing[repo_id]._refresh_display_name()
            continue

        ent = BCSRepoUpdateEntity(core, repo_id)
        existing[repo_id] = ent
        new_entities.append(ent)

    if new_entities:
        async_add_entities(new_entities)

    # Push fresh state for every tracked entity (names/versions may have changed).
    for ent in existing.values():
        ent.async_write_ha_state()
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
@@ -14,81 +145,18 @@ async def async_setup_platform(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info=None,
|
||||
):
|
||||
store: BahmcloudStore = hass.data[DOMAIN]
|
||||
entities: dict[str, BahmcloudPackageUpdate] = {}
|
||||
|
||||
def should_have_update_entity(pkg: Package) -> bool:
|
||||
# Store selbst immer als Update
|
||||
if pkg.type == "store":
|
||||
return True
|
||||
# Andere Pakete nur, wenn installiert
|
||||
return store.is_installed(pkg.domain)
|
||||
|
||||
def rebuild_entities() -> None:
|
||||
# Create entities for packages that qualify
|
||||
for pkg in store.packages.values():
|
||||
if not should_have_update_entity(pkg):
|
||||
continue
|
||||
|
||||
uid = f"{DOMAIN}:{pkg.id}"
|
||||
if uid not in entities:
|
||||
ent = BahmcloudPackageUpdate(store, pkg.id)
|
||||
entities[uid] = ent
|
||||
async_add_entities([ent], update_before_add=True)
|
||||
|
||||
# Refresh states
|
||||
for ent in entities.values():
|
||||
ent.async_write_ha_state()
|
||||
|
||||
store.add_listener(rebuild_entities)
|
||||
rebuild_entities()
|
||||
|
||||
|
||||
class BahmcloudPackageUpdate(UpdateEntity):
|
||||
_attr_supported_features = UpdateEntityFeature.INSTALL
|
||||
|
||||
def __init__(self, store: BahmcloudStore, package_id: str) -> None:
|
||||
self.store = store
|
||||
self.package_id = package_id
|
||||
|
||||
self._attr_unique_id = f"{DOMAIN}_{package_id}"
|
||||
self._attr_name = f"{package_id} update"
|
||||
|
||||
@property
|
||||
def _pkg(self) -> Package | None:
|
||||
return self.store.packages.get(self.package_id)
|
||||
|
||||
@property
|
||||
def title(self) -> str | None:
|
||||
pkg = self._pkg
|
||||
return pkg.name if pkg else None
|
||||
|
||||
@property
|
||||
def installed_version(self) -> str | None:
|
||||
pkg = self._pkg
|
||||
if not pkg:
|
||||
return None
|
||||
if not self.store.is_installed(pkg.domain):
|
||||
return None
|
||||
return self.store.installed_version(pkg.domain)
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str | None:
|
||||
pkg = self._pkg
|
||||
return pkg.latest_version if pkg else None
|
||||
|
||||
@property
|
||||
def release_summary(self) -> str | None:
|
||||
pkg = self._pkg
|
||||
if not pkg:
|
||||
return None
|
||||
if pkg.release_url:
|
||||
return f"Release: {pkg.release_url}"
|
||||
return f"Repo: {pkg.repo}"
|
||||
|
||||
async def async_install(self, version: str | None, backup: bool, **kwargs) -> None:
|
||||
pkg = self._pkg
|
||||
if not pkg:
|
||||
"""Set up BCS update entities."""
|
||||
core: BCSCore | None = hass.data.get(DOMAIN)
|
||||
if not core:
|
||||
_LOGGER.debug("BCS core not available, skipping update platform setup")
|
||||
return
|
||||
await self.store.install_from_zip(pkg)
|
||||
self.async_write_ha_state()
|
||||
|
||||
entities: dict[str, BCSRepoUpdateEntity] = {}
|
||||
|
||||
_sync_entities(core, entities, async_add_entities)
|
||||
|
||||
@callback
|
||||
def _handle_update() -> None:
|
||||
_sync_entities(core, entities, async_add_entities)
|
||||
|
||||
async_dispatcher_connect(hass, SIGNAL_UPDATED, _handle_update)
|
||||
532
custom_components/bahmcloud_store/views.py
Normal file
532
custom_components/bahmcloud_store/views.py
Normal file
@@ -0,0 +1,532 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from dataclasses import asdict
|
||||
from pathlib import Path
|
||||
from typing import Any, TYPE_CHECKING
|
||||
|
||||
from aiohttp import web
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .core import BCSCore # typing only
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _render_markdown_server_side(md: str) -> str | None:
    """Render markdown *md* to HTML server-side, sanitizing when possible.

    Returns None when the input is blank or the optional ``markdown``
    package is unavailable/fails. When ``bleach`` is available the HTML is
    reduced to a tag/attribute whitelist; otherwise the unsanitized HTML
    is returned as-is.
    """
    text = (md or "").strip()
    if not text:
        return None

    html: str | None = None

    # Both renderer and sanitizer are optional dependencies, imported lazily
    # so the integration works without them.
    try:
        import markdown as mdlib  # type: ignore

        html = mdlib.markdown(
            text,
            extensions=["fenced_code", "tables", "sane_lists", "toc"],
            output_format="html5",
        )
    except Exception as e:
        _LOGGER.debug("python-markdown render failed: %s", e)
        html = None

    if not html:
        return None

    try:
        import bleach  # type: ignore

        allowed_tags = [
            "p",
            "br",
            "hr",
            "div",
            "span",
            "blockquote",
            "pre",
            "code",
            "h1",
            "h2",
            "h3",
            "h4",
            "h5",
            "h6",
            "ul",
            "ol",
            "li",
            "strong",
            "em",
            "b",
            "i",
            "u",
            "s",
            "a",
            "img",
            "table",
            "thead",
            "tbody",
            "tr",
            "th",
            "td",
        ]

        allowed_attrs = {
            "a": ["href", "title", "target", "rel"],
            "img": ["src", "alt", "title"],
            "th": ["align"],
            "td": ["align"],
            "*": ["class"],
        }

        sanitized = bleach.clean(
            html,
            tags=allowed_tags,
            attributes=allowed_attrs,
            protocols=["http", "https", "mailto"],
            strip=True,
        )

        # Force all links to open in a new tab without leaking the referrer.
        sanitized = sanitized.replace(
            '<a href="',
            '<a rel="noreferrer noopener" target="_blank" href="',
        )
        return sanitized

    except Exception as e:
        _LOGGER.debug("bleach sanitize failed/unavailable: %s", e)

    # Fallback: bleach missing/failed — return the unsanitized render.
    return html
|
||||
|
||||
|
||||
_TEXT_KEYS = ("readme", "markdown", "text", "content", "data", "body")
|
||||
|
||||
|
||||
def _maybe_decode_base64(content: str, encoding: Any) -> str | None:
|
||||
if not isinstance(content, str):
|
||||
return None
|
||||
enc = ""
|
||||
if isinstance(encoding, str):
|
||||
enc = encoding.strip().lower()
|
||||
if "base64" not in enc:
|
||||
return None
|
||||
try:
|
||||
raw = base64.b64decode(content.encode("utf-8"), validate=False)
|
||||
return raw.decode("utf-8", errors="replace")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _extract_text_recursive(obj: Any, depth: int = 0) -> str | None:
|
||||
if obj is None:
|
||||
return None
|
||||
|
||||
if isinstance(obj, bytes):
|
||||
try:
|
||||
return obj.decode("utf-8", errors="replace")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if isinstance(obj, str):
|
||||
return obj
|
||||
|
||||
if depth > 8:
|
||||
return None
|
||||
|
||||
if isinstance(obj, dict):
|
||||
content = obj.get("content")
|
||||
encoding = obj.get("encoding")
|
||||
|
||||
decoded = _maybe_decode_base64(content, encoding)
|
||||
if decoded:
|
||||
return decoded
|
||||
|
||||
if isinstance(content, str) and (not isinstance(encoding, str) or not encoding.strip()):
|
||||
return content
|
||||
|
||||
for k in _TEXT_KEYS:
|
||||
v = obj.get(k)
|
||||
if isinstance(v, str):
|
||||
return v
|
||||
if isinstance(v, bytes):
|
||||
try:
|
||||
return v.decode("utf-8", errors="replace")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
for v in obj.values():
|
||||
out = _extract_text_recursive(v, depth + 1)
|
||||
if out:
|
||||
return out
|
||||
|
||||
return None
|
||||
|
||||
if isinstance(obj, list):
|
||||
for item in obj:
|
||||
out = _extract_text_recursive(item, depth + 1)
|
||||
if out:
|
||||
return out
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class StaticAssetsView(HomeAssistantView):
    """Serve the BCS panel's static assets from the bundled ``panel`` dir."""

    url = "/api/bahmcloud_store_static/{path:.*}"
    name = "api:bahmcloud_store_static"
    # Panel assets are served without HA auth.
    requires_auth = False

    async def get(self, request: web.Request, path: str) -> web.StreamResponse:
        """Return the requested asset, or 404 for missing/escaping paths."""
        base = Path(__file__).resolve().parent / "panel"
        base_resolved = base.resolve()

        req_path = (path or "").lstrip("/")
        if req_path == "":
            req_path = "index.html"

        target = (base / req_path).resolve()

        # Containment check. A plain str.startswith() comparison is unsafe:
        # it would also accept sibling paths such as ".../panelX". Using
        # Path.relative_to() only succeeds for true descendants of the base
        # (or the base itself).
        try:
            target.relative_to(base_resolved)
        except ValueError:
            return web.Response(status=404)

        if target.is_dir():
            target = (target / "index.html").resolve()

        if not target.exists():
            _LOGGER.error("BCS static asset not found: %s", target)
            return web.Response(status=404)

        resp = web.FileResponse(path=target)
        # Disable caching so panel updates take effect immediately.
        resp.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0"
        resp.headers["Pragma"] = "no-cache"
        return resp
|
||||
|
||||
|
||||
class BCSApiView(HomeAssistantView):
    """Primary BCS endpoint: status snapshot (GET) and operations (POST)."""

    url = "/api/bcs"
    name = "api:bcs"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Loosely typed parameter; the attribute annotation is resolved lazily
        # (TYPE_CHECKING import) to avoid a circular import with .core.
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return version, public settings and the public repo list."""
        return web.json_response(
            {
                "ok": True,
                "version": self.core.version,
                "settings": self.core.get_settings_public(),
                "repos": self.core.list_repos_public(),
            }
        )

    async def post(self, request: web.Request) -> web.Response:
        """Dispatch an operation.

        Two request styles are accepted:
        * ``?action=refresh`` query parameter -> full refresh of core data.
        * JSON body with an ``op`` field -> currently only ``add_custom_repo``.
        """
        action = request.query.get("action")
        if action == "refresh":
            _LOGGER.info("BCS manual refresh triggered via API")
            try:
                await self.core.full_refresh(source="manual")
                return web.json_response({"ok": True})
            except Exception as e:
                _LOGGER.error("BCS manual refresh failed: %s", e)
                return web.json_response({"ok": False, "message": "Refresh failed"}, status=500)

        # The body may legitimately be absent or non-JSON; treat as empty.
        try:
            data = await request.json()
        except Exception:
            data = {}

        op = data.get("op")

        if op == "add_custom_repo":
            url = str(data.get("url") or "").strip()
            name = data.get("name")
            name = str(name).strip() if name else None
            if not url:
                return web.json_response({"ok": False, "message": "Missing url"}, status=400)
            repo = await self.core.add_custom_repo(url=url, name=name)
            return web.json_response({"ok": True, "repo": asdict(repo)})

        return web.json_response({"ok": False, "message": "Unknown operation"}, status=400)
|
||||
|
||||
|
||||
class BCSSettingsView(HomeAssistantView):
    """HTTP endpoint for persistent UI settings (e.g. toggles)."""

    url = "/api/bcs/settings"
    name = "api:bcs_settings"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return the current public settings."""
        return web.json_response({"ok": True, "settings": self.core.get_settings_public()})

    async def post(self, request: web.Request) -> web.Response:
        """Apply whitelisted settings from the JSON body and return the result."""
        try:
            body = await request.json()
        except Exception:
            body = {}

        # Only known keys are forwarded; everything else is ignored.
        updates: dict[str, Any] = {}
        if "hacs_enabled" in body:
            updates["hacs_enabled"] = bool(body.get("hacs_enabled"))

        try:
            merged = await self.core.set_settings(updates)
            return web.json_response({"ok": True, "settings": merged})
        except Exception as e:
            _LOGGER.exception("BCS set settings failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Failed"}, status=500)
|
||||
|
||||
|
||||
class BCSCustomRepoView(HomeAssistantView):
    """HTTP endpoint for removing user-added custom repositories."""

    url = "/api/bcs/custom_repo"
    name = "api:bcs_custom_repo"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def delete(self, request: web.Request) -> web.Response:
        """Delete the custom repo identified by the ``id`` query parameter."""
        rid = request.query.get("id")
        if rid:
            await self.core.remove_custom_repo(rid)
            return web.json_response({"ok": True})
        return web.json_response({"ok": False, "message": "Missing id"}, status=400)
|
||||
|
||||
|
||||
class BCSReadmeView(HomeAssistantView):
    """Fetch a repo README as markdown plus (optionally) server-rendered HTML."""

    url = "/api/bcs/readme"
    name = "api:bcs_readme"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Loosely typed to avoid a circular import with .core.
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return ``{"readme": <markdown>, "html": <sanitized html | None>}``."""
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        # Providers may return str/bytes/nested JSON wrappers; normalize below.
        maybe_md = await self.core.fetch_readme_markdown(repo_id)

        md = _extract_text_recursive(maybe_md)
        if not md or not md.strip():
            # Include the payload type in the error to aid debugging.
            t = type(maybe_md).__name__
            return web.json_response(
                {"ok": False, "message": f"README not found or unsupported format (got {t})."},
                status=404,
            )

        md_str = str(md)
        # html is None when server-side rendering is unavailable.
        html = _render_markdown_server_side(md_str)
        return web.json_response({"ok": True, "readme": md_str, "html": html})
|
||||
|
||||
|
||||
class BCSVersionsView(HomeAssistantView):
    """HTTP endpoint listing the selectable versions of a repository."""

    url = "/api/bcs/versions"
    name = "api:bcs_versions"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return the versions of the repo named by ``repo_id``."""
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            found = await self.core.list_repo_versions(repo_id)
            return web.json_response({"ok": True, "repo_id": repo_id, "versions": found}, status=200)
        except Exception as e:
            _LOGGER.exception("BCS list versions failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "List versions failed"}, status=500)
|
||||
|
||||
|
||||
class BCSInstallView(HomeAssistantView):
    """HTTP endpoint that installs a repository, optionally at a version."""

    url = "/api/bcs/install"
    name = "api:bcs_install"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Install ``repo_id``; ``version`` query parameter is optional."""
        query = request.query
        repo_id = query.get("repo_id")
        version = query.get("version")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            wanted = None if version is None else str(version).strip()
            outcome = await self.core.install_repo(repo_id, version=wanted)
            return web.json_response(outcome, status=200)
        except Exception as e:
            _LOGGER.exception("BCS install failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Install failed"}, status=500)
|
||||
|
||||
|
||||
class BCSUpdateView(HomeAssistantView):
    """HTTP endpoint that updates an installed repository."""

    url = "/api/bcs/update"
    name = "api:bcs_update"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Update ``repo_id``; ``version`` query parameter is optional."""
        query = request.query
        repo_id = query.get("repo_id")
        version = query.get("version")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            wanted = None if version is None else str(version).strip()
            outcome = await self.core.update_repo(repo_id, version=wanted)
            return web.json_response(outcome, status=200)
        except Exception as e:
            _LOGGER.exception("BCS update failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Update failed"}, status=500)
|
||||
|
||||
|
||||
class BCSUninstallView(HomeAssistantView):
    """HTTP endpoint that uninstalls a repository."""

    url = "/api/bcs/uninstall"
    name = "api:bcs_uninstall"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Uninstall the repo named by the ``repo_id`` query parameter."""
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            outcome = await self.core.uninstall_repo(repo_id)
            return web.json_response(outcome, status=200)
        except Exception as e:
            _LOGGER.exception("BCS uninstall failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Uninstall failed"}, status=500)
|
||||
|
||||
|
||||
|
||||
|
||||
class BCSBackupsView(HomeAssistantView):
    """HTTP endpoint listing the stored backups for a repository."""

    url = "/api/bcs/backups"
    name = "api:bcs_backups"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return the backups for the repo named by ``repo_id``."""
        repo_id = request.query.get("repo_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            found = await self.core.list_repo_backups(repo_id)
            return web.json_response({"ok": True, "repo_id": repo_id, "backups": found}, status=200)
        except Exception as e:
            _LOGGER.exception("BCS list backups failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "List backups failed"}, status=500)
|
||||
|
||||
|
||||
class BCSRestoreView(HomeAssistantView):
    """HTTP endpoint that restores a repository from one of its backups."""

    url = "/api/bcs/restore"
    name = "api:bcs_restore"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Restore ``repo_id`` from ``backup_id``; both are required."""
        query = request.query
        repo_id = query.get("repo_id")
        backup_id = query.get("backup_id")
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)
        if not backup_id:
            return web.json_response({"ok": False, "message": "Missing backup_id"}, status=400)

        try:
            outcome = await self.core.restore_repo_backup(repo_id, backup_id)
            return web.json_response(outcome, status=200)
        except Exception as e:
            _LOGGER.exception("BCS restore failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Restore failed"}, status=500)
|
||||
|
||||
|
||||
class BCSRestartView(HomeAssistantView):
    """HTTP endpoint that asks the core to restart Home Assistant."""

    url = "/api/bcs/restart"
    name = "api:bcs_restart"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        self.core: BCSCore = core

    async def post(self, request: web.Request) -> web.Response:
        """Trigger a restart request via the core."""
        try:
            await self.core.request_restart()
        except Exception as e:
            _LOGGER.exception("BCS restart failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Restart failed"}, status=500)
        return web.json_response({"ok": True})
|
||||
|
||||
class BCSRepoDetailView(HomeAssistantView):
    """Detailed view of a single repository, including install state."""

    url = "/api/bcs/repo"
    name = "api:bcs_repo"
    requires_auth = True

    def __init__(self, core: Any) -> None:
        # Loosely typed to avoid a circular import with .core.
        self.core: BCSCore = core

    async def get(self, request: web.Request) -> web.Response:
        """Return full repo metadata merged with installed-state fields.

        The response keys below are the frontend contract; do not rename.
        """
        repo_id = (request.query.get("repo_id") or "").strip()
        if not repo_id:
            return web.json_response({"ok": False, "message": "Missing repo_id"}, status=400)

        try:
            # ensure_repo_details may need to fetch/enrich before returning.
            repo = await self.core.ensure_repo_details(repo_id)
            if not repo:
                return web.json_response({"ok": False, "message": "Repo not found"}, status=404)

            # Installed-state lookup: an empty dict means "not installed".
            inst = self.core.get_installed(repo_id) or {}
            installed = bool(inst)
            domains = inst.get("domains") or []
            if not isinstance(domains, list):
                domains = []

            return web.json_response({
                "ok": True,
                "repo": {
                    "id": repo.id,
                    "name": repo.name,
                    "url": repo.url,
                    "source": repo.source,
                    "owner": repo.owner,
                    "provider": repo.provider,
                    "repo_name": repo.provider_repo_name,
                    # Prefer the provider's description; fall back to metadata.
                    "description": repo.provider_description or repo.meta_description,
                    "default_branch": repo.default_branch,
                    "latest_version": repo.latest_version,
                    "latest_version_source": repo.latest_version_source,
                    "category": repo.meta_category,
                    "meta_author": repo.meta_author,
                    "meta_maintainer": repo.meta_maintainer,
                    "meta_source": repo.meta_source,
                    "installed": installed,
                    "installed_version": inst.get("installed_version"),
                    "installed_manifest_version": inst.get("installed_manifest_version"),
                    "installed_domains": domains,
                }
            }, status=200)
        except Exception as e:
            _LOGGER.exception("BCS repo details failed: %s", e)
            return web.json_response({"ok": False, "message": str(e) or "Repo details failed"}, status=500)
|
||||
Reference in New Issue
Block a user