Comparing 0.4.0 ... 0.4.1: 10 commits, 4 changed files with 255 additions and 244 deletions

View File

@@ -11,19 +11,47 @@ Sections:
---
## [0.4.1] - 2026-01-15
### Fixed
- Fixed GitLab README loading by using robust raw file endpoints (see the URL fallback sketch after this list).
- Added support for nested GitLab groups when resolving README paths.
- Added fallback handling for multiple README filenames (`README.md`, `README`, `README.rst`, etc.).
- Added branch fallback logic for README loading (the repository's default branch, then `main`, then `master`).
- Improved error resilience so README loading failures never break the store core.
- No behavior change for GitHub and Gitea providers.
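The GitLab fallback order can be illustrated with a small standalone sketch. It mirrors the candidate-building logic that 0.4.1 adds to the provider module (shown in the last file of this diff); the helper name and the example URL are illustrative only:

```python
from urllib.parse import urlparse

def gitlab_readme_candidates(repo_url: str, default_branch: str | None) -> list[str]:
    """Raw-file URLs to try, in order (illustrative sketch, not the shipped code)."""
    u = urlparse(repo_url.rstrip("/"))
    # Keep the full project path so nested groups (group/subgroup/project) resolve.
    path = u.path.strip("/").removesuffix(".git")
    root = f"{u.scheme}://{u.netloc}/{path}"
    # Repository default branch first, then the common fallbacks, without duplicates.
    branches = list(dict.fromkeys(b for b in (default_branch, "main", "master") if b))
    filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]
    urls: list[str] = []
    for branch in branches:
        for base in (f"{root}/-/raw/{branch}", f"{root}/raw/{branch}"):
            urls.extend(f"{base}/{fn}" for fn in filenames)
    return urls

# e.g. "https://gitlab.com/group/subgroup/project" tries
# https://gitlab.com/group/subgroup/project/-/raw/main/README.md first.
```

Every candidate is treated as best-effort: a non-200 response or a network error simply moves on to the next URL, so a missing README never raises into the store core.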
## [0.4.0] - 2026-01-15
### Added
- Repository detail view (second page) in the Store UI.
- README rendering using Home Assistant's `ha-markdown` element.
- Floating action buttons (FAB):
  - Open repository
  - Reload README
  - Install (coming soon)
  - Update (coming soon)
- Search field and category filter on the repository list page.
- New authenticated API endpoint:
  - `GET /api/bcs/readme?repo_id=<id>` returns README markdown (best-effort).
- Initial public release of the Bahmcloud Store integration.
- Sidebar panel with repository browser UI.
- Support for loading repositories from a central `store.yaml` index.
- Support for custom repositories added by the user.
- Provider abstraction for GitHub, GitLab and Gitea:
  - Fetch repository information (name, description, default branch).
  - Resolve latest version from:
    - Releases
    - Tags
    - Fallback mechanisms
- Repository metadata support via:
  - `bcs.yaml`
  - `hacs.yaml`
  - `hacs.json`
- README loading and rendering pipeline:
  - Fetch raw README files.
  - Server-side Markdown rendering.
  - Sanitized HTML output for the panel UI.
- Auto refresh mechanism for store index and repository metadata.
- API endpoints:
  - List repositories
  - Add custom repository
  - Remove repository
- Custom repositories are persisted via Home Assistant storage (`.storage/bcs_store`).
- Public static asset endpoint for panel JS (`/api/bahmcloud_store_static/...`), served without authentication (required for HA custom panels).
- Initial API namespace (a usage sketch follows this list):
  - `GET /api/bcs`: list merged repositories (index + custom)
  - `POST /api/bcs`: add a custom repository
  - `DELETE /api/bcs/custom_repo`: remove a custom repository
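As a rough illustration of these endpoints, the following sketch calls them from outside Home Assistant with `aiohttp`. The base URL, the long-lived access token, the `url` field in the POST body, and the assumption that the list endpoint returns a JSON array of repo objects with an `id` field are all assumptions to verify against the view implementations:

```python
import asyncio
import aiohttp

HA_URL = "http://homeassistant.local:8123"   # assumed Home Assistant base URL
TOKEN = "YOUR_LONG_LIVED_ACCESS_TOKEN"       # created under your HA user profile

async def main() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # List merged repositories (index + custom).
        async with session.get(f"{HA_URL}/api/bcs") as resp:
            repos = await resp.json()

        # Add a custom repository (payload key assumed to be "url").
        async with session.post(
            f"{HA_URL}/api/bcs",
            json={"url": "https://github.com/example/example-repo"},
        ) as resp:
            print("add custom repo:", resp.status)

        # Fetch README markdown for the first listed repository (best-effort).
        if repos:
            async with session.get(
                f"{HA_URL}/api/bcs/readme", params={"repo_id": repos[0]["id"]}
            ) as resp:
                print(await resp.text())

asyncio.run(main())
```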
### Changed
- Repository cards are now clickable to open the detail view.

View File

@@ -15,7 +15,7 @@ from homeassistant.util import yaml as ha_yaml
from .storage import BCSStorage, CustomRepo
from .views import StaticAssetsView, BCSApiView, BCSReadmeView
from .custom_repo_view import BCSCustomRepoView
from .providers import fetch_repo_info, detect_provider, RepoInfo
from .providers import fetch_repo_info, detect_provider, RepoInfo, fetch_readme_markdown
from .metadata import fetch_repo_metadata, RepoMetadata
_LOGGER = logging.getLogger(__name__)
@@ -46,7 +46,7 @@ class RepoItem:
default_branch: str | None = None
latest_version: str | None = None
latest_version_source: str | None = None # "release" | "tag" | None
latest_version_source: str | None = None # "release" | "tag" | "atom" | None
meta_source: str | None = None
meta_name: str | None = None
@@ -198,10 +198,14 @@ class BCSCore:
raise BCSError(f"Invalid store.yaml: {e}") from e
async def add_custom_repo(self, url: str, name: str | None) -> CustomRepo:
repo = await self.storage.add_custom_repo(url=url, name=name)
url = str(url or "").strip()
if not url:
raise BCSError("Missing url")
c = await self.storage.add_custom_repo(url, name)
await self.refresh()
self.signal_updated()
return repo
return c
async def remove_custom_repo(self, repo_id: str) -> None:
await self.storage.remove_custom_repo(repo_id)
@@ -214,7 +218,6 @@ class BCSCore:
def list_repos_public(self) -> list[dict[str, Any]]:
out: list[dict[str, Any]] = []
for r in self.repos.values():
resolved_description = r.meta_description or r.provider_description
out.append(
{
"id": r.id,
@@ -223,101 +226,27 @@ class BCSCore:
"source": r.source,
"owner": r.owner,
"provider": r.provider,
"meta_source": r.meta_source,
"meta_name": r.meta_name,
"meta_description": r.meta_description,
"meta_category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"provider_repo_name": r.provider_repo_name,
"provider_description": r.provider_description,
"description": resolved_description,
"category": r.meta_category,
"repo_name": r.provider_repo_name,
"description": r.provider_description or r.meta_description,
"default_branch": r.default_branch,
"latest_version": r.latest_version,
"latest_version_source": r.latest_version_source,
"category": r.meta_category,
"meta_author": r.meta_author,
"meta_maintainer": r.meta_maintainer,
"meta_source": r.meta_source,
}
)
return out
# ----------------------------
# README fetching
# ----------------------------
def _normalize_repo_name(self, name: str | None) -> str | None:
if not name:
return None
n = name.strip()
if n.endswith(".git"):
n = n[:-4]
return n or None
def _split_owner_repo(self, repo_url: str) -> tuple[str | None, str | None]:
u = urlparse(repo_url.rstrip("/"))
parts = [p for p in u.path.strip("/").split("/") if p]
if len(parts) < 2:
return None, None
owner = parts[0].strip() or None
repo = self._normalize_repo_name(parts[1])
return owner, repo
def _is_github(self, repo_url: str) -> bool:
return "github.com" in urlparse(repo_url).netloc.lower()
def _is_gitea(self, repo_url: str) -> bool:
host = urlparse(repo_url).netloc.lower()
return host and "github.com" not in host and "gitlab.com" not in host
async def _fetch_text(self, url: str) -> str | None:
session = async_get_clientsession(self.hass)
try:
async with session.get(url, timeout=20) as resp:
if resp.status != 200:
return None
return await resp.text()
except Exception:
return None
async def fetch_readme_markdown(self, repo_id: str) -> str | None:
repo = self.get_repo(repo_id)
if not repo:
return None
owner, name = self._split_owner_repo(repo.url)
if not owner or not name:
return None
branch = repo.default_branch or "main"
filenames = ["README.md", "readme.md", "README.MD"]
candidates: list[str] = []
if self._is_github(repo.url):
# raw github content
base = f"https://raw.githubusercontent.com/{owner}/{name}/{branch}"
candidates.extend([f"{base}/{fn}" for fn in filenames])
elif self._is_gitea(repo.url):
u = urlparse(repo.url.rstrip("/"))
root = f"{u.scheme}://{u.netloc}/{owner}/{name}"
# gitea raw endpoints (both common forms)
bases = [
f"{root}/raw/branch/{branch}",
f"{root}/raw/{branch}",
]
for b in bases:
candidates.extend([f"{b}/{fn}" for fn in filenames])
else:
return None
for url in candidates:
txt = await self._fetch_text(url)
if txt:
return txt
return None
return await fetch_readme_markdown(
self.hass,
repo.url,
provider=repo.provider,
default_branch=repo.default_branch,
)

View File

@@ -1,9 +1,9 @@
{
  "domain": "bahmcloud_store",
  "name": "Bahmcloud Store",
  "version": "0.4.0",
  "version": "0.4.1",
  "documentation": "https://git.bahmcloud.de/bahmcloud/bahmcloud_store",
  "requirements": [],
  "codeowners": [],
  "codeowners": ["@bahmcloud"],
  "iot_class": "local_polling"
}

View File

@@ -51,12 +51,7 @@ def detect_provider(repo_url: str) -> str:
return "github"
if "gitlab" in host:
return "gitlab"
owner, repo = _split_owner_repo(repo_url)
if owner and repo:
return "gitea"
return "generic"
return "gitea"
async def _safe_json(session, url: str, *, headers: dict | None = None, timeout: int = 20):
@@ -82,130 +77,83 @@ async def _safe_text(session, url: str, *, headers: dict | None = None, timeout:
def _extract_tag_from_github_url(url: str) -> str | None:
m = re.search(r"/releases/tag/([^/?#]+)", url)
if m:
return m.group(1)
m = re.search(r"/tag/([^/?#]+)", url)
if m:
return m.group(1)
return None
def _strip_html(s: str) -> str:
# minimal HTML entity cleanup for meta descriptions
out = (
s.replace("&amp;", "&")
.replace("&quot;", '"')
.replace("&#39;", "'")
.replace("&lt;", "<")
.replace("&gt;", ">")
)
return re.sub(r"\s+", " ", out).strip()
m = re.search(r"/releases/tag/([^/?#]+)", url or "")
if not m:
return None
return m.group(1).strip() or None
def _extract_meta(html: str, *, prop: str | None = None, name: str | None = None) -> str | None:
# Extract <meta property="og:description" content="...">
# or <meta name="description" content="...">
if not html:
return None
if prop:
# property="..." content="..."
m = re.search(
r'<meta[^>]+property=["\']' + re.escape(prop) + r'["\'][^>]+content=["\']([^"\']+)["\']',
html,
flags=re.IGNORECASE,
)
m = re.search(rf'<meta\s+property="{re.escape(prop)}"\s+content="([^"]+)"', html)
if m:
return _strip_html(m.group(1))
m = re.search(
r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+property=["\']' + re.escape(prop) + r'["\']',
html,
flags=re.IGNORECASE,
)
if m:
return _strip_html(m.group(1))
return m.group(1).strip()
if name:
m = re.search(
r'<meta[^>]+name=["\']' + re.escape(name) + r'["\'][^>]+content=["\']([^"\']+)["\']',
html,
flags=re.IGNORECASE,
)
m = re.search(rf'<meta\s+name="{re.escape(name)}"\s+content="([^"]+)"', html)
if m:
return _strip_html(m.group(1))
m = re.search(
r'<meta[^>]+content=["\']([^"\']+)["\'][^>]+name=["\']' + re.escape(name) + r'["\']',
html,
flags=re.IGNORECASE,
)
if m:
return _strip_html(m.group(1))
return m.group(1).strip()
return None
async def _github_description_html(hass: HomeAssistant, owner: str, repo: str) -> str | None:
"""
GitHub API may be rate-limited; fetch public HTML and read meta description.
"""
session = async_get_clientsession(hass)
headers = {
"User-Agent": UA,
"Accept": "text/html,application/xhtml+xml",
}
html, status = await _safe_text(session, f"https://github.com/{owner}/{repo}", headers=headers)
if not html or status != 200:
url = f"https://github.com/{owner}/{repo}"
html, status = await _safe_text(session, url, headers={"User-Agent": UA})
if status != 200 or not html:
return None
desc = _extract_meta(html, prop="og:description")
if desc:
return desc
desc = _extract_meta(html, name="description")
if desc:
return desc
return None
return _extract_meta(html, name="description")
async def _github_latest_version_atom(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass)
headers = {"User-Agent": UA, "Accept": "application/atom+xml,text/xml;q=0.9,*/*;q=0.8"}
xml_text, _ = await _safe_text(session, f"https://github.com/{owner}/{repo}/releases.atom", headers=headers)
if not xml_text:
url = f"https://github.com/{owner}/{repo}/releases.atom"
atom, status = await _safe_text(session, url, headers={"User-Agent": UA})
if status != 200 or not atom:
return None, None
try:
root = ET.fromstring(xml_text)
except Exception:
return None, None
for entry in root.findall(".//{*}entry"):
for link in entry.findall(".//{*}link"):
href = link.attrib.get("href")
if not href:
continue
tag = _extract_tag_from_github_url(href)
root = ET.fromstring(atom)
ns = {"a": "http://www.w3.org/2005/Atom"}
entry = root.find("a:entry", ns)
if entry is None:
return None, None
link = entry.find("a:link", ns)
if link is not None and link.attrib.get("href"):
tag = _extract_tag_from_github_url(link.attrib["href"])
if tag:
return tag, "atom"
title = entry.find("a:title", ns)
if title is not None and title.text:
t = title.text.strip()
if t:
return t, "atom"
except Exception:
return None, None
return None, None
async def _github_latest_version_redirect(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass)
headers = {"User-Agent": UA}
url = f"https://github.com/{owner}/{repo}/releases/latest"
try:
async with session.head(url, allow_redirects=False, timeout=15, headers=headers) as resp:
if resp.status in (301, 302, 303, 307, 308):
loc = resp.headers.get("Location")
if loc:
tag = _extract_tag_from_github_url(loc)
if tag:
return tag, "release"
async with session.get(url, timeout=20, headers={"User-Agent": UA}, allow_redirects=True) as resp:
if resp.status != 200:
return None, None
final = str(resp.url)
tag = _extract_tag_from_github_url(final)
if tag:
return tag, "release"
except Exception:
pass
return None, None
return None, None
@@ -213,31 +161,30 @@ async def _github_latest_version_api(hass: HomeAssistant, owner: str, repo: str)
session = async_get_clientsession(hass)
headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
if isinstance(data, dict):
tag = data.get("tag_name") or data.get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "release"
data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
if isinstance(data, dict) and data.get("tag_name"):
return str(data["tag_name"]), "release"
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers)
if isinstance(data, list) and data:
tag = data[0].get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "tag"
if status == 404:
data, _ = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1", headers=headers)
if isinstance(data, list) and data:
t = data[0]
if isinstance(t, dict) and t.get("name"):
return str(t["name"]), "tag"
return None, None
async def _github_latest_version(hass: HomeAssistant, owner: str, repo: str) -> tuple[str | None, str | None]:
tag, src = await _github_latest_version_atom(hass, owner, repo)
if tag:
return tag, src
tag, src = await _github_latest_version_redirect(hass, owner, repo)
if tag:
return tag, src
return await _github_latest_version_api(hass, owner, repo)
tag, src = await _github_latest_version_api(hass, owner, repo)
if tag:
return tag, src
return await _github_latest_version_atom(hass, owner, repo)
async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
@@ -245,43 +192,51 @@ async def _gitea_latest_version(hass: HomeAssistant, base: str, owner: str, repo
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/releases?limit=1")
if isinstance(data, list) and data:
tag = data[0].get("tag_name") or data[0].get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "release"
r = data[0]
if isinstance(r, dict) and r.get("tag_name"):
return str(r["tag_name"]), "release"
data, _ = await _safe_json(session, f"{base}/api/v1/repos/{owner}/{repo}/tags?limit=1")
if isinstance(data, list) and data:
tag = data[0].get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "tag"
t = data[0]
if isinstance(t, dict) and t.get("name"):
return str(t["name"]), "tag"
return None, None
async def _gitlab_latest_version(hass: HomeAssistant, base: str, owner: str, repo: str) -> tuple[str | None, str | None]:
async def _gitlab_latest_version(
hass: HomeAssistant, base: str, owner: str, repo: str
) -> tuple[str | None, str | None]:
session = async_get_clientsession(hass)
headers = {"User-Agent": UA}
project = quote_plus(f"{owner}/{repo}")
data, _ = await _safe_json(
session,
f"{base}/api/v4/projects/{project}/releases?per_page=1&order_by=released_at&sort=desc",
headers=headers,
)
data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/releases?per_page=1", headers=headers)
if isinstance(data, list) and data:
tag = data[0].get("tag_name") or data[0].get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "release"
r = data[0]
if isinstance(r, dict) and r.get("tag_name"):
return str(r["tag_name"]), "release"
data, _ = await _safe_json(
session,
f"{base}/api/v4/projects/{project}/repository/tags?per_page=1&order_by=updated&sort=desc",
headers=headers,
)
data, _ = await _safe_json(session, f"{base}/api/v4/projects/{project}/repository/tags?per_page=1", headers=headers)
if isinstance(data, list) and data:
tag = data[0].get("name")
if isinstance(tag, str) and tag.strip():
return tag.strip(), "tag"
t = data[0]
if isinstance(t, dict) and t.get("name"):
return str(t["name"]), "tag"
atom, status = await _safe_text(session, f"{base}/{owner}/{repo}/-/tags?format=atom", headers=headers)
if status == 200 and atom:
try:
root = ET.fromstring(atom)
ns = {"a": "http://www.w3.org/2005/Atom"}
entry = root.find("a:entry", ns)
if entry is not None:
title = entry.find("a:title", ns)
if title is not None and title.text:
return title.text.strip(), "atom"
except Exception:
pass
return None, None
@@ -307,7 +262,6 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
try:
if provider == "github":
# Try API repo details (may be rate-limited)
headers = {"Accept": "application/vnd.github+json", "User-Agent": UA}
data, status = await _safe_json(session, f"https://api.github.com/repos/{owner}/{repo}", headers=headers)
@@ -318,12 +272,10 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
if isinstance(data.get("owner"), dict) and data["owner"].get("login"):
info.owner = data["owner"]["login"]
else:
# If API blocked, still set reasonable defaults
if status == 403:
_LOGGER.debug("GitHub API blocked/rate-limited for repo info %s/%s", owner, repo)
info.default_branch = "main"
# If description missing, fetch from GitHub HTML
if not info.description:
desc = await _github_description_html(hass, owner, repo)
if desc:
@@ -371,8 +323,110 @@ async def fetch_repo_info(hass: HomeAssistant, repo_url: str) -> RepoInfo:
info.latest_version_source = src
return info
return info
except Exception as e:
_LOGGER.debug("Provider fetch failed for %s: %s", repo_url, e)
return info
_LOGGER.debug("fetch_repo_info failed for %s: %s", repo_url, e)
return info
async def fetch_readme_markdown(
    hass: HomeAssistant,
    repo_url: str,
    *,
    provider: str | None = None,
    default_branch: str | None = None,
) -> str | None:
    """Fetch README Markdown for public repositories (GitHub/GitLab/Gitea).

    Defensive behavior:
    - tries multiple common README filenames
    - tries multiple branches (default, main, master)
    - uses public raw endpoints (no tokens required for public repositories)
    """
    repo_url = (repo_url or "").strip()
    if not repo_url:
        return None

    prov = (provider or "").strip().lower() if provider else ""
    if not prov:
        prov = detect_provider(repo_url)

    branch_candidates: list[str] = []
    if default_branch and str(default_branch).strip():
        branch_candidates.append(str(default_branch).strip())
    for b in ("main", "master"):
        if b not in branch_candidates:
            branch_candidates.append(b)

    filenames = ["README.md", "readme.md", "README.MD", "README.rst", "README"]

    session = async_get_clientsession(hass)
    headers = {"User-Agent": UA}

    def _normalize_gitlab_path(path: str) -> str | None:
        p = (path or "").strip().strip("/")
        if not p:
            return None
        parts = [x for x in p.split("/") if x]
        if len(parts) < 2:
            return None
        if parts[-1].endswith(".git"):
            parts[-1] = parts[-1][:-4]
        return "/".join(parts)

    candidates: list[str] = []

    if prov == "github":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        for branch in branch_candidates:
            base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
            for fn in filenames:
                candidates.append(f"{base}/{fn}")
    elif prov == "gitea":
        owner, repo = _split_owner_repo(repo_url)
        if not owner or not repo:
            return None
        u = urlparse(repo_url.rstrip("/"))
        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
        for branch in branch_candidates:
            bases = [
                f"{root}/raw/branch/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")
    elif prov == "gitlab":
        u = urlparse(repo_url.rstrip("/"))
        path_repo = _normalize_gitlab_path(u.path)
        if not path_repo:
            return None
        root = f"{u.scheme}://{u.netloc}/{path_repo}"
        for branch in branch_candidates:
            bases = [
                f"{root}/-/raw/{branch}",
                f"{root}/raw/{branch}",
            ]
            for b in bases:
                for fn in filenames:
                    candidates.append(f"{b}/{fn}")
    else:
        return None

    for url in candidates:
        try:
            async with session.get(url, timeout=20, headers=headers) as resp:
                if resp.status != 200:
                    continue
                txt = await resp.text()
                if txt and txt.strip():
                    return txt
        except Exception:
            continue

    return None
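
Taken together, the provider module now exposes README fetching as a plain function, so other parts of the integration only need a `HomeAssistant` instance and a repository URL. A minimal sketch of a call site (the wrapper name `load_readme` and the import path are assumptions; `BCSCore.fetch_readme_markdown` in the second file above does essentially this):

```python
from homeassistant.core import HomeAssistant

# Import path assumes the integration lives in custom_components/bahmcloud_store/.
from custom_components.bahmcloud_store.providers import (
    detect_provider,
    fetch_readme_markdown,
)

async def load_readme(hass: HomeAssistant, repo_url: str, default_branch: str | None = None) -> str:
    """Hypothetical helper: return README markdown or a placeholder string."""
    md = await fetch_readme_markdown(
        hass,
        repo_url,
        provider=detect_provider(repo_url),  # optional; the function detects it when omitted
        default_branch=default_branch,
    )
    return md or "_No README found._"
```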