custom_components/bahmcloud_store/metadata.py updated

2026-01-15 11:18:34 +00:00
parent 5cf8e6d40f
commit 08a59ec56e


@@ -15,7 +15,7 @@ _LOGGER = logging.getLogger(__name__)
 @dataclass
 class RepoMetadata:
-    source: str | None = None  # "bcs.yaml" | "hacs.yaml" | "hacs.json"
+    source: str | None = None  # "bcs.yaml" | "hacs.yaml" | "hacs.json" | None
     name: str | None = None
     description: str | None = None
     category: str | None = None
@@ -23,63 +23,13 @@ class RepoMetadata:
     maintainer: str | None = None
-def _clean_str(v: Any) -> str | None:
-    if v is None:
+def _normalize_repo_name(name: str | None) -> str | None:
+    if not name:
         return None
-    if isinstance(v, (int, float)):
-        v = str(v)
-    if not isinstance(v, str):
-        return None
-    s = v.strip()
-    return s or None
-def _extract_common_fields(data: dict[str, Any]) -> RepoMetadata:
-    """
-    Best-effort extraction across possible schemas.
-    We keep this forgiving because third-party repos vary widely.
-    """
-    md = RepoMetadata()
-    # Common / preferred keys for BCS
-    md.name = _clean_str(data.get("name"))
-    md.description = _clean_str(data.get("description"))
-    md.category = _clean_str(data.get("category"))
-    md.author = _clean_str(data.get("author"))
-    md.maintainer = _clean_str(data.get("maintainer"))
-    # HACS compatibility fields
-    # Some repos use 'render_readme', 'content_in_root', etc. ignored for now.
-    # Some use "authors" list or "maintainers" list:
-    if not md.author:
-        a = data.get("authors") or data.get("author")
-        if isinstance(a, list) and a:
-            md.author = _clean_str(a[0])
-        elif isinstance(a, str):
-            md.author = _clean_str(a)
-    if not md.maintainer:
-        m = data.get("maintainers") or data.get("maintainer")
-        if isinstance(m, list) and m:
-            md.maintainer = _clean_str(m[0])
-        elif isinstance(m, str):
-            md.maintainer = _clean_str(m)
-    # Some HACS style manifests use "documentation" or "info" as description-like
-    if not md.description:
-        md.description = _clean_str(data.get("info")) or _clean_str(data.get("documentation"))
-    return md
-def _is_github(repo_url: str) -> bool:
-    return "github.com" in urlparse(repo_url).netloc.lower()
-def _is_gitea(repo_url: str) -> bool:
-    # We treat self-hosted owner/repo as gitea in this project.
-    host = urlparse(repo_url).netloc.lower()
-    return host and "github.com" not in host and "gitlab.com" not in host
+    n = name.strip()
+    if n.endswith(".git"):
+        n = n[:-4]
+    return n or None
 def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
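The hunk above drops the old _clean_str/_extract_common_fields helpers (their role moves to the dedicated _parse_meta_* functions added further down) and introduces _normalize_repo_name. A minimal sketch of the new helper's behavior, with hypothetical inputs that are not part of the commit:

# Illustrative only: mirrors the helper added in this hunk.
def _normalize_repo_name(name: str | None) -> str | None:
    if not name:
        return None
    n = name.strip()
    if n.endswith(".git"):
        n = n[:-4]
    return n or None

assert _normalize_repo_name(" awesome-repo.git ") == "awesome-repo"
assert _normalize_repo_name(".git") is None  # empty after stripping the suffix
assert _normalize_repo_name(None) is None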
@@ -87,13 +37,24 @@ def _split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
     parts = [p for p in u.path.strip("/").split("/") if p]
     if len(parts) < 2:
         return None, None
-    return parts[0], parts[1]
+    owner = parts[0].strip() or None
+    repo = _normalize_repo_name(parts[1])
+    return owner, repo
+def _is_github(repo_url: str) -> bool:
+    return "github.com" in urlparse(repo_url).netloc.lower()
+def _is_gitea(repo_url: str) -> bool:
+    host = urlparse(repo_url).netloc.lower()
+    return host and ("github.com" not in host) and ("gitlab.com" not in host)
 async def _fetch_text(hass: HomeAssistant, url: str) -> str | None:
     session = async_get_clientsession(hass)
     try:
-        async with session.get(url, timeout=15) as resp:
+        async with session.get(url, timeout=20) as resp:
             if resp.status != 200:
                 return None
             return await resp.text()
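This hunk moves _is_github/_is_gitea below _split_owner_repo, has the split return a whitespace-stripped owner and a ".git"-less repo name, and raises the raw-fetch timeout from 15 to 20 seconds. A self-contained sketch of the resulting URL handling, assuming a hypothetical self-hosted URL (the example host and repo are not from the commit):

from urllib.parse import urlparse

def classify_host(repo_url: str) -> str:
    # Mirrors the relocated _is_github/_is_gitea checks: GitHub is matched by host,
    # and any host that is neither github.com nor gitlab.com is treated as Gitea-style.
    host = urlparse(repo_url).netloc.lower()
    if "github.com" in host:
        return "github"
    if host and "gitlab.com" not in host:
        return "gitea"
    return "unsupported"

def split_owner_repo(repo_url: str) -> tuple[str | None, str | None]:
    # Mirrors the updated _split_owner_repo: first two path segments,
    # owner stripped, trailing ".git" removed from the repo name.
    parts = [p for p in urlparse(repo_url.rstrip("/")).path.strip("/").split("/") if p]
    if len(parts) < 2:
        return None, None
    owner = parts[0].strip() or None
    repo = parts[1].strip().removesuffix(".git") or None
    return owner, repo

print(classify_host("https://git.example.com/alice/widget.git"))    # gitea
print(split_owner_repo("https://git.example.com/alice/widget.git")) # ('alice', 'widget')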
@@ -101,64 +62,100 @@ async def _fetch_text(hass: HomeAssistant, url: str) -> str | None:
         return None
-async def fetch_repo_metadata(hass: HomeAssistant, repo_url: str, default_branch: str | None) -> RepoMetadata:
-    """
-    Best-effort metadata resolution from repo root:
-    1) bcs.yaml
-    2) hacs.yaml
-    3) hacs.json
+def _parse_meta_yaml(raw: str, source: str) -> RepoMetadata:
+    try:
+        data = ha_yaml.parse_yaml(raw)
+        if not isinstance(data, dict):
+            return RepoMetadata(source=source)
-    Works for:
-    - GitHub: raw.githubusercontent.com
-    - Gitea: /raw/branch/<branch>/
-    """
+        return RepoMetadata(
+            source=source,
+            name=data.get("name"),
+            description=data.get("description"),
+            category=data.get("category"),
+            author=data.get("author"),
+            maintainer=data.get("maintainer"),
+        )
+    except Exception:
+        return RepoMetadata(source=source)
+def _parse_meta_hacs_json(raw: str) -> RepoMetadata:
+    try:
+        data = json.loads(raw)
+        if not isinstance(data, dict):
+            return RepoMetadata(source="hacs.json")
+        # HACS metadata is not standardized for description/category across all repos,
+        # but we support common fields and keep them optional.
+        name = data.get("name")
+        description = data.get("description")
+        author = data.get("author")
+        maintainer = data.get("maintainer")
+        # Optional: some repos use "category" or "type" for store grouping
+        category = data.get("category") or data.get("type")
+        return RepoMetadata(
+            source="hacs.json",
+            name=name if isinstance(name, str) else None,
+            description=description if isinstance(description, str) else None,
+            category=category if isinstance(category, str) else None,
+            author=author if isinstance(author, str) else None,
+            maintainer=maintainer if isinstance(maintainer, str) else None,
+        )
+    except Exception:
+        return RepoMetadata(source="hacs.json")
+async def fetch_repo_metadata(hass: HomeAssistant, repo_url: str, default_branch: str | None) -> RepoMetadata:
     owner, repo = _split_owner_repo(repo_url)
     if not owner or not repo:
         return RepoMetadata()
     branch = default_branch or "main"
     # Priority:
     # 1) bcs.yaml
     # 2) hacs.yaml
     # 3) hacs.json
+    filenames = ["bcs.yaml", "hacs.yaml", "hacs.json"]
+    candidates: list[tuple[str, str]] = []
     if _is_github(repo_url):
         base = f"https://raw.githubusercontent.com/{owner}/{repo}/{branch}"
-        candidates = [
-            ("bcs.yaml", f"{base}/bcs.yaml"),
-            ("hacs.yaml", f"{base}/hacs.yaml"),
-            ("hacs.json", f"{base}/hacs.json"),
-        ]
+        for fn in filenames:
+            candidates.append((fn, f"{base}/{fn}"))
     elif _is_gitea(repo_url):
         u = urlparse(repo_url.rstrip("/"))
-        base = f"{u.scheme}://{u.netloc}/{owner}/{repo}/raw/branch/{branch}"
-        candidates = [
-            ("bcs.yaml", f"{base}/bcs.yaml"),
-            ("hacs.yaml", f"{base}/hacs.yaml"),
-            ("hacs.json", f"{base}/hacs.json"),
+        root = f"{u.scheme}://{u.netloc}/{owner}/{repo}"
+        bases = [
+            f"{root}/raw/branch/{branch}",
+            f"{root}/raw/{branch}",
         ]
+        for fn in filenames:
+            for b in bases:
+                candidates.append((fn, f"{b}/{fn}"))
     else:
         # Unsupported provider for metadata raw fetch in 0.3.2
         return RepoMetadata()
-    for source, url in candidates:
-        text = await _fetch_text(hass, url)
-        if not text:
+    for fn, url in candidates:
+        raw = await _fetch_text(hass, url)
+        if not raw:
             continue
-        try:
-            if source.endswith(".json"):
-                data = json.loads(text)
-                if not isinstance(data, dict):
-                    continue
-            else:
-                data = ha_yaml.parse_yaml(text)
-                if not isinstance(data, dict):
-                    continue
-            md = _extract_common_fields(data)
-            md.source = source
-            return md
-        except Exception as e:
-            _LOGGER.debug("Failed parsing %s for %s: %s", source, repo_url, e)
+        if fn.endswith(".json"):
+            meta = _parse_meta_hacs_json(raw)
+            if meta.source:
+                return meta
+            continue
+        meta = _parse_meta_yaml(raw, fn)
+        if meta.source:
+            return meta
     return RepoMetadata()
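With this commit, fetch_repo_metadata builds its candidate list in a fixed priority (bcs.yaml, then hacs.yaml, then hacs.json); GitHub repos get a single raw.githubusercontent.com base, while Gitea-style hosts are probed under both /raw/branch/<branch>/ and /raw/<branch>/, and each hit is parsed by _parse_meta_hacs_json or _parse_meta_yaml. A rough sketch of the resulting candidate order, using assumed example values (owner "alice", repo "widget", branch "main", hypothetical host):

# Illustrative only: candidate URLs in priority order for a hypothetical Gitea-style host.
root = "https://git.example.com/alice/widget"
bases = [f"{root}/raw/branch/main", f"{root}/raw/main"]
filenames = ["bcs.yaml", "hacs.yaml", "hacs.json"]

# Outer loop over filenames, inner loop over bases, matching the nested loops above.
candidates = [(fn, f"{b}/{fn}") for fn in filenames for b in bases]
for fn, url in candidates:
    print(fn, url)
# bcs.yaml  https://git.example.com/alice/widget/raw/branch/main/bcs.yaml
# bcs.yaml  https://git.example.com/alice/widget/raw/main/bcs.yaml
# hacs.yaml ... then hacs.json; the first candidate that fetches successfully is used.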