Implement long operation timeouts for WebDAV client

Added a timeout for long WebDAV operations to improve reliability.
This commit is contained in:
2026-01-14 10:44:49 +01:00
committed by GitHub
parent a67a631c99
commit 4171159264

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import base64 import base64
import logging import logging
import os
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
from collections.abc import AsyncIterator from collections.abc import AsyncIterator
from datetime import datetime, timezone from datetime import datetime, timezone
@@ -9,6 +10,7 @@ from email.utils import parsedate_to_datetime
from typing import Final from typing import Final
from urllib.parse import quote, urljoin from urllib.parse import quote, urljoin
import aiohttp
from aiohttp import ClientResponseError, ClientSession from aiohttp import ClientResponseError, ClientSession
from yarl import URL from yarl import URL
@@ -46,6 +48,11 @@ class WebDavClient:
] ]
self._cached_root: str | None = None self._cached_root: str | None = None
# Non-restrictive client timeouts for potentially long WebDAV operations
self._timeout_long = aiohttp.ClientTimeout(
total=None, connect=60, sock_connect=60, sock_read=None
)
def _auth_header(self) -> str: def _auth_header(self) -> str:
token = base64.b64encode(f"{self._username}:{self._password}".encode("utf-8")).decode("ascii") token = base64.b64encode(f"{self._username}:{self._password}".encode("utf-8")).decode("ascii")
return f"Basic {token}" return f"Basic {token}"
@@ -82,6 +89,7 @@ class WebDavClient:
b'<d:propfind xmlns:d="DAV:"><d:prop><d:resourcetype/></d:prop></d:propfind>' b'<d:propfind xmlns:d="DAV:"><d:prop><d:resourcetype/></d:prop></d:propfind>'
), ),
raise_for_status=True, raise_for_status=True,
timeout=self._timeout_long,
): ):
self._cached_root = root self._cached_root = root
return root return root
@@ -106,6 +114,7 @@ class WebDavClient:
base_folder, base_folder,
headers=self._headers({"Depth": "0"}), headers=self._headers({"Depth": "0"}),
raise_for_status=True, raise_for_status=True,
timeout=self._timeout_long,
): ):
return return
except ClientResponseError as err: except ClientResponseError as err:
@@ -127,7 +136,11 @@ class WebDavClient:
# exists? # exists?
try: try:
async with self._session.request( async with self._session.request(
"PROPFIND", url, headers=self._headers({"Depth": "0"}), raise_for_status=True "PROPFIND",
url,
headers=self._headers({"Depth": "0"}),
raise_for_status=True,
timeout=self._timeout_long,
): ):
return return
except ClientResponseError as err: except ClientResponseError as err:
@@ -135,7 +148,9 @@ class WebDavClient:
raise raise
# create # create
async with self._session.request("MKCOL", url, headers=self._headers()) as resp: async with self._session.request(
"MKCOL", url, headers=self._headers(), timeout=self._timeout_long
) as resp:
if resp.status in (201, 405): if resp.status in (201, 405):
return return
text = await resp.text() text = await resp.text()
@@ -154,6 +169,7 @@ class WebDavClient:
b'<d:propfind xmlns:d="DAV:"><d:prop><d:displayname/></d:prop></d:propfind>' b'<d:propfind xmlns:d="DAV:"><d:prop><d:displayname/></d:prop></d:propfind>'
), ),
raise_for_status=True, raise_for_status=True,
timeout=self._timeout_long,
) as resp: ) as resp:
body = await resp.text() body = await resp.text()
@@ -195,10 +211,42 @@ class WebDavClient:
async def put_bytes(self, name: str, data: bytes) -> None: async def put_bytes(self, name: str, data: bytes) -> None:
folder = await self._base_folder_url() folder = await self._base_folder_url()
url = self._file_url(folder, name) url = self._file_url(folder, name)
async with self._session.put(url, data=data, headers=self._headers(), raise_for_status=True): async with self._session.put(
url,
data=data,
headers=self._headers({"Content-Length": str(len(data))}),
raise_for_status=True,
timeout=self._timeout_long,
):
return return
async def put_file(self, name: str, path: str, size: int) -> None:
    """Upload a local file with an explicit Content-Length (non-chunked)."""
    base = await self._base_folder_url()
    target = self._file_url(base, name)
    # Ensure correct size if caller passes 0/unknown
    if size <= 0:
        try:
            size = os.path.getsize(path)
        except OSError:
            # Best-effort: fall back to an unknown size rather than failing here.
            size = 0
    extra_headers = {} if size <= 0 else {"Content-Length": str(size)}
    # aiohttp will stream file content; with Content-Length set, proxies are usually happier.
    # NOTE(review): open()/read happen synchronously on the event loop — presumably
    # acceptable for this client's file sizes; confirm if large uploads block the loop.
    with open(path, "rb") as handle:
        async with self._session.put(
            target,
            data=handle,
            headers=self._headers(extra_headers),
            raise_for_status=True,
            timeout=self._timeout_long,
        ):
            return
async def put_stream(self, name: str, stream: AsyncIterator[bytes]) -> None: async def put_stream(self, name: str, stream: AsyncIterator[bytes]) -> None:
"""Legacy method: chunked upload. Prefer put_file for better proxy compatibility."""
folder = await self._base_folder_url() folder = await self._base_folder_url()
url = self._file_url(folder, name) url = self._file_url(folder, name)
@@ -206,13 +254,19 @@ class WebDavClient:
async for chunk in stream: async for chunk in stream:
yield chunk yield chunk
async with self._session.put(url, data=gen(), headers=self._headers(), raise_for_status=True): async with self._session.put(
url,
data=gen(),
headers=self._headers(),
raise_for_status=True,
timeout=self._timeout_long,
):
return return
async def get_bytes(self, name: str) -> bytes: async def get_bytes(self, name: str) -> bytes:
folder = await self._base_folder_url() folder = await self._base_folder_url()
url = self._file_url(folder, name) url = self._file_url(folder, name)
async with self._session.get(url, headers=self._headers()) as resp: async with self._session.get(url, headers=self._headers(), timeout=self._timeout_long) as resp:
if resp.status == 404: if resp.status == 404:
raise FileNotFoundError(name) raise FileNotFoundError(name)
resp.raise_for_status() resp.raise_for_status()
@@ -221,7 +275,7 @@ class WebDavClient:
async def get_stream(self, name: str) -> AsyncIterator[bytes]: async def get_stream(self, name: str) -> AsyncIterator[bytes]:
folder = await self._base_folder_url() folder = await self._base_folder_url()
url = self._file_url(folder, name) url = self._file_url(folder, name)
resp = await self._session.get(url, headers=self._headers()) resp = await self._session.get(url, headers=self._headers(), timeout=self._timeout_long)
if resp.status == 404: if resp.status == 404:
await resp.release() await resp.release()
raise FileNotFoundError(name) raise FileNotFoundError(name)
@@ -239,7 +293,7 @@ class WebDavClient:
async def delete(self, name: str) -> None: async def delete(self, name: str) -> None:
folder = await self._base_folder_url() folder = await self._base_folder_url()
url = self._file_url(folder, name) url = self._file_url(folder, name)
async with self._session.delete(url, headers=self._headers()) as resp: async with self._session.delete(url, headers=self._headers(), timeout=self._timeout_long) as resp:
if resp.status == 404: if resp.status == 404:
raise FileNotFoundError(name) raise FileNotFoundError(name)
if resp.status in (200, 202, 204): if resp.status in (200, 202, 204):
@@ -267,6 +321,7 @@ class WebDavClient:
url, url,
headers=self._headers({"Depth": "0", "Content-Type": "application/xml; charset=utf-8"}), headers=self._headers({"Depth": "0", "Content-Type": "application/xml; charset=utf-8"}),
data=body, data=body,
timeout=self._timeout_long,
) as resp: ) as resp:
if resp.status == 404: if resp.status == 404:
raise FileNotFoundError(name) raise FileNotFoundError(name)