From ae946344e3a412aa8a3652cf181b9644fe36d670 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Thu, 23 Apr 2026 17:36:24 +0200 Subject: [PATCH 01/17] chore: reduce ruff ignore list Code quality pass per https://github.com/python-caldav/caldav/issues/634: * Remove unused imports (copy, lxml.etree, caldav.compatibility_hints, CalendarSet, cdav/dav, Optional, timezone, Event/Todo TYPE_CHECKING stubs, bare `import niquests` availability check) * Replace bare `except:` with specific types (KeyError, AttributeError, Exception) across lib/error, lib/debug, lib/vcal, elements/cdav, calendarobjectresource, collection, compatibility_hints, davobject * Remove unused local variables (old_id, status, i, path, rv, feat_type, sup, feature_info, rc) across library modules * Use `# noqa: F401` for vobject availability checks in calendarobjectresource AI-generated commit, this is tedious work, well-suited for AI-assistance Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 13 +++++++++++++ caldav/async_davclient.py | 2 +- caldav/calendarobjectresource.py | 11 +++++------ caldav/collection.py | 15 +++++++-------- caldav/compatibility_hints.py | 6 +----- caldav/davclient.py | 9 ++------- caldav/davobject.py | 4 +--- caldav/elements/cdav.py | 2 +- caldav/jmap/convert/_utils.py | 2 +- caldav/lib/debug.py | 2 +- caldav/lib/error.py | 3 +-- caldav/lib/vcal.py | 4 ++-- 12 files changed, 36 insertions(+), 37 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a97a3ce..2c3db3d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,19 @@ This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0 * async path returned an unawaited coroutine instead of the actual result. * `accept_invite()` (and `decline_invite()`, `tentatively_accept_invite()`) now fall back to the client username as the attendee email address when the server does not expose the `calendar-user-address-set` property (RFC6638 §2.4.1). 
A `NotFoundError` with a descriptive message is raised when the username is also not an email address. Fixes https://github.com/python-caldav/caldav/issues/399 +### Housekeeping + +* Added `funding.json` (https://fundingjson.org/) at the repository root. Closes https://github.com/python-caldav/caldav/issues/608 +* Code quality: reduced ruff ignore list (https://github.com/python-caldav/caldav/issues/634) — removed unused imports (`copy`, `lxml.etree`, `CalendarSet`, `cdav/dav` re-exports, `Optional`, `timezone`, `Event`/`Todo` type stubs), replaced bare `except:` clauses with specific exception types (`KeyError`, `AttributeError`, `Exception` where broad catching is intentional), and removed unused local variables. +* Sync `_put()` now updates `self.url` from the `Location` header on a 302 redirect, mirroring the existing async behaviour. + +### Test framework, compatibility hints, documentation, examples + +* RFC 6638 scheduling feature-detection infrastructure: new `scheduling`, `scheduling.mailbox`, and `scheduling.calendar-user-address-set` compatibility hints; legacy `no_scheduling` flags migrated. Default scheduling hints set for all the servers tested. +* Calendar owner example (`examples/calendar_owner_examples.py`) demonstrating how to retrieve the owner of a calendar via `DAV:owner` and resolve their calendar-user address. `testFindCalendarOwner` now exercises the full owner → principal → `get_vcal_address()` chain. Closes https://github.com/python-caldav/caldav/issues/544 +* `testInviteAndRespond` implemented end-to-end: organizer creates an event, invites an attendee, attendee accepts, and the organizer verifies the updated `PARTSTAT`. Per-server compatibility flags applied for known quirks (Baikal, Cyrus, SOGo). +* Multi-user RFC 6638 scheduling tests wired into the Docker server setup for Cyrus and Baikal (pre-populated `user1`–`user3`/`user1`–`user5`). 
+ ## [3.1.0] - 2026-03-19 Highlights: diff --git a/caldav/async_davclient.py b/caldav/async_davclient.py index f21e5f88..84e4b6fb 100644 --- a/caldav/async_davclient.py +++ b/caldav/async_davclient.py @@ -15,7 +15,7 @@ from urllib.parse import unquote if TYPE_CHECKING: - from caldav.calendarobjectresource import CalendarObjectResource, Event, Todo + from caldav.calendarobjectresource import CalendarObjectResource from caldav.collection import Calendar, Principal # Try niquests first (preferred), fall back to httpx diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py index 2f9ef1f2..ec4836c5 100644 --- a/caldav/calendarobjectresource.py +++ b/caldav/calendarobjectresource.py @@ -139,7 +139,7 @@ def __init__( if data is not None: self.data = data if id and self._get_component_type_cheap(): - old_id = self.icalendar_component.pop("UID", None) + self.icalendar_component.pop("UID", None) self.icalendar_component.add("UID", id) # Clear raw data and update state to use the modified icalendar instance self._data = None @@ -989,7 +989,7 @@ def _put(self, retry_on_failure=True): elif r.status not in (204, 201): if retry_on_failure: try: - import vobject + import vobject # noqa: F401 except ImportError: retry_on_failure = False if retry_on_failure: @@ -1013,7 +1013,7 @@ async def _async_put(self, retry_on_failure=True): elif r.status not in (204, 201): if retry_on_failure: try: - import vobject + import vobject # noqa: F401 except ImportError: retry_on_failure = False if retry_on_failure: @@ -1468,7 +1468,7 @@ def _set_icalendar_instance(self, inst): try: ## DEPRECATION TODO: remove this try/except the future ## icalendar 7.x behaviour (not released yet as of 2025-09 cal = icalendar.Calendar.new() - except: + except AttributeError: cal = icalendar.Calendar() cal.add("prodid", "-//python-caldav//caldav//en_DK") cal.add("version", "2.0") @@ -2087,7 +2087,7 @@ def _complete_ical(self, i=None, completion_timestamp=None) -> None: if i is None: i = 
self.icalendar_component assert self.is_pending(i) - status = i.pop("STATUS", None) + i.pop("STATUS", None) i.add("STATUS", "COMPLETED") i.add("COMPLETED", completion_timestamp) @@ -2166,7 +2166,6 @@ def set_due(self, due, move_dtstart=False, check_dependent=False): WARNING: the check_dependent-logic may be rewritten to support RFC9253 in 3.x """ - i = self.icalendar_component if hasattr(due, "tzinfo") and not due.tzinfo: due = due.astimezone(timezone.utc) if check_dependent: diff --git a/caldav/collection.py b/caldav/collection.py index c19a4c7b..8df78bf8 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -604,7 +604,7 @@ def _create( mkcol = (dav.Mkcol() if method == "mkcol" else cdav.Mkcalendar()) + set - r = self._query(root=mkcol, query_method=method, url=path, expected_return_value=201) + self._query(root=mkcol, query_method=method, url=path, expected_return_value=201) # COMPATIBILITY ISSUE # name should already be set, but we've seen caldav servers failing @@ -619,7 +619,7 @@ def _create( try: current_display_name = self.get_display_name() error.assert_(current_display_name == name) - except: + except Exception: log.warning( "calendar server does not support display name on calendar? Ignoring", exc_info=True, @@ -680,7 +680,7 @@ async def _async_create( try: current_display_name = await self._async_get_property(dav.DisplayName()) error.assert_(current_display_name == name) - except: + except Exception: log.warning( "calendar server does not support display name on calendar? 
Ignoring", exc_info=True, @@ -973,7 +973,6 @@ def _multiget(self, event_urls: Iterable[URL], raise_notfound: bool = False) -> if self.url is None: raise ValueError("Unexpected value None for self.url") - rv = [] prop = dav.Prop() + cdav.CalendarData() root = cdav.CalendarMultiGet() + prop + [dav.Href(value=u.path) for u in event_urls] # RFC 4791 section 7.9: "the 'Depth' header MUST be ignored by the @@ -1713,7 +1712,7 @@ def get_objects_by_sync_token( ## TODO: look more into this, I think sync_token should be directly available through response object try: sync_token = response.sync_token - except: + except AttributeError: sync_token = response.tree.findall(".//" + dav.SyncToken.tag)[0].text ## this is not quite right - the etag we've fetched can already be outdated @@ -1873,7 +1872,7 @@ def __init__( # we ignore the type here as this is defined in sub-classes only; require more changes to # properly fix in a future revision self.url = self.client.url.join(URL(self.get_property(self.findprop()))) # type: ignore - except: + except Exception: logging.error("something bad happened", exc_info=True) error.assert_(self.client.check_scheduling_support()) self.url = None @@ -1891,7 +1890,7 @@ def get_items(self): if not self._items: try: self._items = self.objects(load_objects=True) - except: + except Exception: logging.debug( "caldav server does not seem to support a sync-token REPORT query on a scheduling mailbox" ) @@ -1904,7 +1903,7 @@ def get_items(self): else: try: self._items.sync() - except: + except Exception: self._items = [ CalendarObjectResource(url=x[0], client=self.client) for x in self.children() ] diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 194d4084..56116fe7 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -375,9 +375,6 @@ def set_feature(self, feature, value=True): else: raise AssertionError self.copyFeatureSet(fc, collapse=False) - feat_def = self.find_feature(feature) - feat_type = 
feat_def.get('type', 'server-feature') - sup = fc[feature].get('support', feat_def.get('default', 'full')) ## TODO: Why is this camelCase while every other method is with under_score? rename ... @@ -396,7 +393,6 @@ def copyFeatureSet(self, feature_set, collapse=True): UserWarning, stacklevel=3, ) - feature_info = {} value = feature_set[feature] if feature not in self._server_features: self._server_features[feature] = {} @@ -570,7 +566,7 @@ def _derive_from_subfeatures(self, feature, feature_info, return_type, accept_fr subfeature_info = self.find_feature(subfeature_key) if 'default' in subfeature_info: continue - except: + except Exception: pass total_relevant += 1 diff --git a/caldav/davclient.py b/caldav/davclient.py index 25b6dc99..bf4e30cf 100644 --- a/caldav/davclient.py +++ b/caldav/davclient.py @@ -8,7 +8,6 @@ For async code, use: from caldav import aio """ -import copy import logging import sys import time @@ -38,19 +37,15 @@ from collections.abc import Mapping -from lxml import etree - -import caldav.compatibility_hints from caldav import __version__ from caldav.base_client import BaseDAVClient from caldav.base_client import get_calendars as _base_get_calendars from caldav.base_client import get_davclient as _base_get_davclient -from caldav.collection import Calendar, CalendarSet, Principal +from caldav.collection import Calendar, Principal from caldav.compatibility_hints import FeatureSet # Re-export CONNKEYS for backward compatibility from caldav.config import CONNKEYS # noqa: F401 -from caldav.elements import cdav, dav from caldav.lib import error from caldav.lib.python_utilities import to_wire from caldav.lib.url import URL @@ -65,7 +60,7 @@ from typing import Self if TYPE_CHECKING: - from caldav.calendarobjectresource import CalendarObjectResource, Event, Todo + from caldav.calendarobjectresource import CalendarObjectResource """ diff --git a/caldav/davobject.py b/caldav/davobject.py index eb17b30a..a9df2c45 100644 --- a/caldav/davobject.py +++ 
b/caldav/davobject.py @@ -389,7 +389,6 @@ def get_properties( if self.is_async_client: return self._async_get_properties(props, depth, parse_response_xml, parse_props) - rc = None response = self._query_properties(props, depth) if not parse_response_xml: return response @@ -427,7 +426,6 @@ async def _async_get_properties( parse_props: bool = True, ): """Async implementation of get_properties.""" - rc = None response = await self._async_query_properties(props, depth) if not parse_response_xml: return response @@ -577,7 +575,7 @@ def name(self) -> str | None: def __str__(self) -> str: try: return str(self.get_property(dav.DisplayName(), use_cached=True)) or self.url - except: + except Exception: return str(self.url) def __repr__(self) -> str: diff --git a/caldav/elements/cdav.py b/caldav/elements/cdav.py index 30487d85..5636a728 100644 --- a/caldav/elements/cdav.py +++ b/caldav/elements/cdav.py @@ -20,7 +20,7 @@ def _to_utc_date_string(ts): ## in python 3.6 and higher, ts.astimezone() will assume a ## naive timestamp is localtime (and so do we) ts = ts.astimezone(utc_tz) - except: + except (OverflowError, OSError, ValueError): ## native time stamp and the current python version is ## not able to treat it as localtime. 
import tzlocal diff --git a/caldav/jmap/convert/_utils.py b/caldav/jmap/convert/_utils.py index 4fa0e4ea..12b12263 100644 --- a/caldav/jmap/convert/_utils.py +++ b/caldav/jmap/convert/_utils.py @@ -4,7 +4,7 @@ from __future__ import annotations -from datetime import date, datetime, timedelta, timezone +from datetime import date, datetime, timedelta def _timedelta_to_duration(td: timedelta) -> str: diff --git a/caldav/lib/debug.py b/caldav/lib/debug.py index 7c233bbc..56a429b9 100644 --- a/caldav/lib/debug.py +++ b/caldav/lib/debug.py @@ -8,7 +8,7 @@ def xmlstring(root): root = root.xmlelement() try: return etree.tostring(root, pretty_print=True).decode("utf-8") - except: + except Exception: return root diff --git a/caldav/lib/error.py b/caldav/lib/error.py index 1d4754b7..63821e38 100644 --- a/caldav/lib/error.py +++ b/caldav/lib/error.py @@ -3,7 +3,6 @@ from collections import defaultdict from datetime import datetime, timezone from email.utils import parsedate_to_datetime -from typing import Optional from caldav import __version__ @@ -16,7 +15,7 @@ debug_dump_communication = os.environ.get("PYTHON_CALDAV_COMMDUMP", False) ## one of DEBUG_PDB, DEBUG, DEVELOPMENT, PRODUCTION debugmode = os.environ["PYTHON_CALDAV_DEBUGMODE"] -except: +except KeyError: if "dev" in __version__ or __version__ == "(unknown)": debugmode = "DEVELOPMENT" else: diff --git a/caldav/lib/vcal.py b/caldav/lib/vcal.py index a32eb379..fb29f7cd 100644 --- a/caldav/lib/vcal.py +++ b/caldav/lib/vcal.py @@ -127,7 +127,7 @@ def is_power_of_two(n): import difflib diff = list(difflib.unified_diff(event.split("\n"), fixed2.split("\n"), lineterm="")) - except: + except Exception: diff = ["Original: ", event, "Modified: ", fixed2] log("\n".join(log_message + diff)) @@ -182,7 +182,7 @@ def create_ical(ical_fragment=None, objtype=None, language="en_DK", **props): objtype = "VEVENT" try: component = icalendar.cal.component_factory[objtype]() - except: + except TypeError: component = 
icalendar.cal.component_factory.ComponentFactory()[objtype]() my_instance.add_component(component) ## STATUS should default to NEEDS-ACTION for tasks, if it's not set From 5a45497a7bd0e9d101709d65bbab12a877916c86 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Tue, 7 Apr 2026 15:59:48 +0200 Subject: [PATCH 02/17] docs: added a declaration of GenAI usage to the AI-POLICY.md file + misc --- AI-POLICY.md | 46 +++++++++++++++++++++++++++++++++----- README.md | 22 ++++++++++++++---- docs/design/CLAUDE.md | 52 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 111 insertions(+), 9 deletions(-) create mode 100644 docs/design/CLAUDE.md diff --git a/AI-POLICY.md b/AI-POLICY.md index 60737b32..888cd3ce 100644 --- a/AI-POLICY.md +++ b/AI-POLICY.md @@ -2,7 +2,12 @@ ## Read this first -The most important rule: Be honest and inform about it! +The most important rule: be honest and inform about it! + +Also: keep a log of the prompts used - prompts may be included in the +git commits. + +## Transparency matters If you've spent hours, perhaps a full day of your time writing up a pull request, then I sort of owe you something. I should spend some @@ -26,10 +31,10 @@ explain in details why I'm rejecting the pull request. ## Bugfixes are (most often) welcome It's fine to ask the AI for help to analyze a bug and create a fix for -it. By discovering the bug, reproducing it and testing it you're adding -real value to the project - but be transparent about AI usage and I -expect that you will not break down and cry if I decide to reject the code -changes. +it. By discovering the bug, reproducing it and testing it you're +adding real value to the project - just be transparent about AI usage +and do not take offence if the code changes are rejected, or completely +rewritten. ## General rules @@ -69,3 +74,34 @@ changes. 
as of 2026-02, and I can hardly see cases where the AI would break the Code of Conduct, but at the end of the day, it's **YOUR** responsibility that the contribution follows those guidelines. + +## Disclosure of GenAI usage, 2.2.6 - 3.2.0 + +The maintainer started playing with Claude Code in the end of 2025 - and [blogged about it](https://www.redpill-linpro.com/techblog/2026/03/20/from-luddite-to-vibe-coder.html) + +Releases 2.2.6 - 3.2.0 have been heavily assisted by Claude - which is pretty obvious when looking into the commit messages. My experiences have been mixed - sometimes it seems to be doing a better and faster job than me, other times it seems to be making a mess a lot faster than I can. Despite (or because of?) using Claude extensively, I spent much more time on it than estimated. + +Lots of time and effort have been spent on doing QA on the changes, fixing up things and/or asking Claude to do a better job. The surge of issues reported after the 3.0-release is probably unrelated to the AI usage - it's a result of trying to shoehorn both async and API changes into it without breaking backward compatibility and without duplicating too much code. The CHANGELOG.md entry for 3.0 explicitly declared a caveat: "there are massive code changes in version 3.0, so if you're using the Python CalDAV client library in some sharp production environment, I would recommend to wait for two months before upgrading". + +I don't have any good log of prompts given to Claude prior to the 3.2.0-release, but some of the considerations from Claude have been stored under `docs/design`. A copy of my [CLAUDE.md](docs/design/CLAUDE.md)-file can now be found in the same directory. + +Generated changes and human-made changes are often mixed up. I prefer "logical" commits containing self-sustained changesets; one of the things I'm often asking Claude to do is to rebase a branch and organize the commits into one or a few logical commits with descriptive commit messages.
+ +## Future plans of GenAI-usage + +Post-3.2.0 and until further notice I will try to go more back to the old ways for doing the "core development tasks" - new features and complex refactoring. If nothing else, it's important for maintaining my brain cells, coding skills and making sure all the changes stick to my memory. The new policy is that GenAI-tools should be used mainly for improving quality, not speeding up the development. + +I still intend to use GenAI heavily for certain tasks, like: + +* Minor bugfixes - with test code. The bugfix itself may often be a simple one-line change, but debugging and writing up the tests is tedious work. +* Maintaining the integration test framework. It's hard work, even when using Claude. Thanks to Claude I've now been able to put up an extensive "battery" of test servers that I'm checking regularly towards. This is something I've started on several times since 2013 but except for the two integrated python servers I never managed to get any lasting solutions. It's very useful to be able to easily test the library towards a wide range of servers - the majority of the bug reports are compatibility issues. The more servers I have for testing every release, the fewer troubles will be discovered downstream. +* Other CI-related frameworks and "boiler plate" for things like automated testing of code embedded in the documentation, QA on the commit messages before I push my git commits out from my laptop, etc. It increases quality, although being quite outside the "core business" of the CalDAV library. Doing it manually (and reading through all the documentation out there) would have stolen lots of valuable time that could have been used for coding. +* Writing up test code. I've always thought that "test driven development" is a good idea (write test code first, then the logic), but it's quite often both tedious and difficult. Claude can make them really fast.
It still needs some QA; care should be taken to ensure it's testing the right thing. +* Code reviews. The more "eyes" looking into the software, the better - it seems Claude is equally good at spotting the problems and mistakes in my code as I am at spotting the problems and mistakes in the code Claude generates. +* Debugging. It's easy to get stuck and spend tons of time on debugging - sometimes (but not always) Claude can find the problem easily. +* Various mundane and tedious work (e.g. "I left a TODO-note in the code over there, could you have a look into it and eliminate it?"). +* Development of the companion caldav-server-checker tool - writing up checks to discover various server issues may be really tedious and time-consuming, and (most of the time) easy for Claude to get right. The alternative to using GenAI would probably be to have half as many checks. I find those checks very useful. +* Investigations of different architectural choices - like with the async work I had Claude develop different design approaches and chose the one that I felt most comfortable with (though I'm still not sure that I made the right choice). +* Reading RFCs and quickly giving a pointer to the relevant sections, or verifying that the code is according to the standards or not. + +I will do some research on how to log prompts and chat. 
diff --git a/README.md b/README.md index 01813c09..906fcdba 100644 --- a/README.md +++ b/README.md @@ -40,14 +40,28 @@ async def main(): asyncio.run(main()) ``` -The documentation was updated as of version 2.0, and is available at https://caldav.readthedocs.io/ +## Documentation and other links + +The user documentation (up-to-date with version 3.2) is embedded under `docs/source` - a rendered copy is available at https://caldav.readthedocs.io/ + +Other documentation: + +* [This file](README.md) +* [Changelog](CHANGELOG.md) +* [Contributors guide](CONTRIBUTING.md) +* [Contact information](CONTACT.md) +* [Code of Conduct](CODE_OF_CONDUCT) +* [Security Policy](SECURITY.md) +* [AI policy and AI disclaimer](AI-POLICY.md) +* [Apache License](COPYING.APACHE) +* [GPL license](COPYING.GPL) The package is published at [Pypi](https://pypi.org/project/caldav) ## HTTP Libraries -The sync client uses [niquests](https://github.com/jawah/niquests) by default (with fallback to [requests](https://requests.readthedocs.io/)). The async client uses [httpx](https://www.python-httpx.org/) if installed (`pip install caldav[async]`), otherwise falls back to niquests. See [HTTP Library Configuration](docs/source/http-libraries.rst) for details. +The sync client uses [niquests](https://github.com/jawah/niquests) by default (with fallback to [requests](https://requests.readthedocs.io/)). The async client uses [httpx](https://www.python-httpx.org/) if installed, otherwise falls back to niquests. See [HTTP Library Configuration](docs/source/http-libraries.rst) for details. -Licences: +## Licences -Caldav is dual-licensed under the [GNU GENERAL PUBLIC LICENSE Version 3](COPYING.GPL) or the [Apache License 2.0](COPYING.APACHE). +The caldav library is dual-licensed under the [GNU GENERAL PUBLIC LICENSE Version 3](COPYING.GPL) or the [Apache License 2.0](COPYING.APACHE). 
diff --git a/docs/design/CLAUDE.md b/docs/design/CLAUDE.md new file mode 100644 index 00000000..64080433 --- /dev/null +++ b/docs/design/CLAUDE.md @@ -0,0 +1,52 @@ +Below is a copy of my ~/.claude/CLAUDE.md-file - included for the sake of transparency. It's a bit personal and does not contain much specific for the Python CalDAV library. + +(This copy is from 2026-04 - I'm considering to create some scripts to auto-sync the content) + +--- + +# GENERAL +- Most work is git-backed, with GitHub, GitLab and/or my own server as upstream. `gh` and `glab` CLI tools are installed. +- It's OK to publish comments on my behalf (e.g. in GitHub issues and pull requests), but such comments must always both start and end with a disclaimer that they are AI-generated. + +# CHAT AND PLANNING +- no need to be polite in the chat +- on the keyword NOW in capital letters, ignore the two bullet points below +- Point out typos or grammar errors found in my input. +- Before doing anything, present honest arguments on why the user wish may be a bad idea. + +# DEBUGGING +- When debugging, prefer writing a permanent unit test over a temporary debugging script. + +# BEFORE fixing/writing code +- For projects I don't own, check for a contributors guide or AI policy and follow it. +- Don't reinvent the wheel — check whether a library already solves the problem before writing new code from scratch. +- Check the project's testing regime; test code may be important. +- Write tests first, then implement. Confirm tests are FAILING before adding the fix/feature. + +# WHEN fixing/writing code +- Avoid duplicated code, paths, and logic. Check if similar logic already exists before implementing new logic. Refactor if needed. +- For Python, consider type annotations: + - All public APIs in packages/libraries must have good type annotations. + - Some projects enforce annotations in test code via ruff — follow the project's conventions. 
+ - Annotations may be skipped for simple scripts and internal methods unless ruff requires them. + +# AFTER fixing/writing code +- Check if documentation needs updating. +- Check if a CHANGELOG needs updating. My CHANGELOGs only cover changes since the last release — bugs introduced and fixed between releases should not be mentioned. +- Run relevant tests. On the caldav project the integration tests take very long time to run, so don't do a full run of all tests on caldav. +- Commit changes via git: + - Commit often. + - Always check the active git branch before committing. + - For projects at version >= 1.0.0, never commit directly to main/master. For v0.x or unversioned projects, pushing to main/master is usually fine. + - Only stage files related to the current task. Warn me if other uncommitted work exists in the repo. + - Don't push and don't open PRs/MRs unless I explicitly ask. + - for commit messages referencing github issues or pull requests, use the full URL (rationale: perhaps GitHub will still be existing in 15 years, but it may not be obvious that "#132" references an issue on GitHub anymore) +- When generating new issues or leaving comments, prepend and tail the comment with "⚠️ This comment is AI-generated ($details) on behalf of tobixen" (details may be "Claude Sonnet 4.6 via Claude Code") +- For PRs/MRs into projects I don't own or contribute to regularly, prepend the description with this text (skip "bug discovery, reproduction" if it's not applicable): + + ``` + The real value I'm adding to the project here is bug discovery, reproduction and + testing. This pull request was vibe-coded, including the description below. 
+ I promise not to break down and cry if the pull request is rejected :-) + --- + ``` From cb7ec8dca9a9dec6fe5b13be4ccd5c9ee4b2b2bf Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Wed, 8 Apr 2026 07:40:29 +0200 Subject: [PATCH 03/17] chore: compatibility hints cleanup and scheduling This is mostly AI-generated, unfortunately the prompts are missing. The compatibility hints file is not (yet) considered an important part of the library. Reorganizations are considered to be tedious work, well suited for AI generation with human oversight. Co-Authored-By: Claude Sonnet 4.6 --- caldav/compatibility_hints.py | 198 +++++++++--------- .../nextcloud/setup_nextcloud.sh | 11 + tests/test_caldav.py | 4 +- 3 files changed, 115 insertions(+), 98 deletions(-) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 56116fe7..a8e0a265 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -78,12 +78,15 @@ class FeatureSet: } }, "get-current-user-principal": { - "description": "Support for RFC5397, current principal extension. Most CalDAV servers have this, but it is an extension to the DAV standard. Possibly observed missing on mail.ru,DavMail gateway and it is possible to configure the support in some sabre-based servers"}, + "description": "Support for RFC5397, current principal extension. Most CalDAV servers have this, but it is an extension to the DAV standard. Possibly observed missing on mail.ru, DavMail gateway and it is possible to configure the support in some sabre-based servers", + "links": ["https://datatracker.ietf.org/doc/html/rfc5397"], + }, "get-current-user-principal.has-calendar": { "type": "server-observation", "description": "Principal has one or more calendars. 
Some servers and providers comes with a pre-defined calendar for each user, for other servers a calendar has to be explicitly created (supported means there exists a calendar - it may be because the calendar was already provisioned together with the principal, or it may be because a calendar was created manually, the checks can't see the difference)"}, "get-supported-components": { "description": "Server returns the supported-calendar-component-set property (RFC 4791 section 5.2.3). The property is optional: when absent the RFC mandates that all component types are accepted, so 'unsupported' here is not a protocol violation, but the client cannot determine the actual supported set without trying.", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-5.2.3"], }, "create-calendar.with-supported-component-types": { "description": "Server honours the supported-calendar-component-set restriction set at MKCALENDAR time. When 'full', the server both advertises (or enforces) the restriction; when 'unsupported', the restriction is silently ignored (wrong-type objects can be saved to the calendar). When 'ungraceful', the MKCALENDAR request itself fails when a component set is specified.", @@ -110,7 +113,11 @@ class FeatureSet: }, "create-calendar": { "default": { "support": "full" }, - "description": "RFC4791 says that \"support for MKCALENDAR on the server is only RECOMMENDED and not REQUIRED because some calendar stores only support one calendar per user (or principal), and those are typically pre-created for each account\". Hence a conformant server may opt to not support creating calendars, this is often seen for cloud services (some services allows extra calendars to be made, but not through the CalDAV protocol). (RFC4791 also says that the server MAY support MKCOL in section 8.5.2. I do read it as MKCOL may be used for creating calendars - which is weird, since section 8.5.2 is titled \"external attachments\". 
We should consider testing this as well)", + "description": "RFC4791 section 5.3.1 says that \"support for MKCALENDAR on the server is only RECOMMENDED and not REQUIRED because some calendar stores only support one calendar per user (or principal), and those are typically pre-created for each account\". Hence a conformant server may opt to not support creating calendars, this is often seen for cloud services (some services allows extra calendars to be made, but not through the CalDAV protocol). (RFC5689 extended MKCOL may also be used to create calendar collections as an alternative to MKCALENDAR. We should consider testing this as well)", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-5.3.1", + "https://datatracker.ietf.org/doc/html/rfc5689", + ], }, "create-calendar.auto": { "default": { "support": "unsupported" }, @@ -120,7 +127,8 @@ class FeatureSet: "description": "It's possible to set the displayname on a calendar upon creation" }, "delete-calendar": { - "description": "RFC4791 says nothing about deletion of calendars, so the server implementation is free to choose weather this should be supported or not. Section 3.2.3.2 in RFC 6638 says that if a calendar is deleted, all the calendarobjectresources on the calendar should also be deleted - but it's a bit unclear if this only applies to scheduling objects or not. Some calendar servers moves the object to a trashcan rather than deleting it" + "description": "RFC4791 says nothing about deletion of calendars, so the server implementation is free to choose weather this should be supported or not. Section 3.2.3.2 in RFC 6638 says that if a calendar is deleted, all the calendarobjectresources on the calendar should also be deleted - but it's a bit unclear if this only applies to scheduling objects or not. 
Some calendar servers move the object to a trashcan rather than deleting it", + "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-3.2.3.2"], }, "delete-calendar.free-namespace": { "description": "The delete operations clears the namespace, so that another calendar with the same ID/name can be created" @@ -163,8 +171,15 @@ class FeatureSet: "save-load.event.timezone": { "description": "The server accepts events with non-UTC timezone information. When unsupported or broken, the server may reject events with timezone data (e.g., return 403 Forbidden). Related to GitHub issue https://github.com/python-caldav/caldav/issues/372." }, + "save-load.icalendar": {"description": "Is it possible to save icalendar data to the calendar? (Most likely yes - but we need a parent to collect all icalendar compatibility problems that aren't specific to one kind of object resource types)"}, + "save-load.icalendar.related-to": { + "description": "The server preserves RELATED-TO properties (RFC5545 section 3.8.4.5) when saving and loading calendar objects. When 'unsupported', the server may typically silently strip all RELATED-TO lines", + "default": {"support": "full"}, + "links": ["https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.4.5"], + }, "search": { - "description": "calendar MUST support searching for objects using the REPORT method, as specified in RFC4791, section 7" + "description": "calendar MUST support searching for objects using the REPORT method, as specified in RFC4791, section 7", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-7"], }, "search.comp-type.optional": { "description": "In all the search examples in the RFC, comptype is given during a search, the client specifies if it's event or tasks or journals that is wanted. However, as I read the RFC this is not required. If omitted, the server should deliver all objects. Many servers will not return anything if the COMPTYPE filter is not set. 
Other servers will return 404" @@ -176,48 +191,85 @@ class FeatureSet: ## TODO - there is still quite a lot of search-related ## stuff that hasn't been moved from the old "quirk list" "search.time-range": { - "description": "Search for time or date ranges should work. This is specified in RFC4791, section 7.4 and section 9.9"}, + "description": "Search for time or date ranges should work. This is specified in RFC4791, section 7.4 and section 9.9", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-7.4", + "https://datatracker.ietf.org/doc/html/rfc4791#section-9.9", + ], + }, "search.time-range.accurate": { "description": "Time-range searches should only return events/todos that actually fall within the requested time range. Some servers incorrectly return recurring events whose recurrences fall outside (after) the search interval, or events with no recurrences in the requested time range at all. RFC4791 section 9.9 specifies that a VEVENT component overlaps a time range if the condition (start < search_end AND end > search_start) is true.", "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], }, "search.time-range.todo": {"description": "basic time range searches for tasks works", "default": {"support": "full"}}, "search.time-range.todo.old-dates": {"description": "time range searches for tasks with old dates (e.g. year 2000) work - some servers enforce a min-date-time restriction"}, + "search.time-range.todo.duration": { + "description": "Time-range searches correctly handle VTODOs that specify their interval via DTSTART+DURATION (without a DUE property). RFC4791 section 9.9 specifies that such tasks overlap a time range if DTSTART+DURATION falls within the range. 
When 'unsupported', the server ignores DURATION and fails to find such tasks.", + "default": {"support": "full"}, + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, + "search.time-range.todo.open-start": { + "description": "Time-range searches with only an end bound (no start) correctly exclude tasks whose DTSTART is after the end bound. RFC4791 section 9.9: a VTODO with both DTSTART and DUE should not overlap if its DTSTART > search_end. When 'broken', the server incorrectly returns future tasks.", + "default": {"support": "full"}, + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, "search.time-range.event": {"description": "basic time range searches for event works", "default": {"support": "full"}}, "search.time-range.event.old-dates": {"description": "time range searches for events with old dates (e.g. year 2000) work - some servers enforce a min-date-time restriction"}, "search.time-range.journal": {"description": "basic time range searches for journal works"}, - "search.time-range.alarm": {"description": "Time range searches for alarms work. The server supports searching for events based on when their alarms trigger, as specified in RFC4791 section 9.9"}, + "search.time-range.alarm": { + "description": "Time range searches for alarms work. The server supports searching for events based on when their alarms trigger, as specified in RFC4791 section 9.9", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, "search.unlimited-time-range": { "description": "A REPORT without a time-range filter should return all matching objects regardless of when they occur. Some servers (e.g. 
OX App Suite) use a sliding window for REPORT requests without a time range, returning only objects within approximately ±1 year of now and potentially missing older or far-future objects.", "default": {"support": "full"}, }, "search.is-not-defined": { "description": "Supports searching for objects where properties is-not-defined according to rfc4791 section 9.7.4", - "default": {"support": "full"} + "default": {"support": "full"}, + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.4"], }, "search.is-not-defined.category": { ## TODO: this should most likely be removed - it was a client bug fixed in icalendar-search 1.0.5, not a server error. (Discovered in the last minute before releasing caldav v3.0.0 - I won't touch it now) - "description": "Supports searching for objects where the CATEGORIES property is not defined (RFC4791 section 9.7.4). Some servers support is-not-defined for other properties (e.g. CLASS) but silently return wrong results or nothing when applied to CATEGORIES" + "description": "Supports searching for objects where the CATEGORIES property is not defined (RFC4791 section 9.7.4). Some servers support is-not-defined for other properties (e.g. CLASS) but silently return wrong results or nothing when applied to CATEGORIES", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.4"], }, "search.is-not-defined.dtend": { ## TODO: this should most likely be removed - it was a client bug fixed in icalendar-search 1.0.5, not a server error. (Discovered in the last minute before releasing caldav v3.0.0 - I won't touch it now) - "description": "Supports searching for objects where the DTEND property is not defined (RFC4791 section 9.7.4). Some servers support is-not-defined for some properties but not DTEND" + "description": "Supports searching for objects where the DTEND property is not defined (RFC4791 section 9.7.4). 
Some servers support is-not-defined for some properties but not DTEND", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.4"], }, "search.is-not-defined.class": { - "description": "Supports searching for objects where the CLASS property is not defined (RFC4791 section 9.7.4). Some servers support is-not-defined for CLASS but not for other properties like CATEGORIES" + "description": "Supports searching for objects where the CLASS property is not defined (RFC4791 section 9.7.4). Some servers support is-not-defined for CLASS but not for other properties like CATEGORIES", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.4"], }, "search.text": { "description": "Search for text attributes should work" }, "search.text.case-sensitive": { - "description": "In RFC4791, section-9.7.5, a text-match may pass a collation, and i;ascii-casemap MUST be the default, this is not checked (yet - TODO) by the caldav-server-checker project. Section 7.5 describes that the servers also are REQUIRED to support i;octet. The definitions of those collations are given in RFC4790, i;octet is a case-sensitive byte-by-byte comparition (fastest). search.text.case-sensitive is supported if passing the i;octet collation to search causes the search to be case-sensitive." + "description": "In RFC4791, section-9.7.5, a text-match may pass a collation, and i;ascii-casemap MUST be the default, this is not checked (yet - TODO) by the caldav-server-checker project. Section 7.5 describes that the servers also are REQUIRED to support i;octet. The definitions of those collations are given in RFC4790, i;octet is a case-sensitive byte-by-byte comparition (fastest). 
search.text.case-sensitive is supported if passing the i;octet collation to search causes the search to be case-sensitive.", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.5", + "https://datatracker.ietf.org/doc/html/rfc4791#section-7.5", + "https://datatracker.ietf.org/doc/html/rfc4790", + ], }, "search.text.case-insensitive": { - "description": "The i;ascii-casemap requires ascii-characters to be case-insensitive, while non-ascii characters are compared byte-by-byte (case-sensitive). Proper unicode case-insensitive searches may be supported by the server, but it's not a requirement in the RFC. As for now, we consider case-insensitive searches to be supported if the i;ascii-casemap collation does what it's supposed to do.. In the future we may consider adding a search.text.case-insensitive.unicode. (i;unicode-casemap is defined in RFC5051)" + "description": "The i;ascii-casemap requires ascii-characters to be case-insensitive, while non-ascii characters are compared byte-by-byte (case-sensitive). Proper unicode case-insensitive searches may be supported by the server, but it's not a requirement in the RFC. As for now, we consider case-insensitive searches to be supported if the i;ascii-casemap collation does what it's supposed to do.. In the future we may consider adding a search.text.case-insensitive.unicode. (i;unicode-casemap is defined in RFC5051)", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.5", + "https://datatracker.ietf.org/doc/html/rfc5051", + ], }, "search.text.substring": { - "description": "According to RFC4791 the search done should be a substring search. The search.text.substring feature is set if the calendar server does this (as opposed to only return full matches). Substring matches does not always make sense, but it's mandated by the RFC. When a server does a substring match on some properties but an exact match on others, the support should be marked as fragile. 
Except for categories, which are handled in search.text.category.substring" + "description": "According to RFC4791 the search done should be a substring search. The search.text.substring feature is set if the calendar server does this (as opposed to only return full matches). Substring matches does not always make sense, but it's mandated by the RFC. When a server does a substring match on some properties but an exact match on others, the support should be marked as fragile. Except for categories, which are handled in search.text.category.substring", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.5"], }, "search.text.category": { - "description": "Search for category should work. This is not explicitly specified in RFC4791, but covered in section 9.7.5. No examples targets categories explicitly, but there are some text match examples in section 7.8.6 and following sections"}, + "description": "Search for category should work. This is not explicitly specified in RFC4791, but covered in section 9.7.5. No examples targets categories explicitly, but there are some text match examples in section 7.8.6 and following sections", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.5", + "https://datatracker.ietf.org/doc/html/rfc4791#section-7.8.6", + ], + }, "search.text.category.substring": { "description": "Substring search for category should work according to the RFC. I.e., search for mil should match family,finance", }, @@ -259,7 +311,8 @@ class FeatureSet: "description": "Server expand should work correctly also if a recurrence set with exceptions is given" }, "sync-token": { - "description": "RFC6578 sync-collection reports are supported. Server provides sync tokens that can be used to efficiently retrieve only changed objects since last sync. Support can be 'full', 'fragile' (occasionally returns more content than expected), or 'unsupported'. 
Behaviour 'time-based' indicates second-precision tokens requiring sleep(1) between operations" + "description": "RFC6578 sync-collection reports are supported. Server provides sync tokens that can be used to efficiently retrieve only changed objects since last sync. Support can be 'full', 'fragile' (occasionally returns more content than expected), or 'unsupported'. Behaviour 'time-based' indicates second-precision tokens requiring sleep(1) between operations", + "links": ["https://datatracker.ietf.org/doc/html/rfc6578"], }, "sync-token.delete": { "description": "Server correctly handles sync-collection reports after objects have been deleted from the calendar (solved in Nextcloud in https://github.com/nextcloud/server/pull/44130)" @@ -270,7 +323,10 @@ class FeatureSet: }, "scheduling.mailbox": { "description": "Server provides schedule-inbox and schedule-outbox collections for the principal (RFC6638 sections 2.1-2.2). When unsupported, calls to schedule_inbox() or schedule_outbox() raise NotFoundError.", - "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-2.1"], + "links": [ + "https://datatracker.ietf.org/doc/html/rfc6638#section-2.1", + "https://datatracker.ietf.org/doc/html/rfc6638#section-2.2", + ], "default": {"support": "full"}, }, "scheduling.calendar-user-address-set": { @@ -283,11 +339,21 @@ class FeatureSet: "https://datatracker.ietf.org/doc/html/rfc6638#section-4.1", ], }, - 'freebusy-query': {'description': "freebusy queries come in two flavors, one query can be done towards a CalDAV server as defined in RFC4791, another query can be done through the scheduling framework, RFC 6638. 
Only RFC4791 is tested for as today"}, + 'freebusy-query': { + 'description': "freebusy queries come in two flavors, one query can be done towards a CalDAV server as defined in RFC4791, another query can be done through the scheduling framework, RFC 6638.", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-7.10", + "https://datatracker.ietf.org/doc/html/rfc6638", + ], + }, "freebusy-query.rfc4791": { "description": "Server supports free/busy-query REPORT as specified in RFC4791 section 7.10. The REPORT allows clients to query for free/busy time information for a time range. Servers without this support will typically return an error (often 500 Internal Server Error or 501 Not Implemented). Note: RFC6638 defines a different freebusy mechanism for scheduling", "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-7.10"], }, + "freebusy-query.rfc6638": { + "description": "Server supports RFC6638 freebusy query via the schedule outbox (section 4.1). The organizer POSTs a VFREEBUSY component to the schedule outbox and the server returns free/busy information for the listed attendees. Distinct from freebusy-query.rfc4791 which queries a calendar collection directly via REPORT.", + "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-4.1"], + }, "principal-search": { "description": "Server supports searching for principals (CalDAV users). Principal search may be restricted for privacy/security reasons on many servers. (not to be confused with get-current-user-principal)" }, @@ -725,13 +791,6 @@ def dotted_feature_set_list(self, compact=False): ## * Perhaps some more readable format should be considered (yaml?). 
## * Consider how to get this into the documentation incompatibility_description = { - 'no_default_calendar': - """The given user starts without an assigned default calendar """ - """(or without pre-defined calendars at all)""", - - 'no_freebusy_rfc6638': - """Server does not support a freebusy-request as per RFC6638""", - 'calendar_order': """Server supports (nonstandard) calendar ordering property""", @@ -746,10 +805,6 @@ def dotted_feature_set_list(self, compact=False): 'event_by_url_is_broken': """A GET towards a valid calendar object resource URL will yield 404 (wtf?)""", - 'no_delete_event': - """Zimbra does not support deleting an event, probably because event_by_url is broken""", - - 'propfind_allprop_failure': """The propfind test fails ... """ """it asserts DAV:allprop response contains the text 'resourcetype', """ @@ -765,28 +820,11 @@ def dotted_feature_set_list(self, compact=False): """date searches for todo-items will (only) find tasks that has either """ """a dtstart or due set""", - 'vtodo_datesearch_nostart_future_tasks_delivered': - """Future tasks are yielded when doing a date search with some end timestamp and without start timestamp and the task contains both dtstart and due, but not duration (xandikos 0.2.12)""", - 'vtodo_no_due_infinite_duration': """date search will find todo-items without due if dtstart is """ """before the date search interval. This is in breach of rfc4791""" """section 9.9""", - 'vtodo_no_dtstart_infinite_duration': - """date search will find todo-items without dtstart if due is """ - """after the date search interval. 
This is in breach of rfc4791""" - """section 9.9""", - - 'vtodo_no_dtstart_search_weirdness': - """Zimbra is weird""", - - 'vtodo_no_duration_search_weirdness': - """Zimbra is weird""", - - 'vtodo_with_due_weirdness': - """Zimbra is weird""", - 'vtodo-cannot-be-uncompleted': """If a VTODO object has been set with STATUS:COMPLETE, it's not possible to delete the COMPLTEDED attribute and change back to STATUS:IN-ACTION""", @@ -803,24 +841,12 @@ def dotted_feature_set_list(self, compact=False): 'dav_not_supported': """when asked, the server may claim it doesn't support the DAV protocol. Observed by one baikal server, should be investigated more (TODO) and robur""", - 'text_search_is_case_insensitive': - """Probably not supporting the collation used by the caldav library""", - - 'date_search_ignores_duration': - """Date search with search interval overlapping event interval works on events with dtstart and dtend, but not on events with dtstart and due""", - - 'date_todo_search_ignores_duration': - """Same as above, but specifically for tasks""", - 'fastmail_buggy_noexpand_date_search': """The 'blissful anniversary' recurrent example event is returned when asked for a no-expand date search for some timestamps covering a completely different date""", 'non_existing_raises_other': """Robur raises AuthorizationError when trying to access a non-existing resource (while 404 is expected). Probably so one shouldn't probe a public name space?""", - 'no_relships': - """The calendar server does not support child/parent relationships between calendar components""", - 'robur_rrule_freq_yearly_expands_monthly': """Robur expands a yearly event into a monthly event. 
I believe I've reported this one upstream at some point, but can't find back to it""", @@ -844,26 +870,17 @@ def dotted_feature_set_list(self, compact=False): 'principal-search': {'support': 'unsupported'}, 'freebusy-query.rfc4791': {'support': 'ungraceful', 'behaviour': '500 internal server error'}, "scheduling": {"support": "unsupported"}, - "old_flags": [ ## https://github.com/jelmer/xandikos/issues/8 - 'date_todo_search_ignores_duration', - 'vtodo_datesearch_nostart_future_tasks_delivered', - - ## The test with an rrule and an overridden event passes as - ## long as it's with timestamps. With dates, xandikos gets - ## into troubles. I've chosen to edit the test to use timestamp - ## rather than date, just to have the test exercised ... but we - ## should report this upstream - #'broken_expand_on_exceptions', - - ] + 'search.time-range.todo.duration': {'support': 'unsupported'}, + 'search.time-range.todo.open-start': {'support': 'broken', 'behaviour': 'future tasks are returned when only an end bound is given'}, } xandikos = { ## We've sometimes been observing internal server errors on freebusy-requests. ## Should do more research on it next time it shows up. - ## Component type filtering is required - searches must specify event=True or todo=True + ## Component type filtering is required - searches must specify event=True or todo=True; + ## omitting it returns empty results. 
"search.comp-type.optional": "unsupported", ## Principal property search returns 403 (not implemented) @@ -877,11 +894,10 @@ def dotted_feature_set_list(self, compact=False): "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, "scheduling": {"support": "unsupported"}, - "old_flags": [ - ## https://github.com/jelmer/xandikos/issues/8 - 'date_todo_search_ignores_duration', - 'vtodo_datesearch_nostart_future_tasks_delivered', - ] + ## Open-start searches (end bound only) cause xandikos to return 500 when processing + ## VTODOs that have DURATION but no DUE (no DUE means the index falls back to a full + ## file check, which crashes in the time-range calculation). + 'search.time-range.todo.open-start': {'support': 'ungraceful', 'behaviour': 'xandikos returns 500 on open-start searches involving DURATION-only VTODOs'}, } ## This seems to work as of version 3.5.4 of Radicale. @@ -933,9 +949,12 @@ def dotted_feature_set_list(self, compact=False): #'save-load.todo.mixed-calendar': {'support': 'unsupported'}, ## Why? It started complaining about this just recently. 'principal-search.by-name.self': {'support': 'unsupported'}, 'principal-search': {'support': 'ungraceful'}, - 'old_flags': ['unique_calendar_ids'], + #'old_flags': ['unique_calendar_ids'], ## I'm surprised, I'm quite sure this was passing earlier. Caldav commit a98d50490b872e9b9d8e93e2e401c936ad193003, caldav server checker commit 3cae24cf99da1702b851b5a74a9b88c8e5317dad - 'search.combined-is-logical-and': False + 'search.combined-is-logical-and': False, + ## Observed with Nextcloud 33: server delivers iTIP notification to the inbox AND + ## auto-schedules into the attendee's calendar (same quirk as Baikal/Cyrus). + "scheduling.mailbox.inbox-delivery": {"support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar"}, } ## TODO: Latest - mismatch between config and test script in delete-calendar.free-namespace ... 
and create-calendar.set-displayname? @@ -984,10 +1003,9 @@ def dotted_feature_set_list(self, compact=False): ## auto-processed into the attendee's calendar; no iTIP notification appears in the inbox. "scheduling.mailbox": True, "scheduling.mailbox.inbox-delivery": {"support": "unsupported"}, + 'save-load.icalendar.related-to': {'support': 'unsupported'}, "old_flags": [ - ## apparently, zimbra has no journal support - ## setting display name in zimbra does not work (display name, ## calendar-ID and URL is the same, the display name cannot be ## changed, it can only be given if no calendar-ID is given. In @@ -995,9 +1013,7 @@ def dotted_feature_set_list(self, compact=False): ## then the calendar would not be available on the old URL ## anymore) ## 'event_by_url_is_broken' removed - works in zimbra/zcs-foss:latest - 'no_delete_event', 'vtodo_datesearch_notime_task_is_skipped', - 'no_relships', ## TODO: I just discovered that when searching for a date some ## years after a recurring daily event was made, the event does @@ -1042,10 +1058,10 @@ def dotted_feature_set_list(self, compact=False): "scheduling.mailbox": {"support": "unknown"}, ## TODO: play with this and see if it's needed + 'save-load.icalendar.related-to': {'support': 'broken', 'behaviour': 'first RELATED-TO line is preserved but subsequent RELATED-TO lines are stripped'}, 'old_flags': [ 'propfind_allprop_failure', 'duplicates_not_allowed', - 'no_relships' ## relships seems to work as long as it's one RELATED-TO-line, but as soon as there are multiple lines the implementation seems broken ], } @@ -1118,7 +1134,6 @@ def dotted_feature_set_list(self, compact=False): "support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar", }, - 'old_flags': [] } ## See comments on https://github.com/python-caldav/caldav/issues/3 @@ -1153,11 +1168,11 @@ def dotted_feature_set_list(self, compact=False): #'nofreebusy', ## for old versions ## 'fragile_sync_tokens' removed - 
covered by 'sync-token': {'support': 'fragile'} 'vtodo_datesearch_nodtstart_task_is_skipped', ## no issue raised yet - 'date_todo_search_ignores_duration', 'calendar_color', 'calendar_order', 'vtodo_datesearch_notime_task_is_skipped', - ] + ], + 'search.time-range.todo.duration': {'support': 'unsupported'}, } sogo = { @@ -1261,9 +1276,8 @@ def dotted_feature_set_list(self, compact=False): "scheduling": {"support": "unsupported"}, 'old_flags': [ 'non_existing_raises_other', ## AuthorizationError instead of NotFoundError - 'no_supported_components_support', - 'no_relships', ], + 'save-load.icalendar.related-to': {'support': 'unsupported'}, 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, "sync-token": {"support": "ungraceful"}, "get-supported-components": {"support": "unsupported"}, @@ -1302,9 +1316,6 @@ def dotted_feature_set_list(self, compact=False): 'sync-token': {'support': 'ungraceful'}, 'principal-search': {'support': 'unsupported'}, "scheduling": {"support": "unsupported"}, - 'old_flags': [ - #'no_recurring_todo', ## todo - ] } #calendar_mail_ru = [ @@ -1446,9 +1457,6 @@ def dotted_feature_set_list(self, compact=False): }, ## Known, work in progress "scheduling": {"support": "unsupported"}, - 'old_flags': [ - 'no_supported_components_support', - ], ## Known, not a breach of standard "get-supported-components": {"support": "unsupported"}, } @@ -1539,9 +1547,7 @@ def dotted_feature_set_list(self, compact=False): 'principal-search.list-all': {'support': 'unsupported'}, ## Cross-calendar duplicate UID test fails (AuthorizationError creating second calendar) 'save.duplicate-uid.cross-calendar': {'support': 'ungraceful'}, - 'old_flags': [ - 'no_relships', - ], + 'save-load.icalendar.related-to': {'support': 'unsupported'}, ## OX App Suite has complex user provisioning; cross-user scheduling tests not yet set up. 
"scheduling.mailbox.inbox-delivery": {"support": "unknown"}, } diff --git a/tests/docker-test-servers/nextcloud/setup_nextcloud.sh b/tests/docker-test-servers/nextcloud/setup_nextcloud.sh index 5d5382e6..ef804171 100755 --- a/tests/docker-test-servers/nextcloud/setup_nextcloud.sh +++ b/tests/docker-test-servers/nextcloud/setup_nextcloud.sh @@ -61,6 +61,17 @@ fi docker exec $CONTAINER_NAME php occ app:disable bruteforcesettings || true docker exec $CONTAINER_NAME php occ config:system:set auth.bruteforce.protection.enabled --value=false --type=boolean || true +echo "Disabling CalDAV trashbin (calendar retention)..." +# Setting calendarRetentionObligation to '0' (the string) disables the trashbin in +# CalDavBackend::deleteCalendar and deleteCalendarObject, making deletes permanent. +# Without this, deleted calendars/objects are soft-deleted and accumulate in the DB, +# causing UNIQUE constraint violations when tests recreate a calendar with the same slug +# (Nextcloud 33+ reuses the calendarid, keeping old soft-deleted objects, so adding +# an event with the same UID fails). +docker exec $CONTAINER_NAME php occ config:app:set dav calendarRetentionObligation --value=0 || true +# Purge any leftover soft-deleted calendars/objects from previous runs +docker exec $CONTAINER_NAME php occ dav:retention:clean-up || true + echo "Configuring CalDAV rate limits..." 
docker exec $CONTAINER_NAME php occ config:app:set dav rateLimitCalendarCreation --value=99999 || true docker exec $CONTAINER_NAME php occ config:app:set dav maximumCalendarsSubscriptions --value=-1 || true diff --git a/tests/test_caldav.py b/tests/test_caldav.py index e7a24de2..92ee5723 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -2440,7 +2440,7 @@ def testWrongPassword(self): def testCreateChildParent(self): self.skip_unless_support("save-load.event") - self.skip_on_compatibility_flag("no_relships") + self.skip_unless_support("save-load.icalendar.related-to") c = self._fixCalendar(supported_calendar_component_set=["VEVENT"]) parent = c.add_event( dtstart=datetime(2022, 12, 26, 19, 15), @@ -2602,7 +2602,7 @@ def testSetDue(self): some_todo.save() - self.skip_on_compatibility_flag("no_relships") + self.skip_unless_support("save-load.icalendar.related-to") parent = c.add_todo( dtstart=datetime(2022, 12, 26, 19, 00, tzinfo=utc), From 5ab79909548ce485e1bc340c5f717fccbda22a85 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Thu, 9 Apr 2026 08:35:22 +0200 Subject: [PATCH 04/17] test: fix Nextcloud 33 test failures Nextcloud 33 introduced (or changed) soft-delete behaviour: when a calendar is deleted via CalDAV, both the calendar and its objects are soft-deleted to a trashbin. When tests delete and re-create a calendar with the same slug, Nextcloud reuses the same calendarid, and the soft-deleted objects (with their UIDs) remain in oc_calendarobjects. Trying to add a new event with the same UID then fails with "UNIQUE constraint failed: calendarid, calendartype, uid". Fix: set calendarRetentionObligation=0 in setup_nextcloud.sh. When this config value is the string '0', CalDavBackend::deleteCalendar and deleteCalendarObject skip the trashbin entirely and hard-delete immediately (see apps/dav/lib/CalDAV/CalDavBackend.php lines 940-941, 1572). Also run dav:retention:clean-up at startup to purge any leftover soft-deleted entries from prior runs. 
Also update the nextcloud compatibility hints to reflect the changed behaviour of scheduling.mailbox.inbox-delivery in Nextcloud 33: the server now delivers the iTIP notification to the inbox AND auto-schedules into the attendee's calendar (quirk), whereas previously it only delivered to the inbox (full). Prompt: `pytest -k nextcloud --last-failed` gives lots of failures. It seems to be something wrong with the nextcloud container. Looks a bit like the problems are due to the setup (user provisioning) is not being compatible with the latest version of the image. Please investigate. This commit was AI-generated, according to the AI-POLICY it's acceptable to use AI on the test server framework. Without AI help, we would still only have radicale, xandikos and personal testing accounts available for integration tests. Co-Authored-By: Claude Sonnet 4.6 --- caldav/calendarobjectresource.py | 15 +++ caldav/collection.py | 65 +++++++++++++ tests/test_async_integration.py | 88 ++++++++++++++++++ tests/test_caldav.py | 40 ++++++++ tests/test_caldav_unit.py | 153 ++++++++++++++++++++++++++++++- 5 files changed, 360 insertions(+), 1 deletion(-) diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py index ec4836c5..144f0a23 100644 --- a/caldav/calendarobjectresource.py +++ b/caldav/calendarobjectresource.py @@ -182,12 +182,18 @@ def add_organizer(self, organizer=None) -> None: * A plain email address string (``"mailto:"`` is prepended automatically) Any pre-existing ORGANIZER field is removed before the new one is added. + + For async clients, when *organizer* is omitted the method returns a + coroutine that must be awaited. When an explicit *organizer* is supplied + the method is always synchronous (pure in-memory, no network call). 
""" from .collection import Principal as _Principal ## avoid circular import if organizer is None: if self.client is None: raise ValueError("Unexpected value None for self.client") + if self.is_async_client: + return self._async_add_organizer() organizer_obj = self.client.principal().get_vcal_address() elif isinstance(organizer, _Principal): organizer_obj = organizer.get_vcal_address() @@ -201,6 +207,15 @@ def add_organizer(self, organizer=None) -> None: else: raise ValueError(f"Unsupported organizer type: {type(organizer)!r}") + self._set_organizer(organizer_obj) + + async def _async_add_organizer(self) -> None: + """Async implementation of add_organizer() for async clients.""" + principal = await self.client.principal() + self._set_organizer(await principal._async_get_vcal_address()) + + def _set_organizer(self, organizer_obj: vCalAddress) -> None: + """Write the ORGANIZER property onto the icalendar component (sync, no I/O).""" ievent = self.icalendar_component ievent.pop("organizer", None) ievent.add("organizer", organizer_obj) diff --git a/caldav/collection.py b/caldav/collection.py index 8df78bf8..41695549 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -396,6 +396,22 @@ def get_vcal_address(self) -> "vCalAddress": ret.params["cutype"] = vText(cutype) return ret + async def _async_get_vcal_address(self) -> "vCalAddress": + """Async counterpart of get_vcal_address() for use with AsyncDAVClient.""" + from icalendar import vCalAddress, vText + + cn = await self.get_display_name() + addresses_el = await self.get_property(cdav.CalendarUserAddressSet(), parse_props=False) + if addresses_el is None: + raise error.NotFoundError("No calendar user addresses given from server") + assert not [x for x in addresses_el if x.tag != dav.Href().tag] + addresses = sorted(list(addresses_el), key=lambda x: -int(x.get("preferred", 0))) + cutype = await self.get_property(cdav.CalendarUserType()) + ret = vCalAddress(addresses[0].text) + ret.params["cn"] = vText(cn) + 
ret.params["cutype"] = vText(cutype) + return ret + @property def calendar_home_set(self): if not self._calendar_home_set: @@ -461,7 +477,11 @@ def calendars(self) -> list["Calendar"]: def freebusy_request(self, dtstart, dtend, attendees): """Sends a freebusy-request for some attendee to the server as per RFC6638. + + For async clients, returns a coroutine that must be awaited. """ + if self.is_async_client: + return self._async_freebusy_request(dtstart, dtend, attendees) freebusy_ical = icalendar.Calendar() freebusy_ical.add("prodid", "-//tobixen/python-caldav//EN") @@ -487,6 +507,31 @@ def freebusy_request(self, dtstart, dtend, attendees): ) return response._find_objects_and_props() + async def _async_freebusy_request(self, dtstart, dtend, attendees): + """Async implementation of freebusy_request() for async clients.""" + freebusy_ical = icalendar.Calendar() + freebusy_ical.add("prodid", "-//tobixen/python-caldav//EN") + freebusy_ical.add("version", "2.0") + freebusy_ical.add("method", "REQUEST") + uid = uuid.uuid4() + freebusy_comp = icalendar.FreeBusy() + freebusy_comp.add("uid", uid) + freebusy_comp.add("dtstamp", datetime.now()) + freebusy_comp.add("dtstart", dtstart) + freebusy_comp.add("dtend", dtend) + freebusy_ical.add_component(freebusy_comp) + outbox = await self._async_schedule_outbox() + caldavobj = FreeBusy(data=freebusy_ical, parent=outbox) + await caldavobj.add_organizer() + for attendee in attendees: + caldavobj.add_attendee(attendee, no_default_parameters=True) + response = await self.client.post( + outbox.url, + caldavobj.data, + headers={"Content-Type": "text/calendar; charset=utf-8"}, + ) + return response._find_objects_and_props() + def calendar_user_address_set(self) -> list[str | None]: """ defined in RFC6638 @@ -517,6 +562,11 @@ def schedule_outbox(self) -> "ScheduleOutbox": """ return ScheduleOutbox(principal=self) + async def _async_schedule_outbox(self) -> "ScheduleOutbox": + """Async version of schedule_outbox() for async clients.""" + 
url = await self.get_property(cdav.ScheduleOutboxURL()) + return ScheduleOutbox(client=self.client, url=url) + class Calendar(DAVObject): """ @@ -793,7 +843,11 @@ def save_with_invites(self, ical: str, attendees, **attendeeoptions) -> None: """ sends a schedule request to the server. Equivalent with add_event, add_todo, etc, but the attendees will be added to the ical object before sending it to the server. + + For async clients, returns a coroutine that must be awaited. """ + if self.is_async_client: + return self._async_save_with_invites(ical, attendees, **attendeeoptions) ## TODO: consolidate together with save_* obj = self._calendar_comp_class_by_data(ical)(data=ical, client=self.client) obj.parent = self @@ -804,6 +858,17 @@ def save_with_invites(self, ical: str, attendees, **attendeeoptions) -> None: obj.save() return obj + async def _async_save_with_invites(self, ical: str, attendees, **attendeeoptions): + """Async implementation of save_with_invites() for async clients.""" + obj = self._calendar_comp_class_by_data(ical)(data=ical, client=self.client) + obj.parent = self + await obj.add_organizer() + for attendee in attendees: + obj.add_attendee(attendee, **attendeeoptions) + obj.id = obj.icalendar_instance.walk("vevent")[0]["uid"] + await obj.save() + return obj + def _use_or_create_ics(self, ical, objtype, **ical_data): if ical_data or ( (isinstance(ical, str) or isinstance(ical, bytes)) diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index 1f733b2e..01a0f8ea 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -439,6 +439,94 @@ async def test_todos_method(self, async_task_list: Any) -> None: assert len(todos) >= 1 assert all(isinstance(t, AsyncTodo) for t in todos) + @pytest.mark.asyncio + async def test_add_organizer_no_arg(self, async_client: Any, async_calendar: Any) -> None: + """add_organizer() without args returns a coroutine and sets ORGANIZER (issue #524). 
+ + Verifies the async fix: on an AsyncDAVClient principal() is a coroutine + function, so add_organizer() must await it via _async_add_organizer() + rather than calling it synchronously (which would raise AttributeError + on the returned coroutine object). + """ + from caldav import Event + + self.skip_unless_support("scheduling.calendar-user-address-set") + + principal = await async_client.principal() + expected_vcal = await principal._async_get_vcal_address() + + event = Event(client=async_client, data=ev1(), parent=async_calendar) + + ## Must return a coroutine, not raise AttributeError + coro = event.add_organizer() + assert asyncio.iscoroutine(coro), ( + f"add_organizer() on async client must return a coroutine, got {type(coro)}" + ) + await coro + + org = event.icalendar_component.get("organizer") + assert org is not None, "ORGANIZER must be set after awaiting add_organizer()" + assert str(org) == str(expected_vcal), ( + f"ORGANIZER {org!r} should match the principal's address {expected_vcal!r}" + ) + + @pytest.mark.asyncio + async def test_save_with_invites(self, async_client: Any, async_calendar: Any) -> None: + """Calendar.save_with_invites() must return a coroutine and save the event for async clients. + + Verifies that save_with_invites() detects the async client, returns a coroutine, + and that awaiting it properly sets ORGANIZER (via _async_add_organizer) and saves + the object to the server (via _async_save). 
+ """ + import uuid + + self.skip_unless_support("scheduling.calendar-user-address-set") + + principal = await async_client.principal() + vcal_address = await principal._async_get_vcal_address() + + base = _get_base_date() + ical = make_event( + f"async-swi-{uuid.uuid4()}@example.com", + "Async Save-With-Invites Test", + base + timedelta(days=2), + base + timedelta(days=2, hours=1), + ) + + ## Must return a coroutine, not execute synchronously + coro = async_calendar.save_with_invites(ical, [vcal_address]) + assert asyncio.iscoroutine(coro), ( + f"save_with_invites() on async client must return a coroutine, got {type(coro)}" + ) + obj = await coro + + assert obj.id is not None, "save_with_invites() must return an object with an id" + org = obj.icalendar_component.get("organizer") + assert org is not None, "ORGANIZER must be set after awaiting save_with_invites()" + + @pytest.mark.asyncio + async def test_principal_freebusy_request(self, async_client: Any) -> None: + """Principal.freebusy_request() must return a coroutine for async clients. + + Verifies that freebusy_request() detects the async client and delegates to + _async_freebusy_request, which awaits _async_schedule_outbox(), add_organizer(), + and client.post() rather than executing them synchronously. 
+ """ + self.skip_unless_support("scheduling.mailbox") + self.skip_unless_support("scheduling.calendar-user-address-set") + + principal = await async_client.principal() + vcal_address = await principal._async_get_vcal_address() + + base = _get_base_date() + + coro = principal.freebusy_request(base, base + timedelta(hours=2), [vcal_address]) + assert asyncio.iscoroutine(coro), ( + f"Principal.freebusy_request() on async client must return a coroutine, got {type(coro)}" + ) + ## We don't assert much about the response structure — just that awaiting doesn't raise + await coro + # ==================== Dynamic Test Class Generation ==================== # diff --git a/tests/test_caldav.py b/tests/test_caldav.py index 92ee5723..3fe7f95d 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -1263,6 +1263,46 @@ def testSchedulingInfo(self): calendar_user_address_set = self.principal.calendar_user_address_set() me_a_participant = self.principal.get_vcal_address() + def testAddOrganizer(self): + """add_organizer() sets ORGANIZER from the current principal (issue #524). + + Tests three paths: + - no-arg: principal is looked up from the client (requires + scheduling.calendar-user-address-set). + - explicit email string: pure in-memory, no server interaction. + - explicit vCalAddress: pure in-memory, no server interaction. 
+ """ + from icalendar import vCalAddress + + cal = self._fixCalendar() + event = Event( + client=self.caldav, + data=ev1, + parent=cal, + ) + + ## ---- explicit string arg (pure in-memory, always runs) ---- + event.add_organizer("organizer@example.com") + org = event.icalendar_component.get("organizer") + assert org is not None, "ORGANIZER should be set after add_organizer(string)" + assert "organizer@example.com" in str(org) + + ## ---- explicit vCalAddress arg ---- + addr = vCalAddress("mailto:addr@example.com") + event.add_organizer(addr) + org = event.icalendar_component.get("organizer") + assert str(org) == "mailto:addr@example.com" + + ## ---- no-arg: uses current principal ---- + self.skip_unless_support("scheduling.calendar-user-address-set") + event.add_organizer() + org = event.icalendar_component.get("organizer") + assert org is not None, "ORGANIZER should be set when add_organizer() uses principal" + principal_addresses = self.principal.calendar_user_address_set() + assert any(addr in str(org) for addr in principal_addresses), ( + f"ORGANIZER {org!r} should contain one of the principal's addresses {principal_addresses!r}" + ) + def testIssue399ChangeAttendeeStatusUsernameEmailFallback(self): """change_attendee_status() works when the attendee is identified by the client username rather than calendar_user_address_set() (issue #399). 
diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index 6b669f7e..bf0d12fa 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -2190,12 +2190,151 @@ def test_set_relation_returns_coroutine_for_async_client(self): result.close() def test_accept_invite_raises_not_implemented_for_async_client(self): - """accept_invite() must raise NotImplementedError for async clients (not silently fail).""" + """accept_invite() must raise Notimplemented for async clients (not silently fail).""" client, calendar = self._make_async_client_and_calendar() event = Event(client=client, url="/calendar/ev1.ics", data=ev1, parent=calendar) with pytest.raises(NotImplementedError): event.accept_invite() + def test_add_organizer_explicit_arg_is_sync_safe_for_async_client(self): + """add_organizer(explicit_arg) is pure in-memory: no network call, no await needed. + It must work correctly even when the client is an async client.""" + from icalendar import vCalAddress + + client, calendar = self._make_async_client_and_calendar() + event = Event(client=client, url="/calendar/ev1.ics", data=ev1, parent=calendar) + event.add_organizer("organizer@example.com") + organizer = event.icalendar_component.get("organizer") + assert "organizer@example.com" in str(organizer) + + def test_add_organizer_no_arg_returns_coroutine_for_async_client(self): + """add_organizer() without args must return a coroutine for async clients. + + Bug: today the code calls self.client.principal().get_vcal_address() without + checking is_async_client first. On an AsyncDAVClient, principal() is a + coroutine function, so principal() returns a coroutine object. Calling + .get_vcal_address() on that coroutine raises AttributeError instead of + returning a usable coroutine to the caller. 
+ """ + import asyncio + + from icalendar import vCalAddress + + client, calendar = self._make_async_client_and_calendar() + event = Event(client=client, url="/calendar/ev1.ics", data=ev1, parent=calendar) + + async def async_principal(): + p = mock.MagicMock() + p._async_get_vcal_address = mock.AsyncMock( + return_value=vCalAddress("mailto:me@example.com") + ) + return p + + client.principal = async_principal + + result = event.add_organizer() + assert asyncio.iscoroutine(result), ( + f"expected a coroutine from add_organizer() on async client, got {type(result)}" + ) + result.close() + + def test_add_organizer_no_arg_async_awaited_sets_organizer(self): + """Awaiting add_organizer() without args on async client must set ORGANIZER correctly.""" + import asyncio + + from icalendar import vCalAddress + + client, calendar = self._make_async_client_and_calendar() + event = Event(client=client, url="/calendar/ev1.ics", data=ev1, parent=calendar) + + async def async_principal(): + p = mock.MagicMock() + p._async_get_vcal_address = mock.AsyncMock( + return_value=vCalAddress("mailto:me@example.com") + ) + return p + + client.principal = async_principal + + asyncio.run(event.add_organizer()) + organizer = event.icalendar_component.get("organizer") + assert str(organizer) == "mailto:me@example.com" + + def test_save_with_invites_returns_coroutine_for_async_client(self): + """save_with_invites() must return a coroutine for async clients (not silently drop save/organizer).""" + import asyncio + + from icalendar import vCalAddress + + client, calendar = self._make_async_client_and_calendar() + + async def async_principal(): + p = mock.MagicMock() + p._async_get_vcal_address = mock.AsyncMock( + return_value=vCalAddress("mailto:me@example.com") + ) + return p + + client.principal = async_principal + + result = calendar.save_with_invites(ev1, []) + assert asyncio.iscoroutine(result), ( + f"expected coroutine from save_with_invites() on async client, got {type(result)}" + ) + 
result.close() + + def test_save_with_invites_async_awaited_sets_organizer_and_saves(self): + """Awaiting save_with_invites() on async client must set ORGANIZER and call put.""" + import asyncio + + from icalendar import vCalAddress + + client, calendar = self._make_async_client_and_calendar() + + async def async_principal(): + p = mock.MagicMock() + p._async_get_vcal_address = mock.AsyncMock( + return_value=vCalAddress("mailto:me@example.com") + ) + return p + + client.principal = async_principal + saved = False + + async def fake_async_put(*args, **kwargs): + nonlocal saved + saved = True + r = mock.MagicMock() + r.status = 201 + r.headers = [] + return r + + client.put = fake_async_put + + obj = asyncio.run(calendar.save_with_invites(ev1, [])) + assert saved, "save_with_invites() did not call put for async client" + org = obj.icalendar_component.get("organizer") + assert org is not None, "ORGANIZER must be set after awaiting save_with_invites()" + assert "me@example.com" in str(org) + + def test_principal_freebusy_request_returns_coroutine_for_async_client(self): + """Principal.freebusy_request() must return a coroutine for async clients.""" + import asyncio + from datetime import datetime + + client, calendar = self._make_async_client_and_calendar() + principal = Principal(client=client, url="/principals/me/") + + result = principal.freebusy_request( + datetime(2024, 1, 1, 10, 0, 0), + datetime(2024, 1, 1, 12, 0, 0), + [], + ) + assert asyncio.iscoroutine(result), ( + f"expected coroutine from Principal.freebusy_request() on async client, got {type(result)}" + ) + result.close() + class TestRateLimitHelpers: """Unit tests for the shared rate-limit helper functions in caldav.lib.error.""" @@ -2867,6 +3006,18 @@ def test_add_organizer_no_arg_no_client_raises(self): with pytest.raises(ValueError): ev.add_organizer() + def test_add_organizer_principal_object(self): + """Passing a Principal object directly calls get_vcal_address() on it.""" + from icalendar import 
vCalAddress + + ev = self._make_event() + mock_principal = mock.MagicMock(spec=Principal) + mock_principal.get_vcal_address.return_value = vCalAddress("mailto:organizer@example.com") + ev.add_organizer(mock_principal) + organizer = ev.icalendar_component.get("organizer") + assert str(organizer) == "mailto:organizer@example.com" + mock_principal.get_vcal_address.assert_called_once() + class TestChangeAttendeeStatusFallback: """Unit tests for change_attendee_status() fallback when calendar_user_address_set() is unavailable. From 595db5a7356825909d17072f42d3ebe55e8d9959 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Thu, 23 Apr 2026 17:50:32 +0200 Subject: [PATCH 05/17] test: add scheduling test framework (sync/async) Earlier work on scheduling testing stalled a bit due to lack of proper testing accounts. Docker test servers (Baikal, Nextcloud, Cyrus, SOGo, DAViCal, Davis, CCS, Zimbra, Stalwart) now all pre-create multiple users for scheduling tests. This commit adds a testing framework for multi-account testing plus various tests. Prompt: The sync integration tests in tests/caldav_test.py has a _TestSchedulingBase class, for tests that requires several users - `save_with_invites` is tested there. The class has a TODO-comment, "Stalled a bit due to lack of proper testing accounts" is for sure not true, I believe the whole comment is obsoleted, please verify. The async tests should be made as symmetric and similar to the sync tests as possible. There is a todo-comment there that FreeBusy isn't tested (FreeBusy as defined in RFC663), please make tests for it, both for sync and freesync. (some followup-prompts appears to be missing here) Test code is AI-generated, tests are among the things identified as particularly suitable for AI-assisted codeing. 
Co-Authored-By: Claude Sonnet 4.6 --- caldav/collection.py | 5 + tests/test_async_integration.py | 222 +++++++++++++++++++++++++++----- tests/test_caldav.py | 66 ++++++---- 3 files changed, 235 insertions(+), 58 deletions(-) diff --git a/caldav/collection.py b/caldav/collection.py index 41695549..53b0d3e6 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -562,6 +562,11 @@ def schedule_outbox(self) -> "ScheduleOutbox": """ return ScheduleOutbox(principal=self) + async def _async_schedule_inbox(self) -> "ScheduleInbox": + """Async version of schedule_inbox() for async clients.""" + url = await self.get_property(cdav.ScheduleInboxURL()) + return ScheduleInbox(client=self.client, url=url) + async def _async_schedule_outbox(self) -> "ScheduleOutbox": """Async version of schedule_outbox() for async clients.""" url = await self.get_property(cdav.ScheduleOutboxURL()) diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index 01a0f8ea..4c3cb864 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -470,61 +470,200 @@ async def test_add_organizer_no_arg(self, async_client: Any, async_calendar: Any f"ORGANIZER {org!r} should match the principal's address {expected_vcal!r}" ) + +class _AsyncTestSchedulingBase: + """ + Async counterpart of _TestSchedulingBase (tests/test_caldav.py) for + RFC6638 scheduling tests. Not collected directly by pytest (no ``Test`` + prefix); concrete subclasses supply ``_users`` and ``_server_features``. + + Concrete subclasses are generated dynamically in the module epilogue, + one per server that has ``scheduling_users`` configured. + """ + + ## Subclasses set these when the class is dynamically generated. 
+ _users: list[dict] = [] + _server_features: object = None + + def _skip_unless_support(self, feature: str) -> None: + """Skip if the server does not declare support for *feature*.""" + from caldav.compatibility_hints import FeatureSet + + if not self._server_features: + pytest.skip(f"No feature information available, skipping {feature} test") + fs = ( + self._server_features + if isinstance(self._server_features, FeatureSet) + else FeatureSet(self._server_features) + ) + if not fs.is_supported(feature): + msg = fs.find_feature(feature).get("description", feature) + pytest.skip("Test skipped due to server incompatibility issue: " + msg) + + @pytest_asyncio.fixture + async def scheduling_setup(self) -> Any: + """Create async clients/principals/calendars for each scheduling user.""" + import uuid + + from caldav.aio import get_async_davclient + + from .fixture_helpers import aget_or_create_test_calendar, cleanup_calendar_objects + + clients: list[Any] = [] + principals: list[Any] = [] + calendars: list[Any] = [] + auto_uids: list[str] = [] + + for i, user_config in enumerate(self._users): + try: + client = await get_async_davclient(probe=False, **user_config) + except Exception: + continue + if not await client.check_scheduling_support(): + await client.close() + continue + principal = await client.principal() + cal, _ = await aget_or_create_test_calendar( + client, + principal, + calendar_name=f"async scheduling test {i}", + cal_id=f"asyncschedtest{uuid.uuid4().hex[:8]}", + ) + if cal is None: + await client.close() + continue + await cleanup_calendar_objects(cal) + clients.append(client) + principals.append(principal) + calendars.append(cal) + + if not clients: + pytest.skip("No scheduling users available or server does not support scheduling") + + yield clients, principals, calendars, auto_uids + + ## Teardown: clear calendar objects and clean up auto-scheduled events + for i, (client, principal) in enumerate(zip(clients, principals, strict=False)): + try: + 
await cleanup_calendar_objects(calendars[i]) + except Exception: + pass + if auto_uids: + try: + for cal in await principal.calendars(): + try: + for event in await cal.get_events(): + if event.id in auto_uids: + await event.delete() + except Exception: + pass + except Exception: + pass + await client.close() + @pytest.mark.asyncio - async def test_save_with_invites(self, async_client: Any, async_calendar: Any) -> None: - """Calendar.save_with_invites() must return a coroutine and save the event for async clients. + async def test_invite_and_respond(self, scheduling_setup: Any) -> None: + """send a calendar invite via save_with_invites and verify delivery. - Verifies that save_with_invites() detects the async client, returns a coroutine, - and that awaiting it properly sets ORGANIZER (via _async_add_organizer) and saves - the object to the server (via _async_save). + Async counterpart of _TestSchedulingBase.testInviteAndRespond. + Note: accept_invite() is not yet supported for async clients, so + the response half of the flow is verified only at the delivery level + (inbox item or auto-scheduled event). 
""" import uuid - self.skip_unless_support("scheduling.calendar-user-address-set") - - principal = await async_client.principal() - vcal_address = await principal._async_get_vcal_address() + clients, principals, calendars, auto_uids = scheduling_setup + if len(principals) < 2: + pytest.skip("need 2 principals to do the invite and respond test") + ## Snapshot inbox contents before the invite + inbox_urls_before: set[Any] = set() + try: + inbox0 = await principals[0]._async_schedule_inbox() + inbox1 = await principals[1]._async_schedule_inbox() + for item in await inbox0.get_events(): + inbox_urls_before.add(item.url) + for item in await inbox1.get_events(): + inbox_urls_before.add(item.url) + except Exception: + pass ## inbox listing may not work on all servers + + ## Send the invite base = _get_base_date() ical = make_event( - f"async-swi-{uuid.uuid4()}@example.com", - "Async Save-With-Invites Test", - base + timedelta(days=2), - base + timedelta(days=2, hours=1), + f"async-sched-{uuid.uuid4()}@example.com", + "Async Schedule Test", + base + timedelta(days=3), + base + timedelta(days=3, hours=1), + ) + attendee_vcal = await principals[1]._async_get_vcal_address() + saved_event = await calendars[0].save_with_invites(ical, [principals[0], attendee_vcal]) + event_uid = saved_event.id + auto_uids.append(event_uid) + + ## Event must be in the organizer's calendar + organizer_events = await calendars[0].get_events() + assert any(e.id == event_uid for e in organizer_events), ( + "Event should appear in organizer's calendar after save_with_invites" ) - ## Must return a coroutine, not execute synchronously - coro = async_calendar.save_with_invites(ical, [vcal_address]) - assert asyncio.iscoroutine(coro), ( - f"save_with_invites() on async client must return a coroutine, got {type(coro)}" + ## Poll: event auto-scheduled into attendee calendar OR new inbox item + auto_scheduled = False + new_inbox_items: list[Any] = [] + for _ in range(30): + try: + for cal in await 
principals[1].calendars(): + try: + if any(e.id == event_uid for e in await cal.get_events()): + auto_scheduled = True + break + except Exception: + pass + except Exception: + pass + if not auto_scheduled: + try: + new_inbox_items = [ + item + for item in await inbox1.get_events() + if item.url not in inbox_urls_before + ] + except Exception: + pass + if auto_scheduled or new_inbox_items: + break + await asyncio.sleep(1) + + assert auto_scheduled or new_inbox_items, ( + "Expected invite in attendee inbox OR event auto-added to attendee calendar, " + "got neither" ) - obj = await coro - assert obj.id is not None, "save_with_invites() must return an object with an id" - org = obj.icalendar_component.get("organizer") - assert org is not None, "ORGANIZER must be set after awaiting save_with_invites()" + ## accept_invite() is not yet supported for async clients (raises NotImplementedError). + ## Verifying delivery is sufficient to confirm save_with_invites works end-to-end. @pytest.mark.asyncio - async def test_principal_freebusy_request(self, async_client: Any) -> None: - """Principal.freebusy_request() must return a coroutine for async clients. + async def test_freebusy(self, scheduling_setup: Any) -> None: + """Test RFC6638 freebusy query via the schedule outbox. - Verifies that freebusy_request() detects the async client and delegates to - _async_freebusy_request, which awaits _async_schedule_outbox(), add_organizer(), - and client.post() rather than executing them synchronously. + Async counterpart of _TestSchedulingBase.testFreeBusy. + Verifies that Principal.freebusy_request() returns a coroutine for + async clients and that awaiting it completes without error. 
""" - self.skip_unless_support("scheduling.mailbox") - self.skip_unless_support("scheduling.calendar-user-address-set") - - principal = await async_client.principal() - vcal_address = await principal._async_get_vcal_address() + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("freebusy-query.rfc6638") base = _get_base_date() + dtstart = base + dtend = base + timedelta(days=1) + attendees = [await principals[0]._async_get_vcal_address()] - coro = principal.freebusy_request(base, base + timedelta(hours=2), [vcal_address]) + coro = principals[0].freebusy_request(dtstart, dtend, attendees) assert asyncio.iscoroutine(coro), ( - f"Principal.freebusy_request() on async client must return a coroutine, got {type(coro)}" + f"Principal.freebusy_request() on async client must return a coroutine, " + f"got {type(coro)}" ) - ## We don't assert much about the response structure — just that awaiting doesn't raise + ## Just verify it completes without raising; response format varies per server. await coro @@ -552,3 +691,18 @@ async def test_principal_freebusy_request(self, async_client: Any) -> None: # Add to module namespace so pytest discovers it vars()[_classname] = _test_class _generated_classes[_classname] = _test_class + + ## If the server has scheduling_users, also generate an async scheduling class. 
+ if hasattr(_server, "config") and "scheduling_users" in _server.config: + _sched_classname = f"TestAsyncSchedulingFor{_server.name.replace(' ', '')}" + if _sched_classname not in _generated_classes: + _sched_class = type( + _sched_classname, + (_AsyncTestSchedulingBase,), + { + "_users": _server.config["scheduling_users"], + "_server_features": _server.features, + }, + ) + vars()[_sched_classname] = _sched_class + _generated_classes[_sched_classname] = _sched_class diff --git a/tests/test_caldav.py b/tests/test_caldav.py index 3fe7f95d..5f744c6e 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -675,30 +675,32 @@ class _TestSchedulingBase: Base class for RFC6638 scheduling tests. Not collected directly by pytest (no ``Test`` prefix); concrete subclasses supply ``_users``. - TODO: work in progress. Stalled a bit due to lack of proper testing accounts. I haven't managed to get this test to pass at any systems yet, but I believe the problem is not on the library side. - * icloud: cannot really test much with only one test account - available. I did some testing forth and back with emails sent - to an account on another service through the - scheduling_examples.py, and it seems like I was able both to - accept an invite from an external account (and the external - account got notified about it) and to receive notification that - the external party having accepted the calendar invite. - FreeBusy doesn't work. I don't have capacity following up more - right now. - * DAViCal: I have only an old version to test with at the moment, - should look into that. I did manage to send and receive a - calendar invite, but apparently I did not manage to accept the - calendar invite. It should be looked more into. FreeBusy - doesn't work in the old version, probably it works in a newer - version. - * SOGo: Sending a calendar invite, but receiving nothing in the - CalDAV inbox. 
FreeBusy works somehow, but returns pure - iCalendar data and not XML, I believe that's not according to - RFC6638. + Concrete subclasses are generated dynamically in the module epilogue, + one per server that has ``scheduling_users`` configured (see + ``caldav_test_servers.yaml.example`` for setup instructions). + Docker test servers (Baikal, Nextcloud, Cyrus, SOGo, DAViCal, Davis, + CCS, Zimbra, Stalwart) pre-create multiple users for this purpose. """ ## Subclasses set this to the list of user connection dicts to use. _users: list[dict] = [] + ## Server-level feature dict/FeatureSet; set by dynamic class generation. + _server_features: object = None + + def _skip_unless_support(self, feature: str) -> None: + """Skip the test if the server does not declare support for *feature*.""" + from caldav.compatibility_hints import FeatureSet + + if not self._server_features: + pytest.skip(f"No feature information available, skipping {feature} test") + fs = ( + self._server_features + if isinstance(self._server_features, FeatureSet) + else FeatureSet(self._server_features) + ) + if not fs.is_supported(feature): + msg = fs.find_feature(feature).get("description", feature) + pytest.skip("Test skipped due to server incompatibility issue: " + msg) def _getCalendar(self, i): calendar_id = "schedulingnosetestcalendar%i" % i @@ -754,9 +756,22 @@ def teardown_method(self): for c in self.clients: c.__exit__() - ## TODO - # def testFreeBusy(self): - # pass + def testFreeBusy(self): + """Test RFC6638 freebusy query via the schedule outbox (Principal.freebusy_request).""" + if len(self.principals) < 1: + pytest.skip("need at least 1 principal") + self._skip_unless_support("freebusy-query.rfc6638") + + dtstart = (datetime.now() + timedelta(days=1)).replace( + hour=9, minute=0, second=0, microsecond=0 + ) + dtend = (datetime.now() + timedelta(days=1)).replace( + hour=18, minute=0, second=0, microsecond=0 + ) + attendees = [self.principals[0].get_vcal_address()] + ## Just verify the call 
completes without raising. + ## The response format varies per server. + self.principals[0].freebusy_request(dtstart, dtend, attendees) def testInviteAndRespond(self): ## Look through inboxes of principals[0] and principals[1] so we can sort @@ -3934,5 +3949,8 @@ def testWithEnvironment(self): vars()[_sched_classname] = type( _sched_classname, (_TestSchedulingBase,), - {"_users": _caldav_server["scheduling_users"]}, + { + "_users": _caldav_server["scheduling_users"], + "_server_features": _caldav_server.get("features"), + }, ) From a88b2246b4144ddfc831282b251c7d11627ad29a Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 10 Apr 2026 07:10:23 +0200 Subject: [PATCH 06/17] fix: scheduling async-aware Prompt: The previous commit adds a _async_schedule_inbox to collection.py, this method is only used from the test. Why is this? Shouldn't it be possible to get the inbox through the public API while doing async operations? Prompt: pytest -k 'not compat' --last-failed -k 'test_invite_and_respond' breaks now This was predominantly written by Claude. Trivial refactoring and bugfixing. More work on async is done in a later commit. Co-Authored-By: Claude Sonnet 4.6 --- caldav/collection.py | 17 +++++++++++++---- tests/test_async_integration.py | 4 ++-- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/caldav/collection.py b/caldav/collection.py index 53b0d3e6..ca9b3053 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -385,7 +385,10 @@ def calendar( def get_vcal_address(self) -> "vCalAddress": """ Returns the principal, as an icalendar.vCalAddress object. + For async clients, returns a coroutine that must be awaited. 
""" + if self.is_async_client: + return self._async_get_vcal_address() from icalendar import vCalAddress, vText cn = self.get_display_name() @@ -552,23 +555,27 @@ def calendar_user_address_set(self) -> list[str | None]: def schedule_inbox(self) -> "ScheduleInbox": """ - Returns the schedule inbox, as defined in RFC6638 + Returns the schedule inbox, as defined in RFC6638. + For async clients, returns a coroutine that must be awaited. """ + if self.is_async_client: + return self._async_schedule_inbox() return ScheduleInbox(principal=self) def schedule_outbox(self) -> "ScheduleOutbox": """ - Returns the schedule outbox, as defined in RFC6638 + Returns the schedule outbox, as defined in RFC6638. + For async clients, returns a coroutine that must be awaited. """ + if self.is_async_client: + return self._async_schedule_outbox() return ScheduleOutbox(principal=self) async def _async_schedule_inbox(self) -> "ScheduleInbox": - """Async version of schedule_inbox() for async clients.""" url = await self.get_property(cdav.ScheduleInboxURL()) return ScheduleInbox(client=self.client, url=url) async def _async_schedule_outbox(self) -> "ScheduleOutbox": - """Async version of schedule_outbox() for async clients.""" url = await self.get_property(cdav.ScheduleOutboxURL()) return ScheduleOutbox(client=self.client, url=url) @@ -869,6 +876,8 @@ async def _async_save_with_invites(self, ical: str, attendees, **attendeeoptions obj.parent = self await obj.add_organizer() for attendee in attendees: + if isinstance(attendee, Principal): + attendee = await attendee.get_vcal_address() obj.add_attendee(attendee, **attendeeoptions) obj.id = obj.icalendar_instance.walk("vevent")[0]["uid"] await obj.save() diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index 4c3cb864..c2e3c48b 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -579,8 +579,8 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: ## Snapshot inbox 
contents before the invite inbox_urls_before: set[Any] = set() try: - inbox0 = await principals[0]._async_schedule_inbox() - inbox1 = await principals[1]._async_schedule_inbox() + inbox0 = await principals[0].schedule_inbox() + inbox1 = await principals[1].schedule_inbox() for item in await inbox0.get_events(): inbox_urls_before.add(item.url) for item in await inbox1.get_events(): From 4ed6b9159f95df712b54368b4d55e37cf3977074 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 10 Apr 2026 08:49:31 +0200 Subject: [PATCH 07/17] chore: various compatibility hints changes This is a combination of various work done on the compatibility hints. The compatibility hints are still a bit "work in progress", so it's considered OK to do breaking changes here even on minor-versions. It's used mostly in the test code, but also internally in the library to work around various compatibility problems. The aim of the work is to get rid of all the old "compatibility issue flags", all the new "features" should be tested by the caldav-server-tester, this file should contain everything needed by the code and by the tests, in addition to other useful/interessting features checked by the caldav-server-tester project. The work here is predominantly done by Claude - maintaining this file is tedious work and not part of the core logic. Quite some of the prompts have gone missing, quite much of the work has been done in the caldav-server-tester project. I have noticed Claude hallucinating up RFC references, so it's important to ask it to actually read the RFCs and verify the references. Here are some of the prompts: prompt: (in caldav-server-tester project ) Create a check for freebusy-query.rfc6638 followup-prompt: this being a RFC6638-thing, it's meant to be used in a multi-user-scenario, so consider usage of ServerQuirkChecker.extra_principals. 
(Also, rename the old FreeBusy check class with a name consistent with the new FreeBusy class) followup-prompt: rename the feature to scheduling.freebusy-query (not freebusy-query.rfc6638) followup-prompt: but then freebusy-query.rfc4791 becomes a bit redundant, it can be removed and merged into freebusy-query, with the description of freebusy-query emphasizing that it's a check for freebusy as given in rfc4791 (with reference to scheduling.freebusy-query for the rfc6638-variant) Co-Authored-By: Claude Sonnet 4.6 --- caldav/compatibility_hints.py | 108 +++++++++++++++++++++----------- tests/test_async_integration.py | 2 +- tests/test_caldav.py | 6 +- 3 files changed, 76 insertions(+), 40 deletions(-) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index a8e0a265..85eb0d7d 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -203,13 +203,26 @@ class FeatureSet: }, "search.time-range.todo": {"description": "basic time range searches for tasks works", "default": {"support": "full"}}, "search.time-range.todo.old-dates": {"description": "time range searches for tasks with old dates (e.g. year 2000) work - some servers enforce a min-date-time restriction"}, - "search.time-range.todo.duration": { - "description": "Time-range searches correctly handle VTODOs that specify their interval via DTSTART+DURATION (without a DUE property). RFC4791 section 9.9 specifies that such tasks overlap a time range if DTSTART+DURATION falls within the range. 
When 'unsupported', the server ignores DURATION and fails to find such tasks.", + "search.time-range.todo.strict": { + "description": "Bounded VTODO time-range searches do not return tasks whose time span falls entirely outside the searched range (no false positives).", "default": {"support": "full"}, "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], }, - "search.time-range.todo.open-start": { - "description": "Time-range searches with only an end bound (no start) correctly exclude tasks whose DTSTART is after the end bound. RFC4791 section 9.9: a VTODO with both DTSTART and DUE should not overlap if its DTSTART > search_end. When 'broken', the server incorrectly returns future tasks.", + "search.time-range.open": { + "description": "Open-ended time-range searches (with only one bound) work correctly. RFC4791 section 9.9: the CALDAV:time-range 'start' and 'end' attributes are optional; if absent, assume -infinity and +infinity respectively. At least one attribute must be present.", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, + "search.time-range.open.end": { + "description": "Searches with only a start bound (end assumed +infinity) correctly return components whose time span overlaps the start. RFC4791 section 9.9: for a VTODO with DTSTART+DUE and absent end bound, the overlap condition is (start < DUE) OR (start <= DTSTART). When 'unsupported', such queries return no results.", + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, + "search.time-range.open.start": { + "description": "Searches with only an end bound (start assumed -infinity) correctly exclude components whose DTSTART is after the end bound. RFC4791 section 9.9: a VTODO with DTSTART+DUE should not overlap if its DTSTART > search_end. 
When 'broken', the server incorrectly returns future tasks.", + "default": {"support": "full"}, + "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], + }, + "search.time-range.open.start.duration": { + "description": "Time-range searches correctly handle components that specify their interval via DTSTART+DURATION (without DTEND/DUE). RFC4791 section 9.9: a VEVENT with DURATION (end > 0s) overlaps [start, end] if (start < DTSTART+DURATION) AND (end > DTSTART); a VTODO with DTSTART+DURATION overlaps if (start <= DTSTART+DURATION) AND ((end > DTSTART) OR (end >= DTSTART+DURATION)). Tested for both VTODO and VEVENT; if support is asymmetric across component types the feature is marked 'broken' with a behaviour note.", "default": {"support": "full"}, "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], }, @@ -339,20 +352,34 @@ class FeatureSet: "https://datatracker.ietf.org/doc/html/rfc6638#section-4.1", ], }, - 'freebusy-query': { - 'description': "freebusy queries come in two flavors, one query can be done towards a CalDAV server as defined in RFC4791, another query can be done through the scheduling framework, RFC 6638.", + "scheduling.auto-schedule": { + "description": "Server automatically processes incoming iTIP REQUEST messages and adds the event directly to the attendee's calendar without requiring explicit acceptance from the inbox (RFC6638 SCHEDULE-AGENT=SERVER behaviour). When False/unsupported, the attendee must process inbox items manually. 
Note: only detectable from the caldav-server-tester with a cross-user probe (extra_principals configured).", "links": [ - "https://datatracker.ietf.org/doc/html/rfc4791#section-7.10", "https://datatracker.ietf.org/doc/html/rfc6638", ], + "default": {"support": "full"}, + }, + "scheduling.schedule-tag": { + "description": "Server returns a Schedule-Tag response header on GET of a scheduling object resource (a calendar object with an ORGANIZER property) and exposes the schedule-tag DAV property via PROPFIND (RFC6638 sections 3.2-3.3). Clients use the Schedule-Tag for conditional PUT requests to detect concurrent scheduling changes.", + "default": {"support": "full"}, + "links": [ + "https://datatracker.ietf.org/doc/html/rfc6638#section-3.2", + "https://datatracker.ietf.org/doc/html/rfc6638#section-3.3", + ], }, - "freebusy-query.rfc4791": { - "description": "Server supports free/busy-query REPORT as specified in RFC4791 section 7.10. The REPORT allows clients to query for free/busy time information for a time range. Servers without this support will typically return an error (often 500 Internal Server Error or 501 Not Implemented). Note: RFC6638 defines a different freebusy mechanism for scheduling", - "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-7.10"], + "scheduling.schedule-tag.stable-partstat": { + "description": "Server keeps the Schedule-Tag stable when an attendee performs a PARTSTAT-only update (RFC6638 section 3.2 requirement). Non-compliant servers change the tag even when only PARTSTAT is updated, breaking conditional-PUT logic for other attendees.", + "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-3.2"], }, - "freebusy-query.rfc6638": { - "description": "Server supports RFC6638 freebusy query via the schedule outbox (section 4.1). The organizer POSTs a VFREEBUSY component to the schedule outbox and the server returns free/busy information for the listed attendees. 
Distinct from freebusy-query.rfc4791 which queries a calendar collection directly via REPORT.", - "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-4.1"], + "scheduling.freebusy-query": { + "description": "Server supports the RFC6638 freebusy query: the organizer POSTs a VFREEBUSY REQUEST to the schedule outbox and the server returns free/busy information for the listed attendees.", + "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-5"], + }, + 'freebusy-query': { + 'description': "Server supports the RFC4791 free/busy-query REPORT (section 7.10): a REPORT sent directly to a calendar collection to retrieve free/busy time for a range. See also scheduling.freebusy-query for the RFC6638 variant which POSTs a VFREEBUSY to the schedule outbox.", + "links": [ + "https://datatracker.ietf.org/doc/html/rfc4791#section-7.10", + ], }, "principal-search": { "description": "Server supports searching for principals (CalDAV users). Principal search may be restricted for privacy/security reasons on many servers. (not to be confused with get-current-user-principal)" @@ -850,9 +877,6 @@ def dotted_feature_set_list(self, compact=False): 'robur_rrule_freq_yearly_expands_monthly': """Robur expands a yearly event into a monthly event. I believe I've reported this one upstream at some point, but can't find back to it""", - 'no_search_openended': - """An open-ended search will not work""", - } ## This is for Xandikos 0.2.12. 
@@ -868,11 +892,11 @@ def dotted_feature_set_list(self, compact=False): "search.text.substring": {"support": "unsupported"}, "search.text.category.substring": {"support": "unsupported"}, 'principal-search': {'support': 'unsupported'}, - 'freebusy-query.rfc4791': {'support': 'ungraceful', 'behaviour': '500 internal server error'}, + 'freebusy-query': {'support': 'ungraceful', 'behaviour': '500 internal server error'}, "scheduling": {"support": "unsupported"}, ## https://github.com/jelmer/xandikos/issues/8 - 'search.time-range.todo.duration': {'support': 'unsupported'}, - 'search.time-range.todo.open-start': {'support': 'broken', 'behaviour': 'future tasks are returned when only an end bound is given'}, + 'search.time-range.open.start.duration': {'support': 'unsupported'}, + 'search.time-range.open.start': {'support': 'broken', 'behaviour': 'future tasks are returned when only an end bound is given'}, } xandikos = { @@ -897,7 +921,10 @@ def dotted_feature_set_list(self, compact=False): ## Open-start searches (end bound only) cause xandikos to return 500 when processing ## VTODOs that have DURATION but no DUE (no DUE means the index falls back to a full ## file check, which crashes in the time-range calculation). - 'search.time-range.todo.open-start': {'support': 'ungraceful', 'behaviour': 'xandikos returns 500 on open-start searches involving DURATION-only VTODOs'}, + 'search.time-range.open.start': {'support': 'ungraceful', 'behaviour': 'xandikos returns 500 on open-start searches involving DURATION-only VTODOs'}, + ## xandikos index-based filtering for VTODO is inaccurate: tasks with DTSTART+DUE + ## entirely outside the search range can be returned as false positives. + 'search.time-range.todo.strict': {'support': 'broken', 'behaviour': 'tasks with DTSTART+DUE outside the range are returned'}, } ## This seems to work as of version 3.5.4 of Radicale. 
@@ -914,12 +941,12 @@ def dotted_feature_set_list(self, compact=False): "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, ## freebusy is not supported yet, but on the long-term road map "scheduling": {"support": "unsupported"}, + ## Radicale does not return results for open-end date searches (only start given) + 'search.time-range.open.end': {'support': 'unsupported'}, 'old_flags': [ ## calendar listings and calendar creation works a bit ## "weird" on radicale - 'no_search_openended', - #'text_search_is_exact_match_sometimes', ## extra features not specified in RFC5545 @@ -1004,6 +1031,8 @@ def dotted_feature_set_list(self, compact=False): "scheduling.mailbox": True, "scheduling.mailbox.inbox-delivery": {"support": "unsupported"}, 'save-load.icalendar.related-to': {'support': 'unsupported'}, + 'search.time-range.open.start.duration': {'support': 'unsupported'}, + 'search.time-range.open.start': {'support': 'broken'}, "old_flags": [ ## setting display name in zimbra does not work (display name, @@ -1035,7 +1064,7 @@ def dotted_feature_set_list(self, compact=False): 'save-load.todo.recurrences.thisandfuture': {'support': 'ungraceful'}, 'save-load.event.recurrences.exception': False, 'search.time-range.alarm': {'support': 'unsupported'}, - "freebusy-query.rfc4791": True, + "freebusy-query": True, "search.time-range.todo": False, "search.text": False, ## sometimes ungraceful "search.recurrences.includes-implicit": False, @@ -1123,6 +1152,9 @@ def dotted_feature_set_list(self, compact=False): 'delete-calendar': { 'support': 'fragile', 'behaviour': 'Deleting a recently created calendar fails'}, + # Cyrus changes the Schedule-Tag even on attendee PARTSTAT-only updates, + # violating RFC6638 section 3.2 which requires the tag to remain stable. 
+ "scheduling.schedule-tag.stable-partstat": {"support": "unsupported"}, # Cyrus may not properly reject wrong passwords in some configurations # Cyrus implements server-side automatic scheduling: for cross-user # invites, the server both auto-processes the invite into the attendee's calendar @@ -1172,7 +1204,7 @@ def dotted_feature_set_list(self, compact=False): 'calendar_order', 'vtodo_datesearch_notime_task_is_skipped', ], - 'search.time-range.todo.duration': {'support': 'unsupported'}, + 'search.time-range.open.start.duration': {'support': 'unsupported'}, } sogo = { @@ -1210,7 +1242,7 @@ def dotted_feature_set_list(self, compact=False): "support": "unsupported" }, ## unsupported earlier, ungraceful at be26d42b1ca3ff3b4fd183761b4a9b024ce12b84 / 537a23b145487006bb987dee5ab9e00cdebb0492 - "freebusy-query.rfc4791": {"support": "ungraceful"}, + "freebusy-query": {"support": "ungraceful"}, "principal-search": { "support": "ungraceful", "behaviour": "Search by name failed: ReportError at '501 Not Implemented - \n\n

An error occurred during object publishing

did not find the specified REPORT

\n\n', reason no reason", @@ -1272,7 +1304,7 @@ def dotted_feature_set_list(self, compact=False): "search.recurrences.expanded.exception": { "support": "unsupported" }, 'search.recurrences.includes-implicit.todo': {'support': 'unsupported'}, 'principal-search': {'support': 'ungraceful'}, - 'freebusy-query.rfc4791': {'support': 'ungraceful'}, + 'freebusy-query': {'support': 'ungraceful'}, "scheduling": {"support": "unsupported"}, 'old_flags': [ 'non_existing_raises_other', ## AuthorizationError instead of NotFoundError @@ -1370,21 +1402,24 @@ def dotted_feature_set_list(self, compact=False): "save.duplicate-uid.cross-calendar": {"support": "ungraceful"}, # CCS rejects multi-instance VTODOs (thisandfuture recurring completion) "save-load.todo.recurrences.thisandfuture": {"support": "unsupported"}, - "search.time-range.event": {"support": "full"}, - "search.time-range.event.old-dates": {"support": "ungraceful"}, - "search.time-range.todo": {"support": "full"}, - "search.time-range.todo.old-dates": {"support": "ungraceful"}, "search.comp-type.optional": {"support": "ungraceful"}, + "scheduling.free-busy": {"support": "broken"}, ## "full" observed, 70938dc1cbb6a839978eee4315699746d38ee5f0/3cae24cf99da1702b851b5a74a9b88c8e5317dad, 2026-02-17. ## However, this may be due to mess with the caldav-server-checker branches. 
"unsupported" again at be26d42b1ca3ff3b4fd183761b4a9b024ce12b84 / 537a23b145487006bb987dee5ab9e00cdebb0492 "search.text.case-sensitive": {"support": "unsupported"}, + "search.time-range.event": {"support": "full"}, + "search.time-range.event.old-dates": {"support": "ungraceful"}, + "search.time-range.todo": {"support": "full"}, + 'search.time-range.open.start.duration': {'support': 'ungraceful'}, + "search.time-range.todo.old-dates": {"support": "ungraceful"}, + "search.time-range.open.start": {"support": "ungraceful"}, "search.time-range.alarm": {"support": "unsupported"}, "search.recurrences": {"support": "unsupported"}, "principal-search": {"support": "unsupported"}, # Ephemeral Docker container: wipe objects (avoids UID conflicts across calendars) "test-calendar": {"cleanup-regime": "wipe-calendar"}, ## Did pass earlier, ungraceful at be26d42b1ca3ff3b4fd183761b4a9b024ce12b84 / 537a23b145487006bb987dee5ab9e00cdebb0492 - 'freebusy-query.rfc4791': {'support': 'ungraceful'}, + 'freebusy-query': {'support': 'ungraceful'}, "old_flags": [ "propfind_allprop_failure", ], @@ -1417,11 +1452,11 @@ def dotted_feature_set_list(self, compact=False): 'search.recurrences.expanded.exception': False, ## Stalwart stores master+exception VEVENTs as a single resource with 2 VEVENTs. 'save-load.event.recurrences.exception': {'support': 'full'}, + ## Stalwart does not return results for open-end date searches (only start given) + 'search.time-range.open.end': {'support': 'unsupported'}, 'old_flags': [ ## Stalwart does not return VTODO items without DTSTART in date searches 'vtodo_datesearch_nodtstart_task_is_skipped', - ## Stalwart does not return results for open-ended date searches on VTODOs - 'no_search_openended', ], } @@ -1487,8 +1522,8 @@ def dotted_feature_set_list(self, compact=False): 'principal-search': {'support': 'ungraceful'}, 'principal-search.by-name.self': {'support': 'unsupported'}, ## TODO: flapping ...? 
- #'freebusy-query.rfc4791': {'support': 'unsupported'}, - 'freebusy-query.rfc4791': {'support': 'ungraceful'}, + #'freebusy-query': {'support': 'unsupported'}, + 'freebusy-query': {'support': 'ungraceful'}, ## flapping ...? #'search.is-not-defined.category': {'support': 'unsupported'}, ## flapping ...? @@ -1502,9 +1537,10 @@ def dotted_feature_set_list(self, compact=False): "scheduling": {"support": "full"}, "scheduling.mailbox": {"support": "unsupported"}, "scheduling.calendar-user-address-set": {"support": "unsupported"}, + ## GMX does not return results for open-end date searches (only start given) + 'search.time-range.open.end': {'support': 'unsupported'}, "old_flags": [ #"text_search_is_case_insensitive", - "no_search_openended", "vtodo-cannot-be-uncompleted", ] } diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index c2e3c48b..511247e3 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -651,7 +651,7 @@ async def test_freebusy(self, scheduling_setup: Any) -> None: async clients and that awaiting it completes without error. 
""" clients, principals, calendars, auto_uids = scheduling_setup - self._skip_unless_support("freebusy-query.rfc6638") + self._skip_unless_support("scheduling.freebusy-query") base = _get_base_date() dtstart = base diff --git a/tests/test_caldav.py b/tests/test_caldav.py index 5f744c6e..e3f7e7a7 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -760,7 +760,7 @@ def testFreeBusy(self): """Test RFC6638 freebusy query via the schedule outbox (Principal.freebusy_request).""" if len(self.principals) < 1: pytest.skip("need at least 1 principal") - self._skip_unless_support("freebusy-query.rfc6638") + self._skip_unless_support("scheduling.freebusy-query") dtstart = (datetime.now() + timedelta(days=1)).replace( hour=9, minute=0, second=0, microsecond=0 @@ -3033,7 +3033,7 @@ def testTodoDatesearch(self): todos2 = c.search(start=datetime(2025, 4, 14), todo=True, include_completed=True) todos3 = c.search(start=datetime(2025, 4, 14), todo=True) - if not self.check_compatibility_flag("no_search_openended"): + if self.is_supported("search.time-range.open.end"): assert isinstance(todos1[0], Todo) assert isinstance(todos2[0], Todo) assert isinstance(todos3[0], Todo) @@ -3530,7 +3530,7 @@ def testDateSearchAndFreeBusy(self): assert len(r) == 1 # Lets try a freebusy request as well - self.skip_unless_support("freebusy-query.rfc4791") + self.skip_unless_support("freebusy-query") freebusy = c.freebusy_request( datetime(2007, 7, 13, 17, 00, 00), datetime(2007, 7, 15, 17, 00, 00) From e8eae79c698d5bd395d84ce4d4687cf014ad45f9 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 24 Apr 2026 11:55:44 +0200 Subject: [PATCH 08/17] chore: various compatibility hints changes This is a combination of various work done on the compatibility hints. The compatibility hints are still a bit "work in progress", so it's considered OK to do breaking changes here even on minor-versions. 
It's used mostly in the test code, but also internally in the library
to work around various compatibility problems.

The aim of the work is to get rid of all the old "compatibility issue
flags", all the new "features" should be tested by the
caldav-server-tester, this file should contain everything needed by the
code and by the tests, in addition to other useful/interesting features
checked by the caldav-server-tester project.

This commit is a combination of many commits, many of them AI-generated.
Maintaining this file is tedious work and not part of the core logic.
Quite some of the prompts have gone missing, quite much of the work has
been done in the caldav-server-tester project.

I do believe the changes have been through sufficient scrutiny. Claude
does have a history of messing up this file - hallucinating up RFC
references, and frequently flipping compatibility support instead of
doing research on why the feature support is "fragile".

Here are some of the prompts:

prompt: (in caldav-server-tester project ) Create a check for
freebusy-query.rfc6638

followup-prompt: this being a RFC6638-thing, it's meant to be used in a
multi-user-scenario, so consider usage of
ServerQuirkChecker.extra_principals.
(Also, rename the old FreeBusy check class with a name consistent with the new FreeBusy class) followup-prompt: rename the feature to scheduling.freebusy-query (not freebusy-query.rfc6638) followup-prompt: but then freebusy-query.rfc4791 becomes a bit redundant, it can be removed and merged into freebusy-query, with the description of freebusy-query emphasizing that it's a check for freebusy as given in rfc4791 (with reference to scheduling.freebusy-query for the rfc6638-variant) Co-Authored-By: Claude Sonnet 4.6 --- caldav/compatibility_hints.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 85eb0d7d..a3cfb011 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -1106,6 +1106,8 @@ def dotted_feature_set_list(self, compact=False): "search.recurrences.expanded.exception": False, 'old_flags': ['vtodo_datesearch_nodtstart_task_is_skipped'], 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, + 'scheduling.schedule-tag': False, + 'scheduling.mailbox.inbox-delivery': False, } baikal = { ## version 0.10.1 From 3d26fd71c982d289547a95d29d10f12726cb22c0 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Thu, 23 Apr 2026 18:07:58 +0200 Subject: [PATCH 09/17] feat: freebusy scheduling The scheduling freebusy-requests were completely untested and didn't work at all. logic was human-written, test-code by Claude Prompt: look into https://datatracker.ietf.org/doc/html/rfc6638#appendix-B.5 and make a pure unit test with a mocked-up response to a freebusy scheduling request, exercising the handling part of it. This will break with a NotImplementedError as for now. Only fix the test, do not fix the code logic. Consider the TODO-comment in response.py, line 247, and give me an opinion on weather it makes sense to reuse the _find_objects_and_props for scheduling response or if it's better to create a dedicated separate method for this. 
--- CHANGELOG.md | 1 + caldav/collection.py | 56 ++++++++---------- caldav/datastate.py | 19 ++++-- caldav/elements/cdav.py | 60 ++++++++++++++----- caldav/elements/dav.py | 2 + caldav/elements/ical.py | 2 +- caldav/protocol/xml_parsers.py | 1 + caldav/response.py | 82 ++++++++++++++++++++++++-- caldav/search.py | 8 ++- tests/test_caldav_unit.py | 103 +++++++++++++++++++++++++++++++++ 10 files changed, 276 insertions(+), 58 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c3db3d4..cd6ad35d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0 ### Test framework, compatibility hints, documentation, examples +* Open-ended time-range search compatibility hints: new `search.time-range.open`, `search.time-range.open.end`, `search.time-range.open.start`, and `search.time-range.open.start.duration` features (RFC4791 section 9.9). Old `no_search_openended` flag and `search.time-range.todo.duration`/`search.time-range.todo.open-start` features migrated. `testTodoSearch` updated to use `is_supported("search.time-range.open.end")` instead of the old compatibility flag. * RFC 6638 scheduling feature-detection infrastructure: new `scheduling`, `scheduling.mailbox`, and `scheduling.calendar-user-address-set` compatibility hints; legacy `no_scheduling` flags migrated. Default scheduling hints set for all the servers tested. * Calendar owner example (`examples/calendar_owner_examples.py`) demonstrating how to retrieve the owner of a calendar via `DAV:owner` and resolve their calendar-user address. `testFindCalendarOwner` now exercises the full owner → principal → `get_vcal_address()` chain. Closes https://github.com/python-caldav/caldav/issues/544 * `testInviteAndRespond` implemented end-to-end: organizer creates an event, invites an attendee, attendee accepts, and the organizer verifies the updated `PARTSTAT`. 
Per-server compatibility flags applied for known quirks (Baikal, Cyrus, SOGo). diff --git a/caldav/collection.py b/caldav/collection.py index ca9b3053..e17cea84 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -477,15 +477,17 @@ def calendars(self) -> list["Calendar"]: """ return self.get_calendars() - def freebusy_request(self, dtstart, dtend, attendees): + ## TODO: we have code in lib.vcal for constructing icalendar objects, + ## and from icalendar 7 there is also code in the icalendar library + ## for this. The cruft below for constructing the request should be + ## eliminated. Also, the async diversion should happen closer to the + ## bottom of the method, reducing the need of duplicating code + def freebusy_request(self, dtstart, dtend, attendees) -> dict[str, FreeBusy]: """Sends a freebusy-request for some attendee to the server as per RFC6638. For async clients, returns a coroutine that must be awaited. """ - if self.is_async_client: - return self._async_freebusy_request(dtstart, dtend, attendees) - freebusy_ical = icalendar.Calendar() freebusy_ical.add("prodid", "-//tobixen/python-caldav//EN") freebusy_ical.add("version", "2.0") @@ -498,42 +500,34 @@ def freebusy_request(self, dtstart, dtend, attendees): freebusy_comp.add("dtend", dtend) freebusy_ical.add_component(freebusy_comp) outbox = self.schedule_outbox() - caldavobj = FreeBusy(data=freebusy_ical, parent=outbox) - caldavobj.add_organizer() + caldavobj = FreeBusy(data=freebusy_ical, parent=self) for attendee in attendees: caldavobj.add_attendee(attendee, no_default_parameters=True) + if self.is_async_client: + return self._async_freebusy_request(outbox, caldavobj) + + caldavobj.add_organizer() + response = self.client.post( outbox.url, caldavobj.data, headers={"Content-Type": "text/calendar; charset=utf-8"}, ) - return response._find_objects_and_props() + return response._parse_scheduling_response_objects(parent=self) - async def _async_freebusy_request(self, dtstart, dtend, attendees): 
+ async def _async_freebusy_request(self, outbox, fb_obj) -> dict: """Async implementation of freebusy_request() for async clients.""" - freebusy_ical = icalendar.Calendar() - freebusy_ical.add("prodid", "-//tobixen/python-caldav//EN") - freebusy_ical.add("version", "2.0") - freebusy_ical.add("method", "REQUEST") - uid = uuid.uuid4() - freebusy_comp = icalendar.FreeBusy() - freebusy_comp.add("uid", uid) - freebusy_comp.add("dtstamp", datetime.now()) - freebusy_comp.add("dtstart", dtstart) - freebusy_comp.add("dtend", dtend) - freebusy_ical.add_component(freebusy_comp) - outbox = await self._async_schedule_outbox() - caldavobj = FreeBusy(data=freebusy_ical, parent=outbox) - await caldavobj.add_organizer() - for attendee in attendees: - caldavobj.add_attendee(attendee, no_default_parameters=True) - response = await self.client.post( - outbox.url, - caldavobj.data, - headers={"Content-Type": "text/calendar; charset=utf-8"}, - ) - return response._find_objects_and_props() + ## TODO: could we have common headers as global variable? + headers = {"Content-Type": "text/calendar; charset=utf-8"} + outbox = await outbox + ## TODO: it's really bad that arbitrary methods returns + ## a coroutine in async mode. It's needed to make it much + ## more clear what methods involves I/O and what methods + ## doesn't involve I/O in 4.0 + await fb_obj.add_organizer() + response = await self.client.post(outbox.url, fb_obj.data, headers) + return response._parse_scheduling_response_objects(parent=self) def calendar_user_address_set(self) -> list[str | None]: """ @@ -1473,7 +1467,7 @@ def freebusy_request(self, start: datetime, end: datetime) -> "FreeBusy": Returns: [FreeBusy(), ...] """ - + ## TODO: async variant? 
root = cdav.FreeBusyQuery() + [cdav.TimeRange(start, end)] response = self._query(root, 1, "report") return FreeBusy(self, response.raw) diff --git a/caldav/datastate.py b/caldav/datastate.py index d528a6d1..72c89dc2 100644 --- a/caldav/datastate.py +++ b/caldav/datastate.py @@ -5,6 +5,9 @@ representations of calendar data (raw string, icalendar object, vobject object). See https://github.com/python-caldav/caldav/issues/613 for design discussion. + +TODO: verify that we have sufficient test coverage - both through unit tests +and integration tests """ from __future__ import annotations @@ -61,18 +64,18 @@ def get_uid(self) -> str | None: """ cal = self.get_icalendar_copy() for comp in cal.subcomponents: - if comp.name in ("VEVENT", "VTODO", "VJOURNAL") and "UID" in comp: + if comp.name in ("VEVENT", "VTODO", "VJOURNAL", "FREEBUSY") and "UID" in comp: return str(comp["UID"]) return None def get_component_type(self) -> str | None: - """Get the component type (VEVENT, VTODO, VJOURNAL) without full parsing. + """Get the component type (VEVENT, VTODO, VJOURNAL, FREEBUSY) without full parsing. Default implementation parses the data, but subclasses can optimize. 
""" cal = self.get_icalendar_copy() for comp in cal.subcomponents: - if comp.name in ("VEVENT", "VTODO", "VJOURNAL"): + if comp.name in ("VEVENT", "VTODO", "VJOURNAL", "FREEBUSY"): return comp.name return None @@ -146,6 +149,8 @@ def get_component_type(self) -> str | None: return "VTODO" elif "BEGIN:VJOURNAL" in self._data: return "VJOURNAL" + elif "BEGIN:FREEBUSY" in self._data: + return "VFREEBUSY" return None @@ -182,13 +187,13 @@ def get_vobject_copy(self) -> vobject.base.Component: def get_uid(self) -> str | None: for comp in self._calendar.subcomponents: - if comp.name in ("VEVENT", "VTODO", "VJOURNAL") and "UID" in comp: + if comp.name in ("VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY") and "UID" in comp: return str(comp["UID"]) return None def get_component_type(self) -> str | None: for comp in self._calendar.subcomponents: - if comp.name in ("VEVENT", "VTODO", "VJOURNAL"): + if comp.name in ("VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY"): return comp.name return None @@ -232,6 +237,8 @@ def get_uid(self) -> str | None: return str(self._vobject.vtodo.uid.value) elif hasattr(self._vobject, "vjournal"): return str(self._vobject.vjournal.uid.value) + elif hasattr(self._vobject, "vfreebusy"): + return str(self._vobject.vfreebusy.uid.value) except AttributeError: pass return None @@ -243,4 +250,6 @@ def get_component_type(self) -> str | None: return "VTODO" elif hasattr(self._vobject, "vjournal"): return "VJOURNAL" + elif hasattr(self._vobject, "vfreebusy"): + return "VFREEBUSY" return None diff --git a/caldav/elements/cdav.py b/caldav/elements/cdav.py index 5636a728..aa16c961 100644 --- a/caldav/elements/cdav.py +++ b/caldav/elements/cdav.py @@ -41,6 +41,9 @@ def _to_utc_date_string(ts): return ts.strftime("%Y%m%dT%H%M%SZ") +## TODO: add RFC references to every class, like it's done in the Response class + + # Operations class CalendarQuery(BaseElement): tag: ClassVar[str] = ns("C", "calendar-query") @@ -58,14 +61,6 @@ class CalendarMultiGet(BaseElement): tag: 
ClassVar[str] = ns("C", "calendar-multiget") -class ScheduleInboxURL(BaseElement): - tag: ClassVar[str] = ns("C", "schedule-inbox-URL") - - -class ScheduleOutboxURL(BaseElement): - tag: ClassVar[str] = ns("C", "schedule-outbox-URL") - - # Filters class Filter(BaseElement): tag: ClassVar[str] = ns("C", "filter") @@ -143,13 +138,6 @@ class Comp(NamedBaseElement): tag: ClassVar[str] = ns("C", "comp") -# Uhhm ... can't find any references to calendar-collection in rfc4791.txt -# and newer versions of baikal gives 403 forbidden when this one is -# encountered -# class CalendarCollection(BaseElement): -# tag = ns("C", "calendar-collection") - - # Properties class CalendarUserAddressSet(BaseElement): tag: ClassVar[str] = ns("C", "calendar-user-address-set") @@ -208,5 +196,47 @@ class Allprop(BaseElement): tag: ClassVar[str] = ns("C", "allprop") +# Scheduling + + class ScheduleTag(BaseElement): tag: ClassVar[str] = ns("C", "schedule-tag") + + +class ScheduleInboxURL(BaseElement): + tag: ClassVar[str] = ns("C", "schedule-inbox-URL") + + +class ScheduleOutboxURL(BaseElement): + tag: ClassVar[str] = ns("C", "schedule-outbox-URL") + + +class ScheduleResponse(BaseElement): + tag: ClassVar[str] = ns("C", "schedule-response") + + +class Response(BaseElement): + """ + https://datatracker.ietf.org/doc/html/rfc6638#section-10.2 + Child of schedule-response + """ + + tag: ClassVar[str] = ns("C", "response") + + +class Recipient(BaseElement): + """ + https://datatracker.ietf.org/doc/html/rfc6638#section-10.3 + Child of response + """ + + tag: ClassVar[str] = ns("C", "recipient") + + +class RequestStatus(BaseElement): + """ + https://datatracker.ietf.org/doc/html/rfc6638#section-10.4 + Child of response + """ + + tag: ClassVar[str] = ns("C", "request-status") diff --git a/caldav/elements/dav.py b/caldav/elements/dav.py index ffe5f1e2..0c30387c 100644 --- a/caldav/elements/dav.py +++ b/caldav/elements/dav.py @@ -5,6 +5,8 @@ from .base import BaseElement, ValuedBaseElement +## TODO: add 
RFC references to every class, consistently with the cdav.py + # Operations class Propfind(BaseElement): diff --git a/caldav/elements/ical.py b/caldav/elements/ical.py index 1578eafc..5711e71a 100644 --- a/caldav/elements/ical.py +++ b/caldav/elements/ical.py @@ -6,7 +6,7 @@ from .base import ValuedBaseElement -# Properties +# Properties - those are non-standard but implemented in several calendar servers class CalendarColor(ValuedBaseElement): tag: ClassVar[str] = ns("I", "calendar-color") diff --git a/caldav/protocol/xml_parsers.py b/caldav/protocol/xml_parsers.py index c14452ec..f6e267a7 100644 --- a/caldav/protocol/xml_parsers.py +++ b/caldav/protocol/xml_parsers.py @@ -162,6 +162,7 @@ def _parse_calendar_query_response( return results +## TODO: the purpose of the xml_parsers was to consolidate common code to be used by sync and async code paths, to avoid duplicated code. Why cannot this code snippet be used for async? The code here is very similar to _parse_calendar_query_response - we should consolidate common code def _parse_sync_collection_response( body: bytes, status_code: int = 207, diff --git a/caldav/response.py b/caldav/response.py index 8f807cb4..b3152ddb 100644 --- a/caldav/response.py +++ b/caldav/response.py @@ -13,7 +13,8 @@ from lxml import etree from lxml.etree import _Element -from caldav.elements import dav +from caldav.calendarobjectresource import FreeBusy +from caldav.elements import cdav, dav from caldav.elements.base import BaseElement from caldav.lib import error from caldav.lib.python_utilities import to_normal_str @@ -230,17 +231,90 @@ def _parse_response(self, response: _Element) -> tuple[str, list[_Element], Any error.assert_("404" in status) return (cast(str, href), propstats, status) - ## TODO: there is currently quite some overlapping with the protocol.xml_parsers - ## we should refactor + def _parse_scheduling_response_objects(self, parent) -> dict: + """Parses an RFC6638 freebusy scheduling request response + + The response 
from the server is asserted to be a + scheduling-response, with freebusy status for one or more wanted + attendee - potentially with error status for all or some + of the wanted attendees. + + TODO: some asserts here - should make better error handling + + Returns: + Dict with: + * email addresses -> FreeBusy status (raw data) + * errors - dict with email addresses -> error messages + + """ + self.objects = {} + self.objects["errors"] = {} + assert self.tree.tag == cdav.ScheduleResponse.tag + for response in self.tree: + assert response.tag == cdav.Response.tag + parsed_response = self._parse_scheduling_response(response) + for x in parsed_response: + if x.endswith(":err"): + self.objects["errors"][x[:-4]] = parsed_response[x] + else: + self.objects[x] = FreeBusy(parent=parent, data=parsed_response[x]) + + return self.objects + + def _parse_scheduling_response(self, response) -> dict[str, str]: + """ + TODO: lots of asserts here - should make better error handling + + Parses one attendee response from a RFC6638 freebusy scheduling request + + Returns: + * ``{ recipient => calendar_data }`` if everything is OK, + * ``{f"{recipient}:err": status}`` if things are not OK, + * a dict with both elements if things are partially OK + """ + ret = {} + recipient = None + status = None + calendar_data = None + for x in response: + if x.tag == cdav.Recipient.tag: + if len(x) == 1: + assert x[0].tag == dav.Href.tag + recipient = x[0].text + else: + recipient = x.text + elif x.tag == cdav.RequestStatus.tag: + status = x.text + elif x.tag == cdav.CalendarData.tag: + calendar_data = x.text + else: + raise error.DAVError(f"unexpected attribute {x.tag}") + assert recipient + assert status + if not status.startswith("2.0"): + ret[f"{recipient}:err"] = status + if calendar_data: + ret[recipient] = calendar_data + return ret + + ## TODO: there is currently quite some overlapping with the + ## protocol.xml_parsers we should refactor. 
I'm not 100% sure the + ## protocol.xml_parsers layer is a better approach. Look for more + ## cases of old code that was is still remaining after the + ## protocol layer refactoring def _find_objects_and_props(self) -> dict[str, dict[str, _Element]]: """Internal implementation of find_objects_and_props without deprecation warning.""" self.objects: dict[str, dict[str, _Element]] = {} self.statuses: dict[str, str] = {} + ## TODO: the schedule_tag is not used anywhere as for now + ## TODO: should it be set somewhere else? (now it's not + ## covered by the scheduling freebusy requests) if "Schedule-Tag" in self.headers: self.schedule_tag = self.headers["Schedule-Tag"] responses = self._strip_to_multistatus() + for r in responses: if r.tag == dav.SyncToken.tag: self.sync_token = r.text @@ -312,7 +386,7 @@ def _expand_simple_prop( if proptag in props_found: prop_xml = props_found[proptag] for item in prop_xml.items(): - if proptag == "{urn:ietf:params:xml:ns:caldav}calendar-data": + if proptag == cdav.CalendarData.tag: if ( item[0].lower().endswith("content-type") and item[1].lower() == "text/calendar" diff --git a/caldav/search.py b/caldav/search.py index 3661860a..d5216de5 100644 --- a/caldav/search.py +++ b/caldav/search.py @@ -233,13 +233,17 @@ def _search_impl( ): """Core search implementation as a generator yielding actions. - (TODO: refactoring beyond readability? Is this sane?) - This generator contains all the search logic and yields (action, data) tuples that the caller (sync or async) executes. Results are sent back via .send(). + TODO: refactoring beyond readability? Is this sane? If + nothing else, the generator data flow better. 
Possibly this + method is too long and should be split up for improved + readability + Yields: Tuples of (SearchAction, data) where data depends on action type + """ if calendar is None: calendar = self._calendar diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index bf0d12fa..3df2449c 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -2336,6 +2336,109 @@ def test_principal_freebusy_request_returns_coroutine_for_async_client(self): result.close() +class TestFreeBusyScheduleResponse: + """Unit tests for parsing RFC6638 schedule-response to a freebusy request. + + The XML fixture is taken directly from RFC 6638 Appendix B.5. + Three attendees are requested; two succeed (with VFREEBUSY calendar-data), + one fails with "3.7;Invalid calendar user". + """ + + # RFC 6638 §B.5 server response (slightly compacted for readability) + RFC6638_B5_XML = b"""\ + + + + +mailto:wilfredo@example.com + +2.0;Success +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Server//EN +METHOD:REPLY +BEGIN:VFREEBUSY +UID:4FD3AD926350 +DTSTAMP:20090602T200733Z +DTSTART:20090602T000000Z +DTEND:20090604T000000Z +ORGANIZER;CN="Cyrus Daboo":mailto:cyrus@example.com +ATTENDEE;CN="Wilfredo Sanchez Vega":mailto:wilfredo@example.com +FREEBUSY;FBTYPE=BUSY:20090602T110000Z/20090602T120000Z +FREEBUSY;FBTYPE=BUSY:20090603T170000Z/20090603T180000Z +END:VFREEBUSY +END:VCALENDAR + + + + +mailto:bernard@example.net + +2.0;Success +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Server//EN +METHOD:REPLY +BEGIN:VFREEBUSY +UID:4FD3AD926350 +DTSTAMP:20090602T200733Z +DTSTART:20090602T000000Z +DTEND:20090604T000000Z +ORGANIZER;CN="Cyrus Daboo":mailto:cyrus@example.com +ATTENDEE;CN="Bernard Desruisseaux":mailto:bernard@example.net +FREEBUSY;FBTYPE=BUSY:20090602T150000Z/20090602T160000Z +FREEBUSY;FBTYPE=BUSY:20090603T090000Z/20090603T100000Z +FREEBUSY;FBTYPE=BUSY:20090603T180000Z/20090603T190000Z +END:VFREEBUSY +END:VCALENDAR + + + + 
+mailto:mike@example.org + +3.7;Invalid calendar user + +""" + + def _make_schedule_response(self): + """Return a DAVResponse wrapping the RFC 6638 §B.5 XML.""" + from caldav.davclient import DAVResponse + + resp = mock.MagicMock() + resp.status_code = 200 + resp.reason = "OK" + resp.headers = {"Content-Type": "application/xml; charset=utf-8"} + resp.content = self.RFC6638_B5_XML + return DAVResponse(resp) + + def test_schedule_response_returns_three_recipients(self): + """Parsing the B.5 response must yield one entry per recipient.""" + response = self._make_schedule_response() + result = response._parse_scheduling_response_objects(parent=mock.MagicMock()) + assert len(result) == 3 + + def test_schedule_response_successful_recipients_have_calendar_data(self): + """Recipients with 2.0;Success must have calendar-data containing VFREEBUSY.""" + response = self._make_schedule_response() + result = response._parse_scheduling_response_objects(parent=mock.MagicMock()) + wilfredo = result["mailto:wilfredo@example.com"] + bernard = result["mailto:bernard@example.net"] + assert "VFREEBUSY" in wilfredo.data + assert "VFREEBUSY" in bernard.data + # Wilfredo has 2 busy slots; Bernard has 3 + assert wilfredo.data.count("\nFREEBUSY") == 2 + assert bernard.data.count("\nFREEBUSY") == 3 + + def test_schedule_response_failed_recipient_has_no_calendar_data(self): + """Recipients with a non-2.x status must have no calendar data""" + response = self._make_schedule_response() + result = response._parse_scheduling_response_objects(parent=mock.MagicMock()) + assert result["errors"]["mailto:mike@example.org"] == "3.7;Invalid calendar user" + assert not result.get("mailto:mike@example.org") + + class TestRateLimitHelpers: """Unit tests for the shared rate-limit helper functions in caldav.lib.error.""" From d24174700041488d8322f2b1226319c6ce05c7d0 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Wed, 15 Apr 2026 21:56:11 +0200 Subject: [PATCH 10/17] ci: set up ai-prompt-auto-commit 
pre-commit hooks Adds the pycalendar/ai-prompt-auto-commit hooks so that Claude Code prompts are automatically appended to commit messages. - unstage-ai-prompts: keeps .prompts/ out of the index - append-ai-prompts (prepare-commit-msg): injects prompts into message - archive-ai-prompts (post-commit): moves .prompts/ to committed/ - prepare-ai-repository (manual): one-time setup already run Also installs prepare-commit-msg and post-commit git hooks locally. prompt: ~/.claude/skills/python-project-modernization.md is now updated with the https://github.com/pycalendar/ai-prompt-auto-commit tool. Please fix set this up for the caldav repo. AI-generated: This is considered to be CI-infrastructure, one of the areas identified to be particularly suitable for AI assistance. Co-Authored-By: Claude Sonnet 4.6 --- .claude/settings.json | 15 +++++++++++++++ .pre-commit-config.yaml | 11 +++++++++++ 2 files changed, 26 insertions(+) create mode 100644 .claude/settings.json diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..1a98275f --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,15 @@ +{ + "hooks": { + "UserPromptSubmit": [ + { + "hooks": [ + { + "id": "ai-prompt-auto-commit", + "type": "command", + "command": "input=$(cat); ts=$(date +%Y-%m-%dT%H-%M-%S); model=$(printf '%s' \"$input\" | jq -r '.model // \"claude-sonnet-4-6\"'); printf '%s' \"$input\" | jq -r '.prompt' > \".prompts/${ts}_${model}.md\"" + } + ] + } + ] + } +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 456d698c..174bf065 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,6 +14,17 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer + - repo: https://github.com/pycalendar/ai-prompt-auto-commit + rev: v0.0.5 + hooks: + - id: unstage-ai-prompts + - id: append-ai-prompts + stages: [prepare-commit-msg] + - id: archive-ai-prompts + stages: [post-commit] + - id: prepare-ai-repository + stages: [manual] + - repo: 
https://github.com/compilerla/conventional-pre-commit rev: v3.4.0 hooks: From da13b53bfe0028c2d399c96a010373d4f1f3b1ea Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Mon, 13 Apr 2026 02:36:27 +0200 Subject: [PATCH 11/17] feat: Schedule-Tag and etag support MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ref RFC 6638 §3.2-3.3 Schedule-Tag implementation: - `save()` and `add_event()` capture the Schedule-Tag from the response header and stores it in `self.props` (same logic with etag). - `save()` sends `If-Schedule-Tag-Match` or `If-Match`-header if etag or schedule-tag is set. - raises `ScheduleTagMismatchError` or `ETagMismatchError` on 412. - `_reply_to_invite_request()`: when the server auto-schedules the event into the attendee's calendar (`scheduling.auto-schedule` supported), search all attendee calendars first and update the existing copy in place to preserve the server-assigned Schedule-Tag. Fall through to `add_event()` only for non-auto-schedule servers. - Assume SEQUENCE:0 default when SEQUENCE property is absent (RFC 5546 section 2.1.4 requires incrementing for significant changes). test: add failing tests for Schedule-Tag support (RFC 6638) Also adds design docs: - docs/design/TODO_SCHEDULE_TAG.md (analysis and implementation plan, refs https://github.com/python-caldav/caldav/issues/660) - docs/design/TODO_COMPATIBILITY_HINTS.md (FeatureSet cleanup analysis, refs https://github.com/python-caldav/caldav/issues/659) The schedule-tag logic was predominantly hand-written (with some trivial bugfixing done by Claude). Claude contributed with design suggestions, which have been partly followed. Test code is predominantly AI-written. prompt: (exact prompt is lost, but I was discussing the schedule-tags, and the output is in the new file docs/design/TODO_SCHEDULE_TAG.md) prompt: Please write up test code (unit tests + integration tests) on the schedule tags. Don't fix the code yet. 
prompt: Please write up test code (unit tests + integration tests) on the schedule tags. Don't fix the code yet. followup-prompts: (discussions on test breakages. While debugging, Claude has been insisting on searching for the Schedule-Tag by using a PropFind if it wasn't included in the headers, but that does not make sense) Co-Authored-By: Claude Sonnet 4.6 --- AI-POLICY.md | 21 +- caldav/base_client.py | 3 + caldav/calendarobjectresource.py | 94 ++++-- caldav/collection.py | 57 ++-- caldav/compatibility_hints.py | 97 +++--- caldav/davclient.py | 6 +- caldav/lib/error.py | 8 + docs/design/TODO_COMPATIBILITY_HINTS.md | 134 ++++++++ docs/design/TODO_SCHEDULE_TAG.md | 130 ++++++++ tests/test_async_integration.py | 403 +++++++++++++++++++++--- tests/test_caldav.py | 306 ++++++++++++++++++ tests/test_schedule_tag.py | 244 ++++++++++++++ 12 files changed, 1360 insertions(+), 143 deletions(-) create mode 100644 docs/design/TODO_COMPATIBILITY_HINTS.md create mode 100644 docs/design/TODO_SCHEDULE_TAG.md create mode 100644 tests/test_schedule_tag.py diff --git a/AI-POLICY.md b/AI-POLICY.md index 888cd3ce..c8490251 100644 --- a/AI-POLICY.md +++ b/AI-POLICY.md @@ -4,9 +4,25 @@ The most important rule: be honest and inform about it! -Also: keep a log of the prompts used - prompts may be included in the +Keep a log of the prompts used - prompts should preferably be included in the git commits. +Tools should generally be used for improving the quality of the +project, not for rapidly adding new features. + +Keep a log of the prompts used - prompts should be included +verbatimely in the git commits as long as it's possible without making +the messages too messy. When relevant, chat-output may also need to +be included. The `docs/design`-folder can be used for dumping +AI-generated design documents, code reviews, prompts that are too +large for being included in the commit message, etc. + +Keep it clear what is human-written vs what is AI-written. 
In a
+feature-branch, separating AI-commits and human-commits is preferable.
+Those should most often be squashed together before including it in
+the main branch, with a notice in the commit message on what parts of
+the commit are AI-generated.
+
 ## Transparency matters
 
 If you've spent hours, perhaps a full day of your time writing up a
@@ -32,8 +48,7 @@ explain in details why I'm rejecting the pull request.
 
 It's fine to ask the AI for help to analyze a bug and create a fix for
 it.  By discovering the bug, reproducing it and testing it you're
-adding real value to the project - just be transparent about AI usage
-and do not take offence if the code changes are rejected, or completely
+adding real value to the project - just remember to be honest: if you have no clue what Claude did and why it solves the bug, then inform! Do not take offence if the code changes are rejected, or completely
 rewritten.
 
 ## General rules
diff --git a/caldav/base_client.py b/caldav/base_client.py
index 434cd9de..e2163af2 100644
--- a/caldav/base_client.py
+++ b/caldav/base_client.py
@@ -22,6 +22,9 @@
 
 log = logging.getLogger("caldav")
 
+## Common HTTP headers
+ICALH = {"Content-Type": 'text/calendar; charset="utf-8"'}
+
 
 class BaseDAVClient(ABC):
     """
diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py
index 144f0a23..a4e55dbf 100644
--- a/caldav/calendarobjectresource.py
+++ b/caldav/calendarobjectresource.py
@@ -39,6 +39,7 @@
 
 from contextlib import contextmanager
 
+from .base_client import ICALH
 from .datastate import DataState, IcalendarState, NoDataState, RawDataState, VobjectState
 from .davobject import DAVObject
 from .elements import cdav, dav
@@ -95,6 +96,15 @@ class CalendarObjectResource(DAVObject):
     _state: DataState | None = None
     _borrowed: bool = False
 
+    # Schedule tag (ref https://github.com/python-caldav/caldav/issues/660 and docs/design/TODO-SCHEDULE.md)
+    @property
+    def schedule_tag(self) -> str | None:
+        return self.props.get(cdav.ScheduleTag.tag)
+
+    
@property + def etag(self) -> str | None: + return self.props.get(dav.GetEtag.tag) + @property def id(self) -> str | None: """Returns the UID of the calendar object. @@ -410,7 +420,7 @@ def get_relatives( acceptable relation types in reltypes, or by passing a lambda function in relfilter. - TODO: Make it possible to also check up reverse relationships + TODO: Make it possible to also check up reverse relationships TODO: this is partially overlapped by plann.lib._relships_by_type in the plann tool. Should consolidate the code. @@ -773,18 +783,34 @@ def _reply_to_invite_request(self, partstat, calendar) -> None: ## we need to modify the icalendar code, update our own participant status self.icalendar_instance.pop("METHOD") self.change_attendee_status(partstat=partstat) - self.get_property(cdav.ScheduleTag(), use_cached=True) + uid = self.id + ## On auto-scheduling servers the server already places the event in the attendee's + ## calendar with a Schedule-Tag set. We must update that copy rather than creating a + ## new one via add_event() — a plain attendee PUT won't get a Schedule-Tag because + ## servers only assign it on organizer-originated scheduling operations. + if uid and self.client.features.is_supported("scheduling.auto-schedule"): + for cal in self.client.principal().calendars(): + try: + existing = cal.event_by_uid(uid) + existing.load() + existing.change_attendee_status(partstat=partstat) + existing.save() + return + except error.NotFoundError: + pass try: calendar.add_event(self.data) except Exception: - ## TODO - TODO - TODO - ## RFC6638 does not seem to be very clear (or - ## perhaps I should read it more thoroughly) neither on - ## how to handle conflicts, nor if the reply should be - ## posted to the "outbox", saved back to the same url or - ## sent to a calendar. + ## add_event() failed — the event likely already exists (e.g. non-auto-scheduling + ## server that still rejects duplicate UIDs). 
Reload self from the inbox so we have + ## fresh data (METHOD is restored), then retry via the outbox: posting an iTIP REPLY + ## to the outbox lets the server process the PARTSTAT update on our behalf, which is + ## the correct RFC 6638 mechanism when we cannot write directly to the calendar. + ## We intentionally do NOT do a separate PROPFIND for Schedule-Tag here: the tag must + ## be read atomically with the object data (a separate request could race with a + ## concurrent scheduling operation), and RFC 6638 requires the server to return it + ## as a response header on GET — so load() is sufficient if the server complies. self.load() - self.get_property(cdav.ScheduleTag(), use_cached=False) outbox = self.client.principal().schedule_outbox() if calendar.url != outbox.url: self._reply_to_invite_request(partstat, calendar=outbox) @@ -868,6 +894,7 @@ def load(self, only_if_unloaded: bool = False) -> Self: except Exception: return self.load_by_multiget() + ## consider refactoring - this is repeated many places now if "Etag" in r.headers: self.props[dav.GetEtag.tag] = r.headers["Etag"] if "Schedule-Tag" in r.headers: @@ -997,10 +1024,25 @@ def _find_id_path(self, id=None, path=None) -> None: self.url = URL.objectify(path) def _put(self, retry_on_failure=True): + ## TODO: quite much overlapping with _async_put, should consolidate + ## TODO: this is low-level http-communication - shouldn't it be in the davclient file rather than in calendarobjectresource.py? 
## SECURITY TODO: we should probably have a check here to verify that no such object exists already - r = self.client.put(self.url, self.data, {"Content-Type": 'text/calendar; charset="utf-8"'}) - if r.status == 302: - path = [x[1] for x in r.headers if x[0] == "location"][0] + headers = {} ## TODO: use some caseinsensitivedict + if self.schedule_tag: + headers["if-schedule-tag-match"] = self.schedule_tag + elif self.etag: + headers["if-match"] = self.etag + headers |= ICALH + r = self.client.put(self.url, self.data, headers) + if r.status == 412: + if self.schedule_tag: + raise error.ScheduleTagMismatchError(errmsg(r)) + elif self.etag: + raise error.ETagMismatchError(errmsg(r)) + else: + raise error.PutError(errmsg(r)) + elif r.status == 302: + self.url = URL.objectify([x[1] for x in r.headers if x[0] == "location"][0]) elif r.status not in (204, 201): if retry_on_failure: try: @@ -1014,14 +1056,14 @@ def _put(self, retry_on_failure=True): return self._put(False) else: raise error.PutError(errmsg(r)) + if "Etag" in r.headers: + self.props[dav.GetEtag.tag] = r.headers["Etag"] + if r.headers and r.headers.get("schedule-tag"): + self.props[cdav.ScheduleTag.tag] = r.headers["schedule-tag"] async def _async_put(self, retry_on_failure=True): """Async version of _put for async clients.""" - r = await self.client.put( - str(self.url), - str(self.data), - {"Content-Type": 'text/calendar; charset="utf-8"'}, - ) + r = await self.client.put(str(self.url), str(self.data), ICALH) if r.status == 302: path = [x[1] for x in r.headers if x[0] == "location"][0] self.url = URL.objectify(path) @@ -1036,6 +1078,11 @@ async def _async_put(self, retry_on_failure=True): return await self._async_put(False) else: raise error.PutError(errmsg(r)) + ## TODO: refactor - those code lines are repeated all over the place + if "Etag" in r.headers: + self.props[dav.GetEtag.tag] = r.headers["Etag"] + if r.headers and r.headers.get("schedule-tag"): + self.props[cdav.ScheduleTag.tag] = 
r.headers["schedule-tag"] def _create(self, id=None, path=None, retry_on_failure=True) -> None: ## TODO: Find a better method name @@ -1120,7 +1167,6 @@ def save( no_create: bool = False, obj_type: str | None = None, increase_seqno: bool = True, - if_schedule_tag_match: bool = False, only_this_recurrence: bool = True, all_recurrences: bool = False, ) -> Self: @@ -1137,8 +1183,7 @@ def save( The SEQUENCE should be increased when saving a new version of the object. If this behaviour is unwanted, then - increase_seqno should be set to False. Also, if SEQUENCE is - not set, then this will be ignored. + increase_seqno should be set to False. The behaviour when saving a single recurrence object to the server is as far as I can understand not defined in the RFCs, @@ -1170,7 +1215,6 @@ def save( no_create=no_create, obj_type=obj_type, increase_seqno=increase_seqno, - if_schedule_tag_match=if_schedule_tag_match, only_this_recurrence=only_this_recurrence, all_recurrences=all_recurrences, ) @@ -1285,11 +1329,10 @@ def _incorporate_recurrence_into_parent(self, obj, only_this_recurrence, all_rec ici.add_component(self.icalendar_component) def _maybe_increment_sequence(self, increase_seqno): - """Increment SEQUENCE number if present and increase_seqno is True.""" + """Increment SEQUENCE number if increase_seqno is True.""" if increase_seqno and "SEQUENCE" in self.icalendar_component: - seqno = self.icalendar_component.pop("SEQUENCE", None) - if seqno is not None: - self.icalendar_component.add("SEQUENCE", seqno + 1) + seqno = self.icalendar_component.pop("SEQUENCE", 0) + self.icalendar_component.add("SEQUENCE", seqno + 1) async def _async_save( self, @@ -1297,7 +1340,6 @@ async def _async_save( no_create: bool = False, obj_type: str | None = None, increase_seqno: bool = True, - if_schedule_tag_match: bool = False, only_this_recurrence: bool = True, all_recurrences: bool = False, ) -> Self: diff --git a/caldav/collection.py b/caldav/collection.py index e17cea84..44daa662 100644 
--- a/caldav/collection.py +++ b/caldav/collection.py @@ -30,6 +30,7 @@ from collections.abc import Iterable, Iterator, Sequence from typing import Literal +from .base_client import ICALH from .calendarobjectresource import ( CalendarObjectResource, Event, @@ -509,17 +510,13 @@ def freebusy_request(self, dtstart, dtend, attendees) -> dict[str, FreeBusy]: caldavobj.add_organizer() - response = self.client.post( - outbox.url, - caldavobj.data, - headers={"Content-Type": "text/calendar; charset=utf-8"}, - ) + response = self.client.post(outbox.url, caldavobj.data, headers=ICALH) return response._parse_scheduling_response_objects(parent=self) async def _async_freebusy_request(self, outbox, fb_obj) -> dict: """Async implementation of freebusy_request() for async clients.""" ## TODO: could we have common headers as global variable? - headers = {"Content-Type": "text/calendar; charset=utf-8"} + headers = ICALH outbox = await outbox ## TODO: it's really bad that arbitrary methods returns ## a coroutine in async mode. It's needed to make it much @@ -1957,35 +1954,51 @@ def __init__( def get_items(self): """ - TODO: work in progress - TODO: perhaps this belongs to the super class? + Return all items currently in this scheduling mailbox (inbox or outbox). + + Unlike regular calendars, schedule mailboxes contain raw iTIP messages + (METHOD:REQUEST, METHOD:REPLY, METHOD:CANCEL, …) rather than permanent + calendar objects. Items should be processed and then deleted; they are + not meant to be kept indefinitely. + + Servers often do not support the sync-collection REPORT (RFC 6578) on + schedule-inbox/outbox — the inbox is not a full calendar collection and + may not be indexed the same way. We therefore attempt the sync-token + path first (efficient for repeat polling) but fall back transparently to + a plain PROPFIND depth-1 followed by individual GETs. Both paths return + loaded CalendarObjectResource objects. 
+ + This method does NOT belong on the Calendar super-class: Calendar exposes + type-specific accessors (get_events, get_todos, …) and uses search() + internally. The mailbox is a different beast — it holds transient, + mixed-type scheduling messages and must use children() as its fallback + because search() / REPORT queries against a mailbox URL are unreliable + across servers. """ + + def _load_from_children(): + items = [CalendarObjectResource(url=x[0], client=self.client) for x in self.children()] + for x in items: + x.load() + return items + if not self._items: try: self._items = self.objects(load_objects=True) except Exception: logging.debug( - "caldav server does not seem to support a sync-token REPORT query on a scheduling mailbox" + "sync-collection REPORT not supported on scheduling mailbox %s; " + "falling back to PROPFIND depth-1", + self.url, ) - error.assert_("google" in str(self.url)) - self._items = [ - CalendarObjectResource(url=x[0], client=self.client) for x in self.children() - ] - for x in self._items: - x.load() + self._items = _load_from_children() else: try: self._items.sync() except Exception: - self._items = [ - CalendarObjectResource(url=x[0], client=self.client) for x in self.children() - ] - for x in self._items: - x.load() + self._items = _load_from_children() return self._items - ## TODO: work in progress - # def get_invites(): # for item in self.get_items(): diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index a3cfb011..80819102 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -347,7 +347,7 @@ class FeatureSet: "links": ["https://datatracker.ietf.org/doc/html/rfc6638#section-2.4.1"], }, "scheduling.mailbox.inbox-delivery": { - "description": "Server delivers incoming scheduling REQUEST messages to the attendee's schedule-inbox (RFC6638 section 4.1). 
When unsupported, the server implements automatic scheduling: invitations are auto-processed and placed directly on the attendee's calendar without appearing in the inbox. Clients should check this feature to know whether to look for inbox items after sending an invite, or check the attendee calendar directly.", + "description": "Server delivers incoming scheduling REQUEST messages to the attendee's schedule-inbox (RFC6638 section 4.1). See also scheduling.auto-schedule for whether the server additionally auto-processes invitations into the attendee's calendar.", "links": [ "https://datatracker.ietf.org/doc/html/rfc6638#section-4.1", ], @@ -903,28 +903,22 @@ def dotted_feature_set_list(self, compact=False): ## We've sometimes been observing internal server errors on freebusy-requests. ## Should do more research on it next time it shows up. - ## Component type filtering is required - searches must specify event=True or todo=True; - ## omitting it returns empty results. - "search.comp-type.optional": "unsupported", - ## Principal property search returns 403 (not implemented) "principal-search": "ungraceful", - ## Server-side recurrence expansion is buggy for tasks and event exceptions - "search.recurrences.expanded.todo": "unsupported", + ## Server-side recurrence expansion for event exceptions is still broken; + ## VTODO RRULE expansion was fixed in xandikos PR #627 (released in 0.3.7). "search.recurrences.expanded.exception": "unsupported", + ## Open-start time-range searches (no lower bound) crash xandikos 0.3.7 with a + ## 500 Internal Server Error (OverflowError: date value out of range in icalendar.py + ## _expand_rrule_component when computing adjusted_start = start - duration). 
+ "search.time-range.open.start": {"support": "ungraceful", "behaviour": "500 Internal Server Error (OverflowError in rrule expansion)"}, + ## this only applies for very simple installations "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, "scheduling": {"support": "unsupported"}, - ## Open-start searches (end bound only) cause xandikos to return 500 when processing - ## VTODOs that have DURATION but no DUE (no DUE means the index falls back to a full - ## file check, which crashes in the time-range calculation). - 'search.time-range.open.start': {'support': 'ungraceful', 'behaviour': 'xandikos returns 500 on open-start searches involving DURATION-only VTODOs'}, - ## xandikos index-based filtering for VTODO is inaccurate: tasks with DTSTART+DUE - ## entirely outside the search range can be returned as false positives. - 'search.time-range.todo.strict': {'support': 'broken', 'behaviour': 'tasks with DTSTART+DUE outside the range are returned'}, } ## This seems to work as of version 3.5.4 of Radicale. @@ -941,15 +935,8 @@ def dotted_feature_set_list(self, compact=False): "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, ## freebusy is not supported yet, but on the long-term road map "scheduling": {"support": "unsupported"}, - ## Radicale does not return results for open-end date searches (only start given) - 'search.time-range.open.end': {'support': 'unsupported'}, 'old_flags': [ - ## calendar listings and calendar creation works a bit - ## "weird" on radicale - - #'text_search_is_exact_match_sometimes', - - ## extra features not specified in RFC5545 + ## extra features not specified in RFC4791 "calendar_order", "calendar_color" ] @@ -976,12 +963,14 @@ def dotted_feature_set_list(self, compact=False): #'save-load.todo.mixed-calendar': {'support': 'unsupported'}, ## Why? It started complaining about this just recently. 
'principal-search.by-name.self': {'support': 'unsupported'}, 'principal-search': {'support': 'ungraceful'}, + 'search.time-range.open.start.duration': 'broken', #'old_flags': ['unique_calendar_ids'], ## I'm surprised, I'm quite sure this was passing earlier. Caldav commit a98d50490b872e9b9d8e93e2e401c936ad193003, caldav server checker commit 3cae24cf99da1702b851b5a74a9b88c8e5317dad 'search.combined-is-logical-and': False, ## Observed with Nextcloud 33: server delivers iTIP notification to the inbox AND - ## auto-schedules into the attendee's calendar (same quirk as Baikal/Cyrus). - "scheduling.mailbox.inbox-delivery": {"support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar"}, + ## auto-schedules into the attendee's calendar. + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, } ## TODO: Latest - mismatch between config and test script in delete-calendar.free-namespace ... and create-calendar.set-displayname? @@ -1029,9 +1018,9 @@ def dotted_feature_set_list(self, compact=False): ## Zimbra implements server-side automatic scheduling: invitations are ## auto-processed into the attendee's calendar; no iTIP notification appears in the inbox. "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": {"support": "unsupported"}, + "scheduling.mailbox.inbox-delivery": False, + "scheduling.auto-schedule": True, 'save-load.icalendar.related-to': {'support': 'unsupported'}, - 'search.time-range.open.start.duration': {'support': 'unsupported'}, 'search.time-range.open.start': {'support': 'broken'}, "old_flags": [ @@ -1112,8 +1101,9 @@ def dotted_feature_set_list(self, compact=False): baikal = { ## version 0.10.1 # Baikal (sabre/dav) delivers iTIP notifications to the attendee inbox AND auto-schedules - # into their calendar (quirk: both delivery modes happen simultaneously). 
- "scheduling.mailbox.inbox-delivery": {"support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar"}, + # into their calendar. + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, "scheduling.mailbox": True, "http.multiplexing": "fragile", ## ref https://github.com/python-caldav/caldav/issues/564 'search.comp-type.optional': {'support': 'ungraceful'}, @@ -1158,16 +1148,12 @@ def dotted_feature_set_list(self, compact=False): # violating RFC6638 section 3.2 which requires the tag to remain stable. "scheduling.schedule-tag.stable-partstat": {"support": "unsupported"}, # Cyrus may not properly reject wrong passwords in some configurations - # Cyrus implements server-side automatic scheduling: for cross-user - # invites, the server both auto-processes the invite into the attendee's calendar + # Cyrus implements server-side automatic scheduling: for cross-user invites, + # the server both auto-processes the invite into the attendee's calendar # AND delivers an iTIP notification copy to the attendee's schedule-inbox. - # Clients do not need to explicitly accept from the inbox (auto-accept is done), - # but inbox items do appear. This is "quirk" behaviour: both delivery modes happen. "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": { - "support": "quirk", - "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar", - }, + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, } ## See comments on https://github.com/python-caldav/caldav/issues/3 @@ -1188,9 +1174,10 @@ def dotted_feature_set_list(self, compact=False): # lazy responses cause MultiplexingError when accessing status_code "http.multiplexing": { "support": "unsupported" }, # DAViCal delivers iTIP notifications to the attendee inbox AND auto-schedules - # into their calendar (quirk: both delivery modes happen simultaneously). + # into their calendar. 
"scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": {"support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar"}, + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, "search.comp-type.optional": { "support": "fragile" }, "search.recurrences.expanded.exception": { "support": "unsupported" }, "search.time-range.alarm": { "support": "unsupported" }, @@ -1206,7 +1193,6 @@ def dotted_feature_set_list(self, compact=False): 'calendar_order', 'vtodo_datesearch_notime_task_is_skipped', ], - 'search.time-range.open.start.duration': {'support': 'unsupported'}, } sogo = { @@ -1370,9 +1356,10 @@ def dotted_feature_set_list(self, compact=False): ## TODO: consolidate, make a sabredav dict and let davis/baikal build on it davis = { # Davis uses sabre/dav (same backend as Baikal): delivers iTIP notifications to the - # attendee inbox AND auto-schedules into their calendar (quirk behaviour). + # attendee inbox AND auto-schedules into their calendar. "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": {"support": "quirk", "behaviour": "server delivers iTIP notification to inbox AND auto-schedules into calendar"}, + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, "search.recurrences.expanded.todo": {"support": "unsupported"}, "search.recurrences.expanded.exception": {"support": "unsupported"}, "search.recurrences.includes-implicit.todo": {"support": "unsupported"}, @@ -1394,8 +1381,9 @@ def dotted_feature_set_list(self, compact=False): ## cannot be changed. The pre-provisioned "tasks" calendar supports VTODO only. ## VJOURNAL is not supported at all. 
ccs = { - ## scheduling.mailbox.inbox-delivery behaviour unknown until cross-user scheduling tests run - "scheduling.mailbox.inbox-delivery": {"support": "unknown"}, + "scheduling.freebusy-query": {"support": "ungraceful"}, + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, "save-load.journal": {"support": "unsupported"}, "save-load.todo.mixed-calendar": {"support": "unsupported"}, # CCS enforces unique UIDs across ALL calendars for a user @@ -1405,16 +1393,14 @@ def dotted_feature_set_list(self, compact=False): # CCS rejects multi-instance VTODOs (thisandfuture recurring completion) "save-load.todo.recurrences.thisandfuture": {"support": "unsupported"}, "search.comp-type.optional": {"support": "ungraceful"}, - "scheduling.free-busy": {"support": "broken"}, ## "full" observed, 70938dc1cbb6a839978eee4315699746d38ee5f0/3cae24cf99da1702b851b5a74a9b88c8e5317dad, 2026-02-17. ## However, this may be due to mess with the caldav-server-checker branches. "unsupported" again at be26d42b1ca3ff3b4fd183761b4a9b024ce12b84 / 537a23b145487006bb987dee5ab9e00cdebb0492 "search.text.case-sensitive": {"support": "unsupported"}, "search.time-range.event": {"support": "full"}, "search.time-range.event.old-dates": {"support": "ungraceful"}, "search.time-range.todo": {"support": "full"}, - 'search.time-range.open.start.duration': {'support': 'ungraceful'}, "search.time-range.todo.old-dates": {"support": "ungraceful"}, - "search.time-range.open.start": {"support": "ungraceful"}, + "search.time-range.open": {"support": "ungraceful"}, "search.time-range.alarm": {"support": "unsupported"}, "search.recurrences": {"support": "unsupported"}, "principal-search": {"support": "unsupported"}, @@ -1432,8 +1418,6 @@ def dotted_feature_set_list(self, compact=False): ## CalDAV served at /dav/cal// over HTTP on port 8080. ## Feature support mostly unknown until tested; starting with empty hints. 
stalwart = { - ## scheduling.mailbox.inbox-delivery behaviour unknown until cross-user scheduling tests run - "scheduling.mailbox.inbox-delivery": {"support": "unknown"}, 'rate-limit': { 'enable': True, 'default_sleep': 3, @@ -1454,8 +1438,11 @@ def dotted_feature_set_list(self, compact=False): 'search.recurrences.expanded.exception': False, ## Stalwart stores master+exception VEVENTs as a single resource with 2 VEVENTs. 'save-load.event.recurrences.exception': {'support': 'full'}, - ## Stalwart does not return results for open-end date searches (only start given) - 'search.time-range.open.end': {'support': 'unsupported'}, + 'search.time-range.open': True, + ## Stalwart delivers iTIP notifications to the attendee inbox AND auto-schedules + ## into their calendar (verified by running CheckSchedulingInboxDelivery). + "scheduling.mailbox.inbox-delivery": True, + "scheduling.auto-schedule": True, 'old_flags': [ ## Stalwart does not return VTODO items without DTSTART in date searches 'vtodo_datesearch_nodtstart_task_is_skipped', @@ -1585,9 +1572,15 @@ def dotted_feature_set_list(self, compact=False): 'principal-search.list-all': {'support': 'unsupported'}, ## Cross-calendar duplicate UID test fails (AuthorizationError creating second calendar) 'save.duplicate-uid.cross-calendar': {'support': 'ungraceful'}, - 'save-load.icalendar.related-to': {'support': 'unsupported'}, + 'save-load.icalendar.related-to': {'support': 'broken'}, ## OX App Suite has complex user provisioning; cross-user scheduling tests not yet set up. - "scheduling.mailbox.inbox-delivery": {"support": "unknown"}, + "scheduling": {"support": "unknown"}, + "scheduling.freebusy-query": "ungraceful", + 'search.time-range.open.start': "broken", + 'search.time-range.open.end': True, + ## time-range.open is "broken", while time-range.open.start.duration is "unsupported"? 
+ ## this may possibly be some problems with the checker rather than with Ox + 'search.time-range.open.start.duration': "unsupported" } # fmt: on diff --git a/caldav/davclient.py b/caldav/davclient.py index bf4e30cf..c6dfaf39 100644 --- a/caldav/davclient.py +++ b/caldav/davclient.py @@ -267,7 +267,7 @@ def __init__( when returning 401, warnings will be printed which might be unwanted. Check auth parameter for details. """ - headers = headers or {} + headers = headers or CaseInsensitiveDict() ## Deprecation TODO: give a warning, user should use get_davclient or auto_calendar instead. Probably. @@ -318,7 +318,7 @@ def __init__( "Accept": "text/xml, text/calendar", } ) - self.headers.update(headers or {}) + self.headers.update(headers or CaseInsensitiveDict()) if self.url.username is not None: username = unquote(self.url.username) password = unquote(self.url.password) @@ -756,7 +756,7 @@ def report(self, url: str, query: str = "", depth: int | None = 0) -> DAVRespons Returns DAVResponse """ - headers = {"Depth": str(depth)} if depth is not None else {} + headers = {"Depth": str(depth)} if depth is not None else CaseInsensitiveDict() return self.request(url, "REPORT", query, headers) def mkcol(self, url: str, body: str, dummy: None = None) -> DAVResponse: diff --git a/caldav/lib/error.py b/caldav/lib/error.py index 63821e38..16d79883 100644 --- a/caldav/lib/error.py +++ b/caldav/lib/error.py @@ -169,6 +169,14 @@ class ResponseError(DAVError): pass +class ScheduleTagMismatchError(DAVError): + pass + + +class ETagMismatchError(DAVError): + pass + + class RateLimitError(DAVError): """Raised when the server responds with 429 Too Many Requests or 503 Service Unavailable with a Retry-After header.""" diff --git a/docs/design/TODO_COMPATIBILITY_HINTS.md b/docs/design/TODO_COMPATIBILITY_HINTS.md new file mode 100644 index 00000000..f96c8b06 --- /dev/null +++ b/docs/design/TODO_COMPATIBILITY_HINTS.md @@ -0,0 +1,134 @@ +# FeatureSet Cleanup TODO + +This document records 
simplification opportunities found in `caldav/compatibility_hints.py` +after reviewing the `FeatureSet` class. The class was written with a more ambitious +type-system vision (multiple feature types with different semantics, collapsible +sub-feature hierarchies) that was only partially realised. Several methods carry dead +code or over-complexity that can now be trimmed. + +## 1. Dead/buggy bool branch in `_convert_node` + +**Location**: `FeatureSet._convert_node`, the final `else` branch (~line 708) + +```python +else: + ## TODO: this may be improved + return not node.get('enable') and not node.get('behaviour') and not node.get('observed') +``` + +This branch handles `bool` return for non-`server-feature` types (i.e. `client-feature`, +`server-peculiarity`, `server-observation`). The logic is **inverted**: when +`enable=False`, `not False` returns `True`, which would read a disabled client-feature as +"supported". + +In practice this branch is unreachable: every `is_supported` call on features of those +types uses `return_type=dict` (e.g. `is_supported("rate-limit", dict)`, +`is_supported("search-cache", dict)`). Nobody queries them for a bool. + +**Fix**: Replace the else-branch with a guard that surfaces misuse: +```python +else: + raise AssertionError( + f"is_supported(return_type=bool) is not meaningful for feature type " + f"{feature_info.get('type')!r}; use return_type=dict" + ) +``` +Or simply document the restriction. + +## 2. Redundant `_derive_from_subfeatures` call in `is_supported` + +**Location**: `FeatureSet.is_supported`, lines ~594 and ~606 + +When the original feature has no dots (no parent), `_derive_from_subfeatures` is called +twice on the same `feature_` during the same lookup: once in the while-loop body (line 594) +and once in the post-loop block (line 606). The second call (line 606) uses the original +`feature_info` but `feature_` may have walked up past the original feature. 
+ +Needs careful review; may be an off-by-one in the loop termination logic, or the two +conditions are truly independent and the second call is simply redundant. + +## 3. `_old_flags` — in-progress migration, clear removal path + +**Location**: `FeatureSet.__init__`, `FeatureSet.copyFeatureSet`, and ~12 server config +dicts in `compatibility_hints.py` + +`_old_flags` is explicitly marked `## TODO: remove this when it can be removed`. It is +a shim that carries forward the legacy flat-list quirk system while server configs are +being migrated to the new dotted-feature style. + +`test_caldav.py:1006` still reads `self.caldav.features._old_flags` to validate that old +flags are known strings. The test at line 1015-1016 confirms all flags are present in +`incompatibility_description`. + +**Fix**: For each server dict that still has `'old_flags': [...]`, translate the listed +flags into equivalent new-style features and remove the `old_flags` key. Once all server +dicts are migrated: +- Remove the `if feature == 'old_flags':` special-case in `copyFeatureSet` +- Remove `self._old_flags = []` from `__init__` +- Remove the `_old_flags` copy in the copy-constructor branch +- Remove the validation in `test_caldav.py` + +## 4. `feature_tree` / `_dots_to_tree` — internal detail that can be simplified + +**Location**: `FeatureSet.feature_tree`, `FeatureSet._dots_to_tree` + +`feature_tree()` builds and caches a full nested-dict tree of all feature names. Its +only consumer is `find_feature()`, which traverses one level of the tree to populate the +`subfeatures` key on a feature. Building and caching a full tree to answer one-level +lookups is overkill. + +**Fix**: In `find_feature`, compute subfeatures directly: +```python +prefix = feature + "." +cls.FEATURES[feature]['subfeatures'] = [ + f[len(prefix):] + for f in cls.FEATURES + if f.startswith(prefix) and '.' not in f[len(prefix):] +] +``` +Then remove `feature_tree` and `_dots_to_tree`. 
The `feature_tree` docstring already +questions its own existence ("TODO: is this in use at all?"). + +**Note**: Before removing `feature_tree`, confirm no external code (e.g. +`caldav-server-tester`) calls it. + +## 5. `_collapse_key` — vestigial `enable`/`observed` fields + +**Location**: `FeatureSet._collapse_key` + +```python +return ( + feature_dict.get('support'), + feature_dict.get('enable'), + feature_dict.get('observed'), +) +``` + +The `enable` and `observed` slots exist to correctly compare `client-feature` and +`server-observation` nodes during collapse. In practice, collapse is only ever invoked +on server-feature nodes (the only type that appears in the server config dicts, aside +from `old_flags`). The extra slots add noise without effect. + +**Fix**: Simplify to `return feature_dict.get('support')` once the broader type-system +simplification is settled. + +## 6. `copyFeatureSet` — rename to `copy_feature_set` + +**Location**: `FeatureSet.copyFeatureSet` + +There is already a TODO comment on the method noting the camelCase name is inconsistent. +The method has no external callers (grepping the repo outside `compatibility_hints.py` +finds none); all calls originate within the class itself plus `set_feature`. + +**Fix**: Rename to `copy_feature_set`. Trivial. + +## 7. Split this file + +Possibly into three files (or four, with the original compatibility_hints.py being a compatibility shim only importing things from the new files). + +There are three quite different things in the file now, the database of the feature names/flags, the database of server compatibility, and the match logic. + +## Ordering / dependencies + +Items 3 (old_flags) and 6 (rename) are independent and safe to do first. Items 1, 2, +4, 5 are interrelated around the type-system simplification and are best tackled together. 
diff --git a/docs/design/TODO_SCHEDULE_TAG.md b/docs/design/TODO_SCHEDULE_TAG.md new file mode 100644 index 00000000..8e16d41f --- /dev/null +++ b/docs/design/TODO_SCHEDULE_TAG.md @@ -0,0 +1,130 @@ +# Schedule-Tag TODO + +## What Schedule-Tag is (RFC 6638) + +Schedule-Tag is an opaque token attached to each scheduling object resource (an event/todo +that carries an ORGANIZER or ATTENDEE). It works like ETag for scheduling, but changes on +a different cadence: ETag changes on every PUT; Schedule-Tag changes only when the +**scheduling-significant** content changes. + +- **Organizer's copy**: tag changes on direct HTTP modifications (PUT/COPY/MOVE), but + **not** when the server auto-processes an attendee reply back onto the organizer's + resource. +- **Attendee's copy**: tag changes when the organizer sends an update, but **not** when + the attendee updates only their own participation status (PARTSTAT). + +The `If-Schedule-Tag-Match` request header lets a client say "only save this if my copy +has not been updated by the organizer since I fetched it". Without it, the classic race +is: + +1. Attendee fetches event (schedule-tag = "X"). +2. Organizer sends an update; server writes new data onto attendee's resource + (schedule-tag = "Y"). +3. Attendee PUTs their PARTSTAT change, unknowingly wiping out step 2. + +With `If-Schedule-Tag-Match: "X"`, step 3 returns 412 and the attendee client knows to +re-fetch and merge. + +References: +- https://datatracker.ietf.org/doc/html/rfc6638#section-3.2 +- https://datatracker.ietf.org/doc/html/rfc6638#section-3.3 +- https://datatracker.ietf.org/doc/html/rfc6638#section-8 + +## Current state in the codebase + +The infrastructure is half-built: + +- `cdav.ScheduleTag` element exists (`caldav/elements/cdav.py:202`). +- GET/load responses capture the `Schedule-Tag` response header into + `self.props[cdav.ScheduleTag.tag]` (`calendarobjectresource.py:873`, `918`). 
+- `save()` accepts `if_schedule_tag_match: bool = False` but the docstring says + *"is currently ignored"* — it is merely forwarded to `_async_save`, which also ignores + it (`calendarobjectresource.py:1136`). +- `_reply_to_invite_request` calls `get_property(ScheduleTag)` to populate the prop but + then never uses the value. +- `_put` / `_async_put` send a hardcoded header dict containing only `Content-Type` — no + conditional headers at all (not even `If-Match` / `If-None-Match` for ETag). + +## Suggested implementation + +### 1. Add extra-headers support to `_put` / `_async_put` + +`_put` needs to accept an optional extra-headers dict so callers can inject +`If-Schedule-Tag-Match` (and, in the future, `If-Match`): + +```python +def _put(self, retry_on_failure=True, extra_headers=None): + headers = {"Content-Type": 'text/calendar; charset="utf-8"'} + if extra_headers: + headers.update(extra_headers) + r = self.client.put(self.url, self.data, headers) + ... +``` + +### 2. Wire `if_schedule_tag_match` through `save()` → `_put()` + +In `save()` / `_async_save()`, when `if_schedule_tag_match=True`, look up the cached +schedule-tag property and inject the header: + +```python +if if_schedule_tag_match: + tag = self.props.get(cdav.ScheduleTag.tag) + if tag is None: + self.load() # fetch tag before sending conditional PUT + tag = self.props.get(cdav.ScheduleTag.tag) + if tag is not None: + extra_headers["If-Schedule-Tag-Match"] = tag +``` + +A missing cached tag is a notable edge case. The safest default is to do a `load()` +first so the tag is available; alternatively raise `ValueError` to surface the caller +error explicitly. + +### 3. Fix `_reply_to_invite_request` + +This method already calls `get_property(ScheduleTag)` but never uses the result. After +the fix to `save()`, the reply path should call `self.save(if_schedule_tag_match=True)` +so that the attendee's PARTSTAT update is protected against a racing organizer update. 
+ +The current fallback logic in that method is also confused: it re-fetches the schedule-tag +and then recurses with `calendar=outbox`, which bypasses the conditional header entirely. +This needs a clean rewrite once the basic wiring is in place. + +### 4. Expose `schedule_tag` as a public property + +The tag is currently buried in `self.props[cdav.ScheduleTag.tag]`. A simple property +would be cleaner and avoid callers importing `cdav`: + +```python +@property +def schedule_tag(self) -> str | None: + return self.props.get(cdav.ScheduleTag.tag) +``` + +### 5. Raise a specific exception on 412 schedule-tag mismatch + +When the server returns 412 for a schedule-tag mismatch, `_put` raises a generic +`PutError`. A more specific exception lets callers handle the "re-fetch and merge" case: + +```python +class ScheduleTagMismatchError(PutError): + """Server returned 412 because If-Schedule-Tag-Match did not match.""" +``` + +Distinguishing a schedule-tag 412 from an ETag 412 may require inspecting the response +body or a `Schedule-Tag` precondition code. + +### 6. Add a `scheduling.schedule-tag` compatibility hint + +Not all RFC 6638 servers implement schedule-tag (it is a SHOULD, not a MUST). A feature +entry should be added and detected by checking for the `Schedule-Tag` header in a GET +response on a scheduling object resource. + +## What to test + +- `save(if_schedule_tag_match=True)` on a stale object (tag changed server-side) → 412 + → `ScheduleTagMismatchError`. +- `save(if_schedule_tag_match=True)` on a fresh object → 204 → tag updated in props. +- `_reply_to_invite_request` sends `If-Schedule-Tag-Match` and succeeds without wiping + an organizer's concurrent update. +- PARTSTAT-only update does **not** change the schedule-tag (server compliance check). 
diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index 511247e3..d7f2f539 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -566,9 +566,8 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: """send a calendar invite via save_with_invites and verify delivery. Async counterpart of _TestSchedulingBase.testInviteAndRespond. - Note: accept_invite() is not yet supported for async clients, so - the response half of the flow is verified only at the delivery level - (inbox item or auto-scheduled event). + NOTE: inbox listing uses get_events() as a workaround since + ScheduleMailbox.get_items() does not yet have async support. """ import uuid @@ -577,16 +576,13 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: pytest.skip("need 2 principals to do the invite and respond test") ## Snapshot inbox contents before the invite + inbox0 = await principals[0].schedule_inbox() + inbox1 = await principals[1].schedule_inbox() inbox_urls_before: set[Any] = set() - try: - inbox0 = await principals[0].schedule_inbox() - inbox1 = await principals[1].schedule_inbox() - for item in await inbox0.get_events(): - inbox_urls_before.add(item.url) - for item in await inbox1.get_events(): - inbox_urls_before.add(item.url) - except Exception: - pass ## inbox listing may not work on all servers + for item in await inbox0.get_events(): + inbox_urls_before.add(item.url) + for item in await inbox1.get_events(): + inbox_urls_before.add(item.url) ## Send the invite base = _get_base_date() @@ -607,40 +603,63 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: "Event should appear in organizer's calendar after save_with_invites" ) - ## Poll: event auto-scheduled into attendee calendar OR new inbox item + ## Poll: check attendee's inbox and calendars. Some servers process + ## scheduling asynchronously, so poll with backoff before giving up. 
+ new_attendee_inbox_items: list[Any] = [] auto_scheduled = False - new_inbox_items: list[Any] = [] for _ in range(30): - try: + new_attendee_inbox_items = [ + item for item in await inbox1.get_events() if item.url not in inbox_urls_before + ] + ## Check whether the server auto-scheduled the event directly into + ## the attendee's calendar. The event may land in any calendar, + ## so search all attendee calendars for the event UID. + if not new_attendee_inbox_items: for cal in await principals[1].calendars(): - try: - if any(e.id == event_uid for e in await cal.get_events()): + for event in await cal.get_events(): + if event.id == event_uid: auto_scheduled = True break - except Exception: - pass - except Exception: - pass - if not auto_scheduled: - try: - new_inbox_items = [ - item - for item in await inbox1.get_events() - if item.url not in inbox_urls_before - ] - except Exception: - pass - if auto_scheduled or new_inbox_items: + if auto_scheduled: + break + if new_attendee_inbox_items or auto_scheduled: break await asyncio.sleep(1) - assert auto_scheduled or new_inbox_items, ( - "Expected invite in attendee inbox OR event auto-added to attendee calendar, " - "got neither" - ) + if len(new_attendee_inbox_items) == 0 or auto_scheduled: + ## Server implements automatic scheduling. Some servers (e.g. + ## Stalwart) may additionally deliver an iTIP copy to the inbox as + ## a notification, but the acceptance is already done. + assert auto_scheduled, ( + "Expected invite in attendee inbox OR event auto-added to attendee calendar, " + "got neither" + ) + return + + ## Normal inbox-delivery flow (RFC6638 section 4.1). - ## accept_invite() is not yet supported for async clients (raises NotImplementedError). - ## Verifying delivery is sufficient to confirm save_with_invites works end-to-end. 
+ ## No new inbox items expected for principals[0] yet + for item in await inbox0.get_events(): + assert item.url in inbox_urls_before + + assert len(new_attendee_inbox_items) == 1 + assert new_attendee_inbox_items[0].is_invite_request() + + ## Approving the invite. accept_invite() is not yet implemented for + ## async clients; skip rather than fail so the test can be extended later. + try: + new_attendee_inbox_items[0].accept_invite(calendar=calendars[1]) + except NotImplementedError: + pytest.skip("accept_invite() not yet supported for async clients") + + ## principals[0] should now have a notification in the inbox that the + ## calendar invite was accepted + new_organizer_inbox_items = [ + item for item in await inbox0.get_events() if item.url not in inbox_urls_before + ] + assert len(new_organizer_inbox_items) == 1 + assert new_organizer_inbox_items[0].is_invite_reply() + await new_organizer_inbox_items[0].delete() @pytest.mark.asyncio async def test_freebusy(self, scheduling_setup: Any) -> None: @@ -666,6 +685,316 @@ async def test_freebusy(self, scheduling_setup: Any) -> None: ## Just verify it completes without raising; response format varies per server. await coro + # ------------------------------------------------------------------ # + # Schedule-Tag tests (RFC 6638 section 3.2–3.3) # + # These are async counterparts of the sync tests in # + # _TestSchedulingBase. They are EXPECTED TO FAIL until async # + # scheduling support (_async_put with If-Schedule-Tag-Match etc.) # + # is implemented. # + # ------------------------------------------------------------------ # + + @pytest.mark.asyncio + async def test_schedule_tag_returned_on_save(self, scheduling_setup: Any) -> None: + """Saving a scheduling object must return a Schedule-Tag header. + + Async counterpart of testScheduleTagReturnedOnSave. + Expected to fail: _async_put() does not yet capture the Schedule-Tag + response header into event.props. 
+ """ + import uuid + + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("scheduling.schedule-tag") + if len(principals) < 2: + pytest.skip("need 2 principals") + + organizer_cal = calendars[0] + addr = await principals[0].get_vcal_address() + addr2 = await principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + ical = ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + "SUMMARY:Schedule-Tag test\r\n" + f"ORGANIZER:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr2}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + event = await organizer_cal.save_event(ical) + auto_uids.append(uid) + + assert event.schedule_tag is not None, "Server did not return Schedule-Tag header after PUT" + + @pytest.mark.asyncio + async def test_schedule_tag_stable_on_partstate_update(self, scheduling_setup: Any) -> None: + """PARTSTAT-only update must not change the Schedule-Tag. + + Async counterpart of testScheduleTagStableOnPartstateUpdate. + Expected to fail: accept_invite() raises NotImplementedError for + async clients. 
+ """ + import uuid + + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("scheduling.schedule-tag") + if len(principals) < 2: + pytest.skip("need 2 principals") + if not clients[1].features.is_supported("scheduling.mailbox.inbox-delivery"): + pytest.skip("server does not deliver iTIP requests to the inbox") + + organizer_cal = calendars[0] + attendee_cal = calendars[1] + organizer_addr = await principals[0].get_vcal_address() + attendee_addr = await principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + ical = ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + "SEQUENCE:0\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + "SUMMARY:Partstat stability test\r\n" + f"ORGANIZER:{organizer_addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{attendee_addr}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + saved_event = await organizer_cal.save_with_invites(ical, [principals[0], attendee_addr]) + auto_uids.append(uid) + + ## Wait for the REQUEST invite to land in attendee's inbox + invite = None + for _ in range(30): + inbox = await principals[1].schedule_inbox() + for item in await inbox.get_items(): + await item.load() + if item.is_invite_request() and item.id == saved_event.id: + invite = item + break + if invite: + break + await asyncio.sleep(1) + + if not invite: + pytest.skip("Invite not delivered to attendee inbox; cannot test PARTSTAT stability") + + ## accept_invite is not yet implemented for async clients + invite.accept_invite(calendar=attendee_cal) + + ## Find the attendee's copy + attendee_event = None + for _ in range(5): + for cal in await principals[1].calendars(): + try: + attendee_event = await cal.get_event_by_uid(saved_event.id) + break + except Exception: + pass + if attendee_event: + break + await asyncio.sleep(1) + + assert attendee_event is not None, "Event not found in any attendee calendar after accept" 
+ await attendee_event.load() + tag_before = attendee_event.schedule_tag + assert tag_before is not None, "No Schedule-Tag on attendee's calendar event after accept" + + ## PARTSTAT-only change — tag must not move + attendee_event.change_attendee_status(partstat="TENTATIVE") + await attendee_event.save() + await attendee_event.load() + tag_after = attendee_event.schedule_tag + + assert tag_after is not None, "No Schedule-Tag on attendee's event after PARTSTAT update" + assert tag_before == tag_after, ( + f"Schedule-Tag changed after PARTSTAT-only update: {tag_before!r} → {tag_after!r}" + ) + + @pytest.mark.asyncio + async def test_schedule_tag_changes_on_organizer_update(self, scheduling_setup: Any) -> None: + """Organizer update must advance the Schedule-Tag on the attendee's copy. + + Async counterpart of testScheduleTagChangesOnOrganizerUpdate. + Expected to fail: _async_load() does not yet capture the Schedule-Tag + response header. + """ + import uuid + + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("scheduling.schedule-tag") + if len(principals) < 2: + pytest.skip("need 2 principals") + + organizer_cal = calendars[0] + organizer_addr = await principals[0].get_vcal_address() + attendee_addr = await principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + seqno = 0 + + def _make_ical(summary: str) -> str: + nonlocal seqno + s = seqno + seqno += 1 + return ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + f"SEQUENCE:{s}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + f"SUMMARY:{summary}\r\n" + f"ORGANIZER:{organizer_addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{attendee_addr}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + + await organizer_cal.save_with_invites( + _make_ical("Original summary"), [principals[0], attendee_addr] + ) + auto_uids.append(uid) + + ## Poll for attendee's copy + attendee_event = 
None + for _ in range(30): + for cal in await principals[1].calendars(): + for ev in await cal.get_events(): + if ev.id == uid: + attendee_event = ev + break + if attendee_event: + break + if attendee_event: + break + await asyncio.sleep(1) + + if attendee_event is None: + pytest.skip("Event not delivered to attendee; cannot test tag change") + + await attendee_event.load() + tag_before = attendee_event.schedule_tag + assert tag_before is not None, "No Schedule-Tag on attendee's copy before organizer update" + + ## Organizer sends a substantive update + await organizer_cal.save_with_invites( + _make_ical("Updated summary"), [principals[0], attendee_addr] + ) + + ## Poll until the tag advances + for _ in range(30): + await attendee_event.load() + if attendee_event.schedule_tag != tag_before: + break + await asyncio.sleep(1) + + assert attendee_event.schedule_tag != tag_before, ( + f"Schedule-Tag did not change after organizer update: still {tag_before!r}" + ) + + @pytest.mark.asyncio + async def test_schedule_tag_mismatch_raises_error(self, scheduling_setup: Any) -> None: + """save() with a stale Schedule-Tag must raise ScheduleTagMismatchError. + + Async counterpart of testScheduleTagMismatchRaisesError. + Expected to fail: _async_put() does not yet send If-Schedule-Tag-Match + or raise ScheduleTagMismatchError on a 412 response. 
+ """ + import uuid + + from caldav.lib import error + + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("scheduling.schedule-tag") + if len(principals) < 2: + pytest.skip("need 2 principals to cause a server-side tag advance") + + organizer_cal = calendars[0] + organizer_addr = await principals[0].get_vcal_address() + attendee_addr = await principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + + def _make_ical(summary: str, seq: int) -> str: + return ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + f"SEQUENCE:{seq}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + f"SUMMARY:{summary}\r\n" + f"ORGANIZER:{organizer_addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{attendee_addr}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + + ## Create event, load it: event holds original content + tag=1 + event = await organizer_cal.save_event(_make_ical("Original", 0)) + auto_uids.append(uid) + await event.load() + assert event.schedule_tag is not None, ( + "server did not return Schedule-Tag after initial save" + ) + + ## Make a local conflicting edit before the concurrent organizer update + event.icalendar_component["SUMMARY"] = "Conflicting client change" + + ## Concurrent organizer PUT advances the server-side tag + await organizer_cal.save_event(_make_ical("Organizer update", 1)) + + ## PUT stale content with stale tag — server must reject with 412 + with pytest.raises(error.ScheduleTagMismatchError): + await event.save(increase_seqno=False) + + @pytest.mark.asyncio + async def test_schedule_tag_match_succeeds(self, scheduling_setup: Any) -> None: + """save() with the correct Schedule-Tag must succeed. + + Async counterpart of testScheduleTagMatchSucceeds. + Expected to fail: _async_put() does not yet send If-Schedule-Tag-Match, + so the conditional PUT is not exercised. 
+ """ + import uuid + + clients, principals, calendars, auto_uids = scheduling_setup + self._skip_unless_support("scheduling.schedule-tag") + if len(principals) < 2: + pytest.skip("need 2 principals for Schedule-Tag to be assigned") + + cal = calendars[0] + addr = await principals[0].get_vcal_address() + addr2 = await principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + ical = ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + "SUMMARY:Correct-tag test\r\n" + f"ORGANIZER:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr2}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + event = await cal.save_event(ical) + auto_uids.append(uid) + await event.load() + + tag_before = event.schedule_tag + assert tag_before is not None, "Server did not return Schedule-Tag" + + ## Minor update with the correct tag — must not raise + event.icalendar_component["SUMMARY"] = "Correct-tag test (updated)" + await event.save(increase_seqno=False) + + ## Tag must still be present after save + assert event.schedule_tag is not None, ( + "schedule_tag property disappeared after conditional save" + ) + # ==================== Dynamic Test Class Generation ==================== # diff --git a/tests/test_caldav.py b/tests/test_caldav.py index e3f7e7a7..b2ff8207 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -933,6 +933,312 @@ def testAcceptInviteUsernameEmailFallback(self): ## TODO: more testing ... what happens if deleting things from the ## inbox/outbox? + # ------------------------------------------------------------------ # + # Schedule-Tag tests (RFC 6638 section 3.2–3.3) # + # All tests below are expected to FAIL until the implementation is # + # complete. 
See docs/design/TODO_SCHEDULE_TAG.md and # + # https://github.com/python-caldav/caldav/issues/660 # + # ------------------------------------------------------------------ # + + def testScheduleTagReturnedOnSave(self): + """ + Saving a scheduling object (one with ORGANIZER/ATTENDEE) to the server + should return a Schedule-Tag header. After save() the tag must be + accessible via event.schedule_tag and stored in event.props. + + RFC 6638 section 3.2: the server MUST return Schedule-Tag on + responses to PUT requests for scheduling object resources. + """ + self._skip_unless_support("scheduling.schedule-tag") + if len(self.principals) < 2: + pytest.skip("need at least 1 principal") + + cal = self._getCalendar(0) + addr = self.principals[0].get_vcal_address() + addr2 = self.principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + ical = ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + "SUMMARY:Schedule-Tag test\r\n" + f"ORGANIZER:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr2}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + event = cal.save_event(ical) + + assert event.schedule_tag is not None, ( + "Server did not return Schedule-Tag header after PUT; " + "either the server does not support it or event.schedule_tag " + "property is not implemented" + ) + + def testScheduleTagStableOnPartstateUpdate(self): + """ + RFC 6638 section 3.2: when an attendee updates only their PARTSTAT + (participation status) the server MUST NOT change the Schedule-Tag. + + The tag before and after a PARTSTAT-only PUT must be identical. 
+ """ + self._skip_unless_support("scheduling.schedule-tag") + if len(self.principals) < 2: + pytest.skip("need 2 principals") + if not self.clients[1].features.is_supported("scheduling.mailbox.inbox-delivery"): + pytest.skip("server does not deliver iTIP requests to the inbox") + + organizer_cal = self._getCalendar(0) + attendee_cal = self._getCalendar(1) + + fresh_sched = sched_template % ( + str(uuid.uuid4()), + "%2i%2i%2i" % (random.randint(0, 23), random.randint(0, 59), random.randint(0, 59)), + random.randint(1, 28), + "%2i%2i%2i" % (random.randint(0, 23), random.randint(0, 59), random.randint(0, 59)), + ) + saved_event = organizer_cal.save_with_invites( + fresh_sched, [self.principals[0], self.principals[1].get_vcal_address()] + ) + self._auto_scheduled_event_uids.append(saved_event.id) + + ## Wait for the REQUEST invite (matching our UID) to land in attendee's inbox + invite = None + for _ in range(30): + for item in self.principals[1].schedule_inbox().get_items(): + item.load() + if item.is_invite_request() and item.id == saved_event.id: + invite = item + break + if invite: + break + time.sleep(1) + + if not invite: + pytest.skip("Invite not delivered to attendee inbox; cannot test PARTSTAT stability") + + ## Accept the invite — places the event in the attendee's calendar. + ## On auto-scheduling servers the event may already exist; _reply_to_invite_request + ## handles that by finding and updating it in place. + invite.accept_invite(calendar=attendee_cal) + + ## Find the event across all attendee calendars; on auto-scheduling servers it may be + ## in the default calendar rather than attendee_cal. Retry briefly for async servers. 
+ attendee_event = None + for _ in range(5): + for cal in self.principals[1].calendars(): + try: + attendee_event = cal.event_by_uid(saved_event.id) + break + except error.NotFoundError: + pass + if attendee_event: + break + time.sleep(1) + + assert attendee_event is not None, "Event not found in any attendee calendar after accept" + attendee_event.load() + tag_before = attendee_event.schedule_tag + assert tag_before is not None, "No Schedule-Tag on attendee's calendar event after accept" + + ## Do a second PARTSTAT-only change (ACCEPTED → TENTATIVE) and compare tags + attendee_event.change_attendee_status(partstat="TENTATIVE") + attendee_event.save() + attendee_event.load() + tag_after = attendee_event.schedule_tag + + assert tag_after is not None, "No Schedule-Tag on attendee's event after PARTSTAT update" + assert tag_before == tag_after, ( + f"Schedule-Tag changed after PARTSTAT-only update: " + f"{tag_before!r} → {tag_after!r}; " + "RFC 6638 section 3.2 requires the tag to be stable across " + "participation-status-only updates" + ) + + def testScheduleTagChangesOnOrganizerUpdate(self): + """ + RFC 6638 section 3.2: when the organizer sends a substantive update, + the server MUST update the Schedule-Tag on the attendee's copy. + + The tag on the attendee's resource before and after an organizer PUT + must differ. 
+ """ + self._skip_unless_support("scheduling.schedule-tag") + if len(self.principals) < 2: + pytest.skip("need 2 principals") + + organizer_cal = self._getCalendar(0) + + uid = str(uuid.uuid4()) + attendee_addr = self.principals[1].get_vcal_address() + organizer_addr = self.principals[0].get_vcal_address() + + seqno = 0 + + def _make_ical(summary): + nonlocal seqno + s = seqno + seqno += 1 + return ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + f"SEQUENCE:{s}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + f"SUMMARY:{summary}\r\n" + f"ORGANIZER:{organizer_addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{attendee_addr}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + + saved = organizer_cal.save_with_invites( + _make_ical("Original summary"), + [self.principals[0], attendee_addr], + ) + self._auto_scheduled_event_uids.append(uid) + + ## Find the attendee's copy and record the tag + attendee_event = None + for _ in range(30): + for cal in self.principals[1].calendars(): + for ev in cal.get_events(): + if ev.id == uid: + attendee_event = ev + break + if attendee_event: + break + if attendee_event: + break + time.sleep(1) + + if attendee_event is None: + pytest.skip("Event not delivered to attendee; cannot test tag change") + + attendee_event.load() + tag_before = attendee_event.schedule_tag + assert tag_before is not None, "No Schedule-Tag on attendee's copy before organizer update" + + ## Organizer sends a substantive update (changed summary) + organizer_cal.save_with_invites( + _make_ical("Updated summary"), + [self.principals[0], attendee_addr], + ) + + ## Poll until the attendee's copy reflects the update + for _ in range(30): + attendee_event.load() + if attendee_event.schedule_tag != tag_before: + break + time.sleep(1) + + tag_after = attendee_event.schedule_tag + assert tag_after != tag_before, ( + f"Schedule-Tag did not change after organizer 
update: still {tag_before!r}; " + "RFC 6638 section 3.2.10 requires the attendees tag to change after organizer PUT" + ) + + def testScheduleTagMismatchRaisesError(self): + """ + save() with a stale Schedule-Tag must raise ScheduleTagMismatchError + when the server returns 412. + + _put() sends If-Schedule-Tag-Match whenever self.schedule_tag is set. + We fetch the event (tag=1), make a local conflicting edit, then a + concurrent organizer PUT advances the server-side tag to 2 with a + different edit on the same field. The resulting divergence cannot be + auto-merged, so the server must reject with 412. + """ + self._skip_unless_support("scheduling.schedule-tag") + if len(self.principals) < 2: + pytest.skip("need 2 principals to cause a server-side tag advance") + + organizer_cal = self._getCalendar(0) + attendee_addr = self.principals[1].get_vcal_address() + organizer_addr = self.principals[0].get_vcal_address() + uid = str(uuid.uuid4()) + + def _make_ical(summary, seq): + return ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + f"SEQUENCE:{seq}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + f"SUMMARY:{summary}\r\n" + f"ORGANIZER:{organizer_addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{attendee_addr}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + + ## Create the event and load it; event now holds original content + tag=1 + event = organizer_cal.save_event(_make_ical("Original", 0)) + self._auto_scheduled_event_uids.append(uid) + event.load() + assert event.schedule_tag is not None, ( + "server did not return Schedule-Tag after initial save" + ) + + ## Make a local conflicting edit (simulating a client that diverges from + ## the server before a concurrent organizer update arrives). 
+ event.icalendar_component["SUMMARY"] = "Conflicting client change" + + ## Concurrent organizer PUT advances the server-side tag; the two edits + ## now conflict on SUMMARY and cannot be auto-merged. + organizer_cal.save_event(_make_ical("Organizer update", 1)) + + ## PUT the locally-modified stale version; server must reject with 412 + ## because the conflicting changes cannot be safely merged. + with pytest.raises(error.ScheduleTagMismatchError): + event.save(increase_seqno=False) + + def testScheduleTagMatchSucceeds(self): + """ + save() with the correct (current) tag must + succeed and the updated tag must be stored in event.props afterwards. + + Requires 2 principals: servers only assign a Schedule-Tag to events + that have external attendees (i.e. real scheduling objects). + """ + self._skip_unless_support("scheduling.schedule-tag") + if len(self.principals) < 2: + pytest.skip("need 2 principals for Schedule-Tag to be assigned") + + cal = self._getCalendar(0) + addr = self.principals[0].get_vcal_address() + addr2 = self.principals[1].get_vcal_address() + uid = str(uuid.uuid4()) + ical = ( + "BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:-//Test//Test//EN\r\n" + "BEGIN:VEVENT\r\n" + f"UID:{uid}\r\n" + "DTSTAMP:20260101T000000Z\r\n" + "DTSTART:20320601T100000Z\r\nDURATION:PT1H\r\n" + "SUMMARY:Correct-tag test\r\n" + f"ORGANIZER:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr}\r\n" + f"ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:{addr2}\r\n" + "END:VEVENT\r\nEND:VCALENDAR\r\n" + ) + event = cal.save_event(ical) + self._auto_scheduled_event_uids.append(uid) + event.load() + + tag_before = event.schedule_tag + assert tag_before is not None, "Server did not return Schedule-Tag" + + ## Modify something minor and save with the correct tag + event.icalendar_component["SUMMARY"] = "Correct-tag test (updated)" + event.save(increase_seqno=False) + + ## Must not have raised; tag in props must be present (may have changed) + assert event.schedule_tag is not None, ( 
+ "schedule_tag property disappeared after conditional save" + ) + ## Legacy: run TestScheduling against the top-level rfc6638_users config. if rfc6638_users: diff --git a/tests/test_schedule_tag.py b/tests/test_schedule_tag.py new file mode 100644 index 00000000..c702b321 --- /dev/null +++ b/tests/test_schedule_tag.py @@ -0,0 +1,244 @@ +#!/usr/bin/env python +""" +Unit and integration tests for Schedule-Tag support (RFC 6638). + +Unit tests (class TestScheduleTagUnit) use mocks and require no server. + +Integration tests are added to _TestSchedulingBase in test_caldav.py; +see testScheduleTag* methods there. + +RFC refs: + https://datatracker.ietf.org/doc/html/rfc6638#section-3.2 + https://datatracker.ietf.org/doc/html/rfc6638#section-3.3 +""" + +import uuid +from unittest import mock + +import pytest + +try: + from niquests.structures import CaseInsensitiveDict +except ImportError: + from requests.structures import CaseInsensitiveDict + +from caldav import Calendar, Event +from caldav.davclient import DAVClient +from caldav.elements import cdav, dav +from caldav.lib import error + +## Minimal scheduling event with ORGANIZER and ATTENDEE so that a server +## will treat it as a scheduling object resource and return Schedule-Tag. 
+SCHED_ICAL = """\ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Test//Test//EN +BEGIN:VEVENT +UID:{uid} +DTSTAMP:20260101T000000Z +DTSTART:20320601T100000Z +DURATION:PT1H +SUMMARY:Schedule-Tag test event +ORGANIZER:mailto:organizer@example.com +ATTENDEE;RSVP=TRUE;PARTSTAT=NEEDS-ACTION:mailto:attendee@example.com +END:VEVENT +END:VCALENDAR +""" + + +def _make_put_response(status_code, headers=None): + """Return a minimal mock requests.Response for a PUT.""" + r = mock.MagicMock() + r.status_code = status_code + r.headers = CaseInsensitiveDict(headers or {}) + r.reason = "OK" if status_code in (200, 201, 204) else "Precondition Failed" + r.content = b"" + return r + + +def _make_event_with_tag(schedule_tag='"tag-abc"'): + """ + Return an Event object that already has a schedule-tag cached in props, + as would happen after a load() or save() that received a Schedule-Tag header. + """ + client = DAVClient(url="http://cal.example.com/") + cal = Calendar(client=client, url="http://cal.example.com/cal/") + event = Event( + client=client, + url="http://cal.example.com/cal/event.ics", + data=SCHED_ICAL.format(uid=str(uuid.uuid4())), + parent=cal, + ) + if schedule_tag: + event.props[cdav.ScheduleTag.tag] = schedule_tag + return event + + +class TestScheduleTagUnit: + """ + Pure unit tests — no server communication. + All tests in this class are expected to FAIL until the implementation is complete. + """ + + # ------------------------------------------------------------------ # + # 1. Public property # + # ------------------------------------------------------------------ # + + def test_schedule_tag_property_returns_cached_value(self): + """ + CalendarObjectResource.schedule_tag should expose the cached tag. + + Currently fails because the property does not exist. 
+ """ + event = _make_event_with_tag('"tag-xyz"') + assert event.schedule_tag == '"tag-xyz"' + + def test_schedule_tag_property_returns_none_when_absent(self): + """ + schedule_tag should return None when the tag has never been received. + """ + client = DAVClient(url="http://cal.example.com/") + cal = Calendar(client=client, url="http://cal.example.com/cal/") + event = Event( + client=client, + url="http://cal.example.com/cal/event.ics", + data=SCHED_ICAL.format(uid=str(uuid.uuid4())), + parent=cal, + ) + assert event.schedule_tag is None + + # ------------------------------------------------------------------ # + # 2. Schedule-Tag captured from response headers # + # ------------------------------------------------------------------ # + + @mock.patch("caldav.davclient.requests.Session.request") + def test_schedule_tag_captured_from_put_response(self, mocked): + """ + After a PUT that returns a Schedule-Tag header, the tag should be + stored in event.props and accessible via event.schedule_tag. + + Currently fails because _put() does not read the Schedule-Tag header. + (Actually it DOES store it via self.props — but only after load(), not + after _put(). Verify the full round-trip here.) + """ + put_resp = _make_put_response(201, {"Schedule-Tag": '"initial-tag"'}) + mocked.return_value = put_resp + + client = DAVClient(url="http://cal.example.com/") + cal = Calendar(client=client, url="http://cal.example.com/cal/") + uid = str(uuid.uuid4()) + event = Event( + client=client, + url=f"http://cal.example.com/cal/{uid}.ics", + data=SCHED_ICAL.format(uid=uid), + parent=cal, + ) + event.save() + + assert event.schedule_tag == '"initial-tag"' + + # ------------------------------------------------------------------ # + # 3. 
If-Schedule-Tag-Match sent on save() # + # ------------------------------------------------------------------ # + + @mock.patch("caldav.davclient.requests.Session.request") + def test_if_schedule_tag_match_header_sent_when_tag_cached(self, mocked): + """ + save() must send an If-Schedule-Tag-Match + request header equal to the cached schedule-tag. + """ + ok_resp = _make_put_response(204, {"Schedule-Tag": '"tag-abc"'}) + mocked.return_value = ok_resp + + event = _make_event_with_tag('"tag-abc"') + event.save() + + # Inspect the actual HTTP call + call_kwargs = mocked.call_args + # requests.Session.request is called as (method, url, **kwargs) + # headers end up in call_args.kwargs["headers"] or positional args + sent_headers = call_kwargs[1].get( + "headers", call_kwargs[0][2] if len(call_kwargs[0]) > 2 else {} + ) + assert "If-Schedule-Tag-Match" in sent_headers, ( + "If-Schedule-Tag-Match header was not sent; save() is still a no-op" + ) + assert sent_headers["If-Schedule-Tag-Match"] == '"tag-abc"' + + @mock.patch("caldav.davclient.requests.Session.request") + def test_if_schedule_tag_match_not_sent_when_flag_false(self, mocked): + """ + save() without if_schedule_tag_match=True must NOT send the header, + even when a tag is cached. + """ + ok_resp = _make_put_response(204) + mocked.return_value = ok_resp + + event = _make_event_with_tag(None) + event.save() + + call_kwargs = mocked.call_args + sent_headers = call_kwargs[1].get( + "headers", call_kwargs[0][2] if len(call_kwargs[0]) > 2 else {} + ) + assert "If-Schedule-Tag-Match" not in sent_headers + + # ------------------------------------------------------------------ # + # 4. Load before PUT when tag not yet cached # + # ------------------------------------------------------------------ # + + ## Removed, it's moot with the current design + + # ------------------------------------------------------------------ # + # 5. 
412 raises ScheduleTagMismatchError # + # ------------------------------------------------------------------ # + + @mock.patch("caldav.davclient.requests.Session.request") + def test_stale_schedule_tag_raises_mismatch_error(self, mocked): + """ + When the server returns 412 in response to an If-Schedule-Tag-Match + PUT, the client must raise ScheduleTagMismatchError (a subclass of + PutError). + + Currently fails: ScheduleTagMismatchError does not exist, and the + generic PutError is raised instead (or not at all because the header + is never sent). + """ + mocked.return_value = _make_put_response(412) + + event = _make_event_with_tag('"stale-tag"') + with pytest.raises(error.ScheduleTagMismatchError): + event.save() + + @mock.patch("caldav.davclient.requests.Session.request") + def test_412_without_schedule_tag_raises_put_error(self, mocked): + """ + A plain 412 (not from If-Schedule-Tag-Match) should still raise + the generic PutError, not ScheduleTagMismatchError. + """ + mocked.return_value = _make_put_response(412) + + event = _make_event_with_tag(None) + # save() without if_schedule_tag_match — any 412 is a plain PutError + with pytest.raises(error.PutError): + event.save() + + # ------------------------------------------------------------------ # + # 6. Tag updated in props after successful conditional save # + # ------------------------------------------------------------------ # + + @mock.patch("caldav.davclient.requests.Session.request") + def test_schedule_tag_updated_in_props_after_successful_save(self, mocked): + """ + After a successful conditional PUT the server may return a new + Schedule-Tag. The updated tag must replace the old cached value. 
+ """ + new_tag = '"tag-after-save"' + mocked.return_value = _make_put_response(204, {"Schedule-Tag": new_tag}) + + event = _make_event_with_tag('"tag-before-save"') + event.save() + + assert event.schedule_tag == new_tag, ( + "schedule_tag prop not updated after successful conditional save" + ) From 77bbf9a26aa4c766d1724649605342e68af975e5 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Thu, 23 Apr 2026 18:30:23 +0200 Subject: [PATCH 12/17] test: lift stalwart ratelimits Claude has been trying to disable rate-limiting in Stalwart. I still got up some RateLimit errors while doing testing on the scheduling, but while asking Claude to continue debugging it, the problems disappeared and never came back. Hm. prompt: The stalwart docker image stops working every now and then when running test code due to ratelimiting errors. Any possibilities for deactivating this in the docker image? (followup-promps complaining that it isn't working) Co-Authored-By: Claude Sonnet 4.6 --- .../stalwart/setup_stalwart.sh | 18 ++++++++++++++++++ tests/test_async_integration.py | 7 ++++--- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/tests/docker-test-servers/stalwart/setup_stalwart.sh b/tests/docker-test-servers/stalwart/setup_stalwart.sh index e71b2b6b..39e771a7 100755 --- a/tests/docker-test-servers/stalwart/setup_stalwart.sh +++ b/tests/docker-test-servers/stalwart/setup_stalwart.sh @@ -124,6 +124,24 @@ for i in $(seq 1 $max_caldav_attempts); do sleep 2 done +echo "" +echo "Disabling rate limiting for test environment..." +# Stalwart applies HTTP and authentication rate limits by default, which causes +# 429 responses during rapid test runs. Append generous limits to config inside +# the container, then reload. 
+docker exec "$CONTAINER_NAME" sh -c 'cat >> /opt/stalwart/etc/config.toml << '"'"'EOF'"'"' + +[http] +rate-limit-anonymous = { count = 999999999, period = "1m" } +rate-limit-authenticated = { count = 999999999, period = "1m" } +EOF' +RELOAD_RESULT=$(curl -s -u "${ADMIN_USER}:${ADMIN_PASSWORD}" "${API_BASE}/reload") +if echo "$RELOAD_RESULT" | grep -q '"errors":{}'; then + echo "Rate limiting disabled (config reloaded)" +else + echo "Warning: config reload result: $RELOAD_RESULT" +fi + echo "" echo "Stalwart setup complete!" echo "" diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index d7f2f539..77e9da38 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -503,8 +503,6 @@ def _skip_unless_support(self, feature: str) -> None: @pytest_asyncio.fixture async def scheduling_setup(self) -> Any: """Create async clients/principals/calendars for each scheduling user.""" - import uuid - from caldav.aio import get_async_davclient from .fixture_helpers import aget_or_create_test_calendar, cleanup_calendar_objects @@ -523,11 +521,14 @@ async def scheduling_setup(self) -> Any: await client.close() continue principal = await client.principal() + ## Use a fixed cal_id (not a random UUID) so the same calendar is + ## reused across test runs and does not accumulate on the server. + ## This mirrors the sync scheduling tests which use fixed calendar names. cal, _ = await aget_or_create_test_calendar( client, principal, calendar_name=f"async scheduling test {i}", - cal_id=f"asyncschedtest{uuid.uuid4().hex[:8]}", + cal_id=f"asyncschedtest{i}", ) if cal is None: await client.close() From 5db775c4cdfaf0cbedc72e6d7c8604eb3c3b89bf Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Tue, 21 Apr 2026 09:36:13 +0200 Subject: [PATCH 13/17] feat: make more methods async-aware and refactor Invite-handling (accept_invite, decline_invite, tentatively_accept_invite) now returns awaitable coroutines when used in async mode. 
SynchronizableCalendarObjectCollection.sync() has also been dealt with, together with `MailBox.get_items()` and `DAVObject.children()` Also adds design docs: - docs/design/TODO_SCHEDULE_TAG.md (analysis and implementation plan, refs https://github.com/python-caldav/caldav/issues/660) - docs/design/TODO_COMPATIBILITY_HINTS.md (FeatureSet cleanup analysis, refs https://github.com/python-caldav/caldav/issues/659) The code logic here is partly human-created, mostly AI-created, certainly under human guidance. The final decision on how to handle async in 3.x has been carved in stone by a human. Test code is predominantly AI-created. The AI-generation involves tedious code duplication work and tedious routine refactoring, chances for mistakes are bigger when doing it by hand than by AI. I've been looking through the changes, and I trust the tests to uncover any errors slipping through. Some of the (many) commits dealing with this have been squashed into this commit. The return type of cached properties should always be an awaitable coroutine in async mode. prompt: Please make an async version of the get_items method in `caldav/collections.py` followup-prompt: I don't want workarounds in _async_get_items for async-unaware get_objects_by_sync_token. Fix _async_get_items assuming get_objects_by_sync_token will be made async-aware. followup-prompt: Please make an async-version of get_objects_by_sync_token prompt: Please investigate those failures: FAILED tests/test_async_integration.py::TestAsyncSchedulingForStalwart (...) prompt: make an async version of _reply_to_invite_request in `caldav/calendarobjectresource.py` prompt: In collection.py and calendarobjectresource.py and possibly in some of the other files as well, many methods are split up into a sync version and an async version. Please make appropriate type-hints for all methods that in async mode will yield a coroutine rather than an object Prompt: Let's refactor the async methods where it's possible. 
The existing pattern goes like this: * we have a sync version `foo` or `_foo` * we have an async version of the same method, `_async_foo` * `foo` is *most of the time* doing `if self.is_async_client: return self._async_foo(...)` I'd like to reduce the amount of duplicated code as well as to split out the IO-logic as much as possible. As for now, I want to go with this pattern: * `foo` should *always* do the `if self.is_async_client: return self._async_foo(...)`-logic * `self._async_foo` should never be called upon other places * Quite many of the methods are doing some preparations, firing off some other method causing I/O, and then doing some processing of the data returned from the server. Other methods are more complex, having mutliple code lines causing I/O. * For methods containing significant amount of logic (like, two or more code lines) before doing any IO, the `if self.is_async_client: return self._async_foo(...)`-logic should be moved to the last possible point in the method. * For methods containing significant amount of logic after doing the IO, split the logic out in a `_post_foo`-method. 
(the rules above was later moved to a document and tweaked a bit) prompt: Apply the rules from `docs/design/ASYNC_DUAL_MODE.md` for the `def sync` and `def async_sync` in `collection.py` Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 8 + caldav/aio.py | 4 +- caldav/async_davclient.py | 120 ++----- caldav/base_client.py | 18 + caldav/calendarobjectresource.py | 227 ++++++++---- caldav/collection.py | 587 ++++++++++++++++++------------- caldav/compatibility_hints.py | 43 +-- caldav/davclient.py | 37 +- caldav/davobject.py | 168 ++++----- caldav/response.py | 343 +++++++++++++++++- tests/test_async_davclient.py | 20 +- tests/test_async_integration.py | 36 +- tests/test_caldav.py | 4 +- tests/test_caldav_unit.py | 13 +- 14 files changed, 1039 insertions(+), 589 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd6ad35d..1f97124a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,14 @@ This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0 * Open-ended time-range search compatibility hints: new `search.time-range.open`, `search.time-range.open.end`, `search.time-range.open.start`, and `search.time-range.open.start.duration` features (RFC4791 section 9.9). Old `no_search_openended` flag and `search.time-range.todo.duration`/`search.time-range.todo.open-start` features migrated. `testTodoSearch` updated to use `is_supported("search.time-range.open.end")` instead of the old compatibility flag. * RFC 6638 scheduling feature-detection infrastructure: new `scheduling`, `scheduling.mailbox`, and `scheduling.calendar-user-address-set` compatibility hints; legacy `no_scheduling` flags migrated. Default scheduling hints set for all the servers tested. 
+* New `scheduling.schedule-tag` compatibility flag and tests covering RFC 6638 §3.2–3.3: `testScheduleTagReturnedOnSave`, `testScheduleTagStableOnPartstateUpdate`, `testScheduleTagChangesOnOrganizerUpdate`, `testScheduleTagMismatchRaisesError`, `testScheduleTagMatchSucceeds` — plus async counterparts of all five. +* New `scheduling.schedule-tag.stable-partstat` compatibility hint: RFC6638 §3.2 requires the Schedule-Tag to remain unchanged when an attendee performs a PARTSTAT-only update; CCS does not comply and is marked `unsupported`. `testScheduleTagStableOnPartstateUpdate` (and its async counterpart) now skip on non-compliant servers. +* New `scheduling.auto-schedule` compatibility flag (see Added section). Server entries updated: Baikal, Cyrus, DAViCal, Davis, CCS, Nextcloud, Stalwart gain explicit `inbox-delivery` + `auto-schedule` values; Zimbra: `inbox-delivery=False` + `auto-schedule=True`. +* Scheduling freebusy-query: `scheduling.freebusy-query` feature flag (RFC 6638 outbox POST); `freebusy-query.rfc4791` merged into `freebusy-query` (RFC 4791 REPORT). `testFreeBusy` added to `_TestSchedulingBase`; async counterpart added to `_AsyncTestSchedulingBase`. +* `search.time-range.todo.strict` compatibility flag: server must not return VTODOs whose time span is entirely outside the searched range; xandikos is marked `broken`. +* New `save-load.property.related-to`, `search.time-range.todo.duration`, and `search.time-range.todo.open-start` feature flags replacing old-style flags. RFC links added to all FEATURES entries. +* `_AsyncTestSchedulingBase` added: async counterpart of `_TestSchedulingBase` with `test_invite_and_respond` and `test_freebusy`; `TestAsyncSchedulingFor` classes generated for each server with `scheduling_users` configured. 
+* New `scheduling.schedule-tag.stable-partstat` compatibility hint: RFC6638 §3.2 requires the Schedule-Tag to remain unchanged when an attendee performs a PARTSTAT-only update; CCS does not comply and is marked `unsupported`. `testScheduleTagStableOnPartstateUpdate` (and its async counterpart) now skip on non-compliant servers. * Calendar owner example (`examples/calendar_owner_examples.py`) demonstrating how to retrieve the owner of a calendar via `DAV:owner` and resolve their calendar-user address. `testFindCalendarOwner` now exercises the full owner → principal → `get_vcal_address()` chain. Closes https://github.com/python-caldav/caldav/issues/544 * `testInviteAndRespond` implemented end-to-end: organizer creates an event, invites an attendee, attendee accepts, and the organizer verifies the updated `PARTSTAT`. Per-server compatibility flags applied for known quirks (Baikal, Cyrus, SOGo). * Multi-user RFC 6638 scheduling tests wired into the Docker server setup for Cyrus and Baikal (pre-populated `user1`–`user3`/`user1`–`user5`). 
diff --git a/caldav/aio.py b/caldav/aio.py index 55da69dd..6908946e 100644 --- a/caldav/aio.py +++ b/caldav/aio.py @@ -27,7 +27,7 @@ """ # Import the async client (this is truly async) -from caldav.async_davclient import AsyncDAVClient, AsyncDAVResponse, get_calendar, get_calendars +from caldav.async_davclient import AsyncDAVClient, DAVResponse, get_calendar, get_calendars from caldav.async_davclient import get_davclient as get_async_davclient from caldav.calendarobjectresource import CalendarObjectResource, Event, FreeBusy, Journal, Todo from caldav.collection import ( @@ -59,7 +59,7 @@ __all__ = [ # Client "AsyncDAVClient", - "AsyncDAVResponse", + "DAVResponse", "get_async_davclient", # Factory functions (async equivalents of caldav.get_calendar / get_calendars) "get_calendar", diff --git a/caldav/async_davclient.py b/caldav/async_davclient.py index 84e4b6fb..3b76cad3 100644 --- a/caldav/async_davclient.py +++ b/caldav/async_davclient.py @@ -71,24 +71,8 @@ def auth_flow(self, request): from caldav.compatibility_hints import FeatureSet from caldav.lib import error from caldav.lib.python_utilities import to_wire -from caldav.lib.url import URL -from caldav.protocol.types import ( - CalendarQueryResult, - PropfindResult, -) -from caldav.protocol.xml_builders import ( - _build_calendar_multiget_body, - _build_calendar_query_body, - _build_propfind_body, - _build_sync_collection_body, -) -from caldav.protocol.xml_parsers import ( - _parse_calendar_query_response, - _parse_propfind_response, - _parse_sync_collection_response, -) from caldav.requests import HTTPBearerAuth -from caldav.response import BaseDAVResponse +from caldav.response import CalendarQueryResult, DAVResponse, PropfindResult log = logging.getLogger("caldav") @@ -98,31 +82,6 @@ def auth_flow(self, request): from typing import Self -class AsyncDAVResponse(BaseDAVResponse): - """ - Response from an async DAV request. - - This class handles the parsing of DAV responses, including XML parsing. 
- End users typically won't interact with this class directly. - - Response parsing methods are inherited from BaseDAVResponse. - - New protocol-based attributes: - results: Parsed results from protocol layer (List[PropfindResult], etc.) - sync_token: Sync token from sync-collection response - """ - - # Protocol-based parsed results (new interface) - results: list[PropfindResult | CalendarQueryResult] | None = None - sync_token: str | None = None - - def __init__(self, response: Any, davclient: Optional["AsyncDAVClient"] = None) -> None: - """Initialize from httpx.Response or niquests.Response.""" - self._init_from_response(response, davclient) - - # Response parsing methods are inherited from BaseDAVResponse - - class AsyncDAVClient(BaseDAVClient): """ Async WebDAV/CalDAV client. @@ -366,7 +325,7 @@ async def request( body: str = "", headers: Mapping[str, str] | None = None, rate_limit_time_slept: float = 0, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send an async HTTP request, with optional rate-limit sleep-and-retry. @@ -404,7 +363,7 @@ async def _async_request( method: str = "GET", body: str = "", headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Async HTTP request implementation with auth negotiation. 
@@ -463,7 +422,7 @@ async def _async_request( if auth_types: msg += "\nSupported authentication types: {}".format(", ".join(auth_types)) log.warning(msg) - response = AsyncDAVResponse(r, self) + response = DAVResponse(r, self) except Exception: # Workaround for servers that abort connection on unauthenticated requests # ref https://github.com/python-caldav/caldav/issues/158 @@ -498,7 +457,7 @@ async def _async_request( # Retry original request with auth request_kwargs["auth"] = self.auth r = await self.session.request(**request_kwargs) - response = AsyncDAVResponse(r, self) + response = DAVResponse(r, self) # Handle 429/503 rate-limit responses error.raise_if_rate_limited(r.status_code, str(url_obj), r.headers.get("Retry-After")) @@ -542,7 +501,7 @@ async def propfind( depth: int = 0, headers: Mapping[str, str] | None = None, props: list[str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a PROPFIND request. @@ -554,7 +513,7 @@ async def propfind( props: List of property names to request (uses protocol layer). Returns: - AsyncDAVResponse with results attribute containing parsed PropfindResult list. + DAVResponse with results attribute containing parsed PropfindResult list. """ # Use protocol layer to build XML if props provided if props is not None and not body: @@ -580,7 +539,7 @@ async def report( body: str = "", depth: int | None = 0, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a REPORT request. @@ -592,7 +551,7 @@ async def report( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ final_headers = self._build_method_headers("REPORT", depth, headers) return await self.request(url or str(self.url), "REPORT", body, final_headers) @@ -601,7 +560,7 @@ async def options( self, url: str | None = None, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send an OPTIONS request. 
@@ -610,7 +569,7 @@ async def options( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ return await self.request(url or str(self.url), "OPTIONS", "", headers) @@ -621,7 +580,7 @@ async def proppatch( url: str, body: str = "", headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a PROPPATCH request. @@ -631,7 +590,7 @@ async def proppatch( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ final_headers = self._build_method_headers("PROPPATCH", extra_headers=headers) return await self.request(url, "PROPPATCH", body, final_headers) @@ -641,7 +600,7 @@ async def mkcol( url: str, body: str = "", headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a MKCOL request. @@ -653,7 +612,7 @@ async def mkcol( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ final_headers = self._build_method_headers("MKCOL", extra_headers=headers) return await self.request(url, "MKCOL", body, final_headers) @@ -663,7 +622,7 @@ async def mkcalendar( url: str, body: str = "", headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a MKCALENDAR request. @@ -673,7 +632,7 @@ async def mkcalendar( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ final_headers = self._build_method_headers("MKCALENDAR", extra_headers=headers) return await self.request(url, "MKCALENDAR", body, final_headers) @@ -683,7 +642,7 @@ async def put( url: str, body: str, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a PUT request. @@ -693,7 +652,7 @@ async def put( headers: Additional headers. 
Returns: - AsyncDAVResponse + DAVResponse """ return await self.request(url, "PUT", body, headers) @@ -702,7 +661,7 @@ async def post( url: str, body: str, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a POST request. @@ -712,7 +671,7 @@ async def post( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ return await self.request(url, "POST", body, headers) @@ -720,7 +679,7 @@ async def delete( self, url: str, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Send a DELETE request. @@ -729,7 +688,7 @@ async def delete( headers: Additional headers. Returns: - AsyncDAVResponse + DAVResponse """ return await self.request(url, "DELETE", "", headers) @@ -747,7 +706,7 @@ async def calendar_query( expand: bool = False, depth: int = 1, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Execute a calendar-query REPORT to search for calendar objects. @@ -763,7 +722,7 @@ async def calendar_query( headers: Additional headers. Returns: - AsyncDAVResponse with results containing List[CalendarQueryResult]. + DAVResponse with results containing List[CalendarQueryResult]. """ body, _ = _build_calendar_query_body( @@ -797,7 +756,7 @@ async def calendar_multiget( hrefs: list[str] | None = None, depth: int = 1, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Execute a calendar-multiget REPORT to fetch specific calendar objects. @@ -808,7 +767,7 @@ async def calendar_multiget( headers: Additional headers. Returns: - AsyncDAVResponse with results containing List[CalendarQueryResult]. + DAVResponse with results containing List[CalendarQueryResult]. 
""" body = _build_calendar_multiget_body(hrefs or []) @@ -835,7 +794,7 @@ async def sync_collection( props: list[str] | None = None, depth: int = 1, headers: Mapping[str, str] | None = None, - ) -> AsyncDAVResponse: + ) -> DAVResponse: """ Execute a sync-collection REPORT for efficient synchronization. @@ -847,7 +806,7 @@ async def sync_collection( headers: Additional headers. Returns: - AsyncDAVResponse with results containing SyncCollectionResult. + DAVResponse with results containing SyncCollectionResult. """ body = _build_sync_collection_body(sync_token=sync_token, props=props) @@ -869,6 +828,12 @@ async def sync_collection( return response + def _value_or_coroutine(self, value): + return self._async_value_or_coroutine(value) + + async def _async_value_or_coroutine(self, value): + return value + # ==================== Authentication Helpers ==================== def build_auth_object(self, auth_types: list[str] | None = None) -> None: @@ -1121,21 +1086,6 @@ async def principal(self) -> "Principal": """ return await self.get_principal() - def calendar(self, **kwargs: Any) -> "Calendar": - """Returns a calendar object. - - Typically, a URL should be given as a named parameter (url) - - No network traffic will be initiated by this method. - - If you don't know the URL of the calendar, use - ``await client.get_principal().get_calendars()`` instead, or - ``await client.get_calendars()`` - """ - from caldav.collection import Calendar - - return Calendar(client=self, **kwargs) - async def check_dav_support(self) -> str | None: """ Check if the server supports DAV. diff --git a/caldav/base_client.py b/caldav/base_client.py index e2163af2..0afeab64 100644 --- a/caldav/base_client.py +++ b/caldav/base_client.py @@ -58,6 +58,21 @@ class BaseDAVClient(ABC): features: FeatureSet | None = None url: Any = None # URL object, set by subclasses + def calendar(self, **kwargs): + """Returns a calendar object. 
+ + Typically, a URL should be given as a named parameter (url) + + No network traffic will be initiated by this method. + + If you don't know the URL of the calendar, use + client.principal().calendar(...) instead, or + client.principal().get_calendars() + """ + from caldav.collection import Calendar + + return Calendar(client=self, **kwargs) + def _make_absolute_url(self, url: str) -> str: """Make a URL absolute by joining with the client's base URL if needed. @@ -71,6 +86,9 @@ def _make_absolute_url(self, url: str) -> str: return str(self.url.join(url)) return url + def _value_or_coroutine(self, value): + return value + def extract_auth_types(self, header: str) -> set[str]: """Extract authentication types from WWW-Authenticate header. diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py index a4e55dbf..369d5314 100644 --- a/caldav/calendarobjectresource.py +++ b/caldav/calendarobjectresource.py @@ -29,7 +29,7 @@ from .davclient import DAVClient -from collections.abc import Callable, Container +from collections.abc import Callable, Container, Coroutine from typing import Literal if sys.version_info < (3, 11): @@ -178,7 +178,7 @@ def set_end(self, end, move_dtstart=False): i.add(self._ENDPARAM, end) - def add_organizer(self, organizer=None) -> None: + def add_organizer(self, organizer=None) -> "None | Coroutine[Any, Any, None]": """ Add (or replace) the ORGANIZER field on the calendar component. 
@@ -222,7 +222,7 @@ def add_organizer(self, organizer=None) -> None: async def _async_add_organizer(self) -> None: """Async implementation of add_organizer() for async clients.""" principal = await self.client.principal() - self._set_organizer(await principal._async_get_vcal_address()) + self._set_organizer(await principal.get_vcal_address()) def _set_organizer(self, organizer_obj: vCalAddress) -> None: """Write the ORGANIZER property onto the icalendar component (sync, no I/O).""" @@ -320,7 +320,7 @@ def expand_rrule(self, start: datetime, end: datetime, include_completed: bool = def set_relation( self, other, reltype=None, set_reverse=True - ) -> None: ## TODO: logic to find and set siblings? + ) -> "None | Coroutine[Any, Any, None]": ## TODO: logic to find and set siblings? """ Sets a relation between this object and another object (given by uid or object). """ @@ -411,7 +411,7 @@ def get_relatives( relfilter: Callable[[Any], bool] | None = None, fetch_objects: bool = True, ignore_missing: bool = True, - ) -> defaultdict[str, set[str]]: + ) -> "defaultdict[str, set[str]] | Coroutine[Any, Any, defaultdict[str, set]]": """ By default, loads all objects pointed to by the RELATED-TO property and loads the related objects. @@ -491,6 +491,8 @@ async def _async_get_relatives( def _set_reverse_relation(self, other, reltype): ## TODO: handle RFC9253 better! 
Particularly next/first-lists + if self.is_async_client: + return self._async_set_reverse_relation(other, reltype) reverse_reltype = self.RELTYPE_REVERSE_MAP.get(reltype) if not reverse_reltype: logging.error("Reltype %s not supported in object uid %s" % (reltype, self.id)) @@ -498,7 +500,7 @@ def _set_reverse_relation(self, other, reltype): other.set_relation(self, reverse_reltype, other) async def _async_set_reverse_relation(self, other, reltype): - """Async version of _set_reverse_relation.""" + """Async implementation of _set_reverse_relation.""" reverse_reltype = self.RELTYPE_REVERSE_MAP.get(reltype) if not reverse_reltype: logging.error("Reltype %s not supported in object uid %s" % (reltype, self.id)) @@ -506,6 +508,8 @@ async def _async_set_reverse_relation(self, other, reltype): await other.set_relation(self, reverse_reltype, other) def _verify_reverse_relation(self, other, reltype) -> tuple: + if self.is_async_client: + return self._async_verify_reverse_relation(other, reltype) revreltype = self.RELTYPE_REVERSE_MAP[reltype] ## TODO: special case FIRST/NEXT needs special handling other_relations = other.get_relatives(fetch_objects=False, reltypes={revreltype}) @@ -519,7 +523,7 @@ def _verify_reverse_relation(self, other, reltype) -> tuple: return False async def _async_verify_reverse_relation(self, other, reltype) -> tuple: - """Async version of _verify_reverse_relation.""" + """Async implementation of _verify_reverse_relation.""" revreltype = self.RELTYPE_REVERSE_MAP[reltype] other_relations = await other.get_relatives(fetch_objects=False, reltypes={revreltype}) my_uid = self._get_uid_cheap() or str(self.icalendar_component["uid"]) @@ -530,22 +534,22 @@ async def _async_verify_reverse_relation(self, other, reltype) -> tuple: async def _async_handle_reverse_relations( self, verify: bool = False, fix: bool = False, pdb: bool = False ) -> list: - """Async version of _handle_reverse_relations for async clients.""" + """Async implementation of 
_handle_reverse_relations.""" ret = [] assert verify or fix relations = await self.get_relatives() for reltype in relations: for other in relations[reltype]: if verify: - foobar = await self._async_verify_reverse_relation(other, reltype) + foobar = await self._verify_reverse_relation(other, reltype) if foobar: ret.append(foobar) if pdb: breakpoint() if fix: - await self._async_set_reverse_relation(other, reltype) + await self._set_reverse_relation(other, reltype) elif fix: - await self._async_set_reverse_relation(other, reltype) + await self._set_reverse_relation(other, reltype) return ret def _handle_reverse_relations( @@ -563,6 +567,8 @@ def _handle_reverse_relations( Assume all reverse relations are missing. Used internally when creating new objects. """ + if self.is_async_client: + return self._async_handle_reverse_relations(verify, fix, pdb) ret = [] assert verify or fix relations = self.get_relatives() @@ -715,7 +721,16 @@ def add_attendee(self, attendee, no_default_parameters: bool = False, **paramete attendee_obj = vCalAddress() ## TODO: if possible, check that the attendee exists - ## TODO: check that the attendee will not be duplicated in the event. 
+ ievent = self.icalendar_component + existing = ievent.get("attendee", []) + if isinstance(existing, str): + existing = [existing] + + def _strip_mailto(x): + return str(x).lower().replace("mailto:", "") + + if any(_strip_mailto(a) == _strip_mailto(attendee_obj) for a in existing): + return if not no_default_parameters: ## Sensible defaults: attendee_obj.params["partstat"] = "NEEDS-ACTION" @@ -731,7 +746,6 @@ def add_attendee(self, attendee, no_default_parameters: bool = False, **paramete else: params[new_key] = parameters[key] attendee_obj.params.update(params) - ievent = self.icalendar_component ievent.add("attendee", attendee_obj) def is_invite_request(self) -> bool: @@ -750,33 +764,39 @@ def is_invite_reply(self) -> bool: self.load(only_if_unloaded=True) return self.icalendar_instance.get("method", None) == "REPLY" - def accept_invite(self, calendar: Optional["Calendar"] = None) -> None: + def accept_invite( + self, calendar: Optional["Calendar"] = None + ) -> "None | Coroutine[Any, Any, None]": """ Accepts an invite - to be used on an invite object. + For async clients, returns a coroutine that must be awaited. """ - self._reply_to_invite_request("ACCEPTED", calendar) + return self._reply_to_invite_request("ACCEPTED", calendar) - def decline_invite(self, calendar: Optional["Calendar"] = None) -> None: + def decline_invite( + self, calendar: Optional["Calendar"] = None + ) -> "None | Coroutine[Any, Any, None]": """ Declines an invite - to be used on an invite object. + For async clients, returns a coroutine that must be awaited. """ - self._reply_to_invite_request("DECLINED", calendar) + return self._reply_to_invite_request("DECLINED", calendar) - def tentatively_accept_invite(self, calendar: Any | None = None) -> None: + def tentatively_accept_invite( + self, calendar: Any | None = None + ) -> "None | Coroutine[Any, Any, None]": """ Tentatively accept an invite - to be used on an invite object. + For async clients, returns a coroutine that must be awaited. 
""" - self._reply_to_invite_request("TENTATIVE", calendar) + return self._reply_to_invite_request("TENTATIVE", calendar) ## TODO: DELEGATED is also a valid option, and for vtodos the ## partstat can also be set to COMPLETED and IN-PROGRESS. - def _reply_to_invite_request(self, partstat, calendar) -> None: + def _reply_to_invite_request(self, partstat, calendar) -> "None | Coroutine[Any, Any, None]": if self.is_async_client: - raise NotImplementedError( - "accept_invite/decline_invite/tentatively_accept_invite are not yet supported " - "for async clients" - ) + return self._async_reply_to_invite_request(partstat, calendar) error.assert_(self.is_invite_request()) if not calendar: calendar = self.client.principal().get_calendars()[0] @@ -817,6 +837,75 @@ def _reply_to_invite_request(self, partstat, calendar) -> None: else: self.save() + async def _async_reply_to_invite_request(self, partstat: str, calendar) -> None: + """Async implementation of _reply_to_invite_request().""" + foo = self.load(only_if_unloaded=True) + ## TODO: this is a mess + if not isinstance(foo, CalendarObjectResource): + await foo + error.assert_(self.icalendar_instance.get("method", None) == "REQUEST") + principal = await self.client.principal() + if not calendar: + calendar = (await principal.get_calendars())[0] + self.icalendar_instance.pop("METHOD") + ## change_attendee_status() resolves the attendee from self.client.principal() + ## internally; that returns a coroutine in async mode so we resolve addresses here + ## and pass them explicitly. 
+ addresses_el = await principal.get_property( + cdav.CalendarUserAddressSet(), parse_props=False + ) + if addresses_el is not None: + addresses = sorted(list(addresses_el), key=lambda x: -int(x.get("preferred", 0))) + attendee_addresses = [x.text for x in addresses] + else: + username = getattr(self.client, "username", None) + if username and "@" in str(username): + attendee_addresses = ["mailto:" + username] + else: + raise error.NotFoundError( + "Server does not provide the calendar-user-address-set property " + "(RFC6638 §2.4.1) and the client username is not an email address. " + "Cannot determine which attendee to update." + ) + cnt = 0 + for addr in attendee_addresses: + try: + self.change_attendee_status(addr, partstat=partstat) + cnt += 1 + except error.NotFoundError: + pass + if not cnt: + raise error.NotFoundError("Principal is not invited to event") + error.assert_(cnt == 1) + uid = self.id + if uid and self.client.features.is_supported("scheduling.auto-schedule"): + for cal in await principal.calendars(): + try: + existing = await cal.event_by_uid(uid) + await existing.load() + cnt2 = 0 + for addr in attendee_addresses: + try: + existing.change_attendee_status(addr, partstat=partstat) + cnt2 += 1 + except error.NotFoundError: + pass + if not cnt2: + raise error.NotFoundError("Principal is not invited to existing event") + await existing.save() + return + except error.NotFoundError: + pass + try: + await calendar.add_event(self.data) + except Exception: + await self.load() + outbox = await principal.schedule_outbox() + if calendar.url != outbox.url: + await self._reply_to_invite_request(partstat, calendar=outbox) + else: + await self.save() + def copy(self, keep_uid: bool = False, new_parent: Any | None = None) -> Self: """ Events, todos etc can be copied within the same calendar, to another @@ -835,7 +924,7 @@ def copy(self, keep_uid: bool = False, new_parent: Any | None = None) -> Self: ## TODO: move get-logics to a load_by_get method. 
## The load method should deal with "server quirks". - def load(self, only_if_unloaded: bool = False) -> Self: + def load(self, only_if_unloaded: bool = False) -> "Self | Coroutine[Any, Any, Self]": """ (Re)load the object from the caldav server. @@ -848,6 +937,13 @@ def load(self, only_if_unloaded: bool = False) -> Self: Example (async): await obj.load() """ + ## This is so bad ... the `self.load(only_if_unloaded)` has + ## been peppered all over the place, at places where no + ## server communication is expected, just for the oddball + ## case where an object expected to contain data only contains + ## an URL. This causes huge problems when trying to do the + ## async work by isolating the IO-causing methods. + # Check if already loaded BEFORE delegating to async # This avoids returning a coroutine when no work is needed if only_if_unloaded and self.is_loaded(): @@ -921,7 +1017,7 @@ async def _async_load(self, only_if_unloaded: bool = False) -> Self: if uid: # Fallback 1: try multiget (REPORT may work even when GET fails) try: - return await self._async_load_by_multiget() + return await self.load_by_multiget() except Exception: pass # Fallback 2: re-fetch by UID (server may have changed the URL) @@ -938,7 +1034,7 @@ async def _async_load(self, only_if_unloaded: bool = False) -> Self: pass raise except Exception: - return await self._async_load_by_multiget() + return await self.load_by_multiget() if "Etag" in r.headers: self.props[dav.GetEtag.tag] = r.headers["Etag"] @@ -946,34 +1042,37 @@ async def _async_load(self, only_if_unloaded: bool = False) -> Self: self.props[cdav.ScheduleTag.tag] = r.headers["Schedule-Tag"] return self - async def _async_load_by_multiget(self) -> Self: - """Async implementation of load_by_multiget.""" - error.assert_(self.url) - items = await self.parent._async_multiget(event_urls=[self.url], raise_notfound=True) - if not items: - raise error.NotFoundError(self.url) - _url, self.data = items[0] - error.assert_(self.data) - 
error.assert_(len(items) == 1) - return self - - def load_by_multiget(self) -> Self: + def load_by_multiget(self) -> "Self | Coroutine[Any, Any, Self]": """ Some servers do not accept a GET, but we can still do a REPORT with a multiget query """ error.assert_(self.url) - mydata = self.parent._multiget(event_urls=[self.url], raise_notfound=True) - url_data = next(mydata, None) + if not self.parent: + raise error.NotFoundError(f"Could not do a multiget because {self.url} has no parent?") + if self.is_async_client: + return self._async_load_by_multiget() + items = self.parent._multiget(event_urls=[self.url], raise_notfound=True) + return self._post_load_by_multiget(items) + + async def _async_load_by_multiget(self) -> Self: + """Async implementation of load_by_multiget.""" + items = await self.parent._async_multiget(event_urls=[self.url], raise_notfound=True) + return self._post_load_by_multiget(items) + + def _post_load_by_multiget(self, items): + if not items: + raise error.NotFoundError(self.url) + url_data = next(items, None) if url_data is None: ## We shouldn't come here. Something is wrong. ## TODO: research it ## As of 2025-05-20, this code section is used by ## TestForServerECloud::testCreateOverwriteDeleteEvent raise error.NotFoundError(self.url) - url, self.data = url_data + _url, self.data = url_data error.assert_(self.data) - error.assert_(next(mydata, None) is None) + error.assert_(next(items, None) is None) return self ## TODO: self.id should either always be available or never @@ -1023,7 +1122,7 @@ def _find_id_path(self, id=None, path=None) -> None: self.url = URL.objectify(path) - def _put(self, retry_on_failure=True): + def _put(self, retry_on_failure=True) -> "None | Coroutine[Any, Any, None]": ## TODO: quite much overlapping with _async_put, should consolidate ## TODO: this is low-level http-communication - shouldn't it be in the davclient file rather than in calendarobjectresource.py? 
## SECURITY TODO: we should probably have a check here to verify that no such object exists already @@ -1033,7 +1132,16 @@ def _put(self, retry_on_failure=True): elif self.etag: headers["if-match"] = self.etag headers |= ICALH + if self.is_async_client: + return self._async_put(headers, retry_on_failure) r = self.client.put(self.url, self.data, headers) + return self._post_put(r, retry_on_failure) + + async def _async_put(self, headers, retry_on_failure=True): + r = await self.client.put(str(self.url), str(self.data), headers | ICALH) + return self._post_put(r, retry_on_failure) + + def _post_put(self, r, retry_on_failure): if r.status == 412: if self.schedule_tag: raise error.ScheduleTagMismatchError(errmsg(r)) @@ -1061,9 +1169,6 @@ def _put(self, retry_on_failure=True): if r.headers and r.headers.get("schedule-tag"): self.props[cdav.ScheduleTag.tag] = r.headers["schedule-tag"] - async def _async_put(self, retry_on_failure=True): - """Async version of _put for async clients.""" - r = await self.client.put(str(self.url), str(self.data), ICALH) if r.status == 302: path = [x[1] for x in r.headers if x[0] == "location"][0] self.url = URL.objectify(path) @@ -1074,8 +1179,9 @@ async def _async_put(self, retry_on_failure=True): except ImportError: retry_on_failure = False if retry_on_failure: - self.get_vobject_instance() - return await self._async_put(False) + ## This seems like a noop, but it may "wash" the object + dummy = self.vobject_instance + return self._put(False) else: raise error.PutError(errmsg(r)) ## TODO: refactor - those code lines are repeated all over the place @@ -1084,15 +1190,18 @@ async def _async_put(self, retry_on_failure=True): if r.headers and r.headers.get("schedule-tag"): self.props[cdav.ScheduleTag.tag] = r.headers["schedule-tag"] - def _create(self, id=None, path=None, retry_on_failure=True) -> None: + def _create( + self, id=None, path=None, retry_on_failure=True + ) -> "None | Coroutine[Any, Any, None]": ## TODO: Find a better method name 
self._find_id_path(id=id, path=path) - self._put() + if self.is_async_client: + return self._async_create(retry_on_failure) + self._put(retry_on_failure) - async def _async_create(self, id=None, path=None) -> None: - """Async version of _create for async clients.""" - self._find_id_path(id=id, path=path) - await self._async_put() + async def _async_create(self, retry_on_failure=True) -> None: + """Async implementation of _create.""" + await self._put(retry_on_failure) def _generate_url(self): ## See https://github.com/python-caldav/caldav/issues/143 for the rationale behind double-quoting slashes @@ -1169,7 +1278,7 @@ def save( increase_seqno: bool = True, only_this_recurrence: bool = True, all_recurrences: bool = False, - ) -> Self: + ) -> "Self | Coroutine[Any, Any, Self]": """Save the object, can be used for creation and update. no_overwrite and no_create will check if the object exists. @@ -1377,7 +1486,7 @@ async def get_self(): self._maybe_increment_sequence(increase_seqno) path = self.url.path if self.url else None - await self._async_create(id=self.id, path=path) + await self._create(id=self.id, path=path) return self def is_loaded(self): @@ -2096,7 +2205,7 @@ def complete( completion_timestamp: datetime | None = None, handle_rrule: bool = False, rrule_mode: Literal["safe", "this_and_future"] = "safe", - ) -> None: + ) -> "None | Coroutine[Any, Any, None]": """Marks the task as completed. Parameters @@ -2160,7 +2269,7 @@ def is_pending(self, i=None) -> bool | None: ## input data does not conform to the RFC raise AssertionError - def uncomplete(self) -> None: + def uncomplete(self) -> "None | Coroutine[Any, Any, None]": """Undo completion - marks a completed task as not completed""" ### TODO: needs test code for code coverage! 
## (it has been tested through the calendar-cli test code) diff --git a/caldav/collection.py b/caldav/collection.py index 44daa662..3e7df983 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -27,7 +27,7 @@ from .davclient import DAVClient from .search import CalDAVSearcher -from collections.abc import Iterable, Iterator, Sequence +from collections.abc import Coroutine, Iterable, Iterator, Sequence from typing import Literal from .base_client import ICALH @@ -65,7 +65,7 @@ def _calendars_from_results(self, results) -> list["Calendar"]: for info in calendar_infos ] - def get_calendars(self) -> list["Calendar"]: + def get_calendars(self) -> "list[Calendar] | Coroutine[Any, Any, list[Calendar]]": """ List all calendar collections in this set. @@ -82,21 +82,21 @@ def get_calendars(self) -> list["Calendar"]: calendars = await calendar_set.get_calendars() """ if self.is_async_client: - return self._async_calendars() + return self._async_get_calendars() response = self.client.propfind( str(self.url), props=self.client.CALENDAR_LIST_PROPS, depth=1 ) return self._calendars_from_results(response.results) - async def _async_calendars(self) -> list["Calendar"]: + async def _async_get_calendars(self) -> list["Calendar"]: """Async implementation of get_calendars().""" response = await self.client.propfind( str(self.url), props=self.client.CALENDAR_LIST_PROPS, depth=1 ) return self._calendars_from_results(response.results) - def calendars(self) -> list["Calendar"]: + def calendars(self) -> "list[Calendar] | Coroutine[Any, Any, list[Calendar]]": """ Deprecated: Use :meth:`get_calendars` instead. @@ -110,7 +110,7 @@ def make_calendar( cal_id: str | None = None, supported_calendar_component_set: Any | None = None, method: str | None = None, - ) -> "Calendar": + ) -> "Calendar | Coroutine[Any, Any, Calendar]": """ Utility method for creating a new calendar. 
@@ -154,7 +154,7 @@ async def _async_make_calendar( id=cal_id, supported_calendar_component_set=supported_calendar_component_set, ) - return await calendar._async_save(method=method) + return await calendar.save(method=method) def calendar(self, name: str | None = None, cal_id: str | None = None) -> "Calendar": """ @@ -297,7 +297,7 @@ async def create( if url is None: # Async URL discovery - cup = await principal._async_get_property(dav.CurrentUserPrincipal()) + cup = await principal.get_property(dav.CurrentUserPrincipal()) if cup is None: log.warning("calendar server lacking a feature:") log.warning("current-user-principal property not found") @@ -313,7 +313,7 @@ def make_calendar( cal_id: str | None = None, supported_calendar_component_set: Any | None = None, method=None, - ) -> "Calendar": + ) -> "Calendar | Coroutine[Any, Any, Calendar]": """ Convenience method, bypasses the self.calendar_home_set object. See CalendarSet.make_calendar for details. @@ -339,7 +339,7 @@ async def _async_make_calendar( ) -> "Calendar": """Async implementation of make_calendar.""" calendar_home_set = await self._async_get_calendar_home_set() - return await calendar_home_set._async_make_calendar( + return await calendar_home_set.make_calendar( name, cal_id, supported_calendar_component_set=supported_calendar_component_set, @@ -351,7 +351,7 @@ async def _async_get_calendar_home_set(self) -> "CalendarSet": if self._calendar_home_set: return self._calendar_home_set - calendar_home_set_url = await self._async_get_property(cdav.CalendarHomeSet()) + calendar_home_set_url = await self.get_property(cdav.CalendarHomeSet()) if ( calendar_home_set_url is not None and "@" in calendar_home_set_url @@ -383,7 +383,7 @@ def calendar( return Calendar(self.client, url=self.client.url.join(cal_url)) - def get_vcal_address(self) -> "vCalAddress": + def get_vcal_address(self) -> "vCalAddress | Coroutine[Any, Any, vCalAddress]": """ Returns the principal, as an icalendar.vCalAddress object. 
For async clients, returns a coroutine that must be awaited. @@ -404,7 +404,10 @@ async def _async_get_vcal_address(self) -> "vCalAddress": """Async counterpart of get_vcal_address() for use with AsyncDAVClient.""" from icalendar import vCalAddress, vText - cn = await self.get_display_name() + cn = self.get_display_name() + if not isinstance(cn, str): + cn = await cn + addresses_el = await self.get_property(cdav.CalendarUserAddressSet(), parse_props=False) if addresses_el is None: raise error.NotFoundError("No calendar user addresses given from server") @@ -454,7 +457,7 @@ def calendar_home_set(self, url) -> None: self.client.url = sanitized_url self._calendar_home_set = CalendarSet(self.client, self.client.url.join(sanitized_url)) - def get_calendars(self) -> list["Calendar"]: + def get_calendars(self) -> "list[Calendar] | Coroutine[Any, Any, list[Calendar]]": """ Return the principal's calendars. @@ -470,7 +473,7 @@ def get_calendars(self) -> list["Calendar"]: # Delegate to client for dual-mode support return self.client.get_calendars(self) - def calendars(self) -> list["Calendar"]: + def calendars(self) -> "list[Calendar] | Coroutine[Any, Any, list[Calendar]]": """ Deprecated: Use :meth:`get_calendars` instead. @@ -483,7 +486,9 @@ def calendars(self) -> list["Calendar"]: ## for this. The cruft below for constructing the request should be ## eliminated. Also, the async diversion should happen closer to the ## bottom of the method, reducing the need of duplicating code - def freebusy_request(self, dtstart, dtend, attendees) -> dict[str, FreeBusy]: + def freebusy_request( + self, dtstart, dtend, attendees + ) -> "dict[str, FreeBusy] | Coroutine[Any, Any, dict]": """Sends a freebusy-request for some attendee to the server as per RFC6638. 
@@ -544,7 +549,7 @@ def calendar_user_address_set(self) -> list[str | None]: addresses.sort(key=lambda x: -int(x.get("preferred", 0))) return [x.text for x in addresses] - def schedule_inbox(self) -> "ScheduleInbox": + def schedule_inbox(self) -> "ScheduleInbox | Coroutine[Any, Any, ScheduleInbox]": """ Returns the schedule inbox, as defined in RFC6638. For async clients, returns a coroutine that must be awaited. @@ -553,7 +558,7 @@ def schedule_inbox(self) -> "ScheduleInbox": return self._async_schedule_inbox() return ScheduleInbox(principal=self) - def schedule_outbox(self) -> "ScheduleOutbox": + def schedule_outbox(self) -> "ScheduleOutbox | Coroutine[Any, Any, ScheduleOutbox]": """ Returns the schedule outbox, as defined in RFC6638. For async clients, returns a coroutine that must be awaited. @@ -605,15 +610,12 @@ def __init__( def _create( self, name=None, id=None, supported_calendar_component_set=None, method=None - ) -> None: + ) -> "None | Coroutine[Any, Any, None]": """ Create a new calendar with display name `name` in `parent`. For async clients, returns a coroutine that must be awaited. """ - if self.is_async_client: - return self._async_create(name, id, supported_calendar_component_set, method) - if id is None: id = str(uuid.uuid4()) self.id = id @@ -640,11 +642,10 @@ def _create( # zimbra gives 500 (!) if body is omitted ... 
prop = dav.Prop() + display_name = None if name: display_name = dav.DisplayName(name) - prop += [ - display_name, - ] + prop += [display_name] if supported_calendar_component_set: sccs = cdav.SupportedCalendarComponentSet() for scc in supported_calendar_component_set: @@ -654,9 +655,11 @@ def _create( prop += dav.ResourceType() + [dav.Collection(), cdav.Calendar()] set = dav.Set() + prop - mkcol = (dav.Mkcol() if method == "mkcol" else cdav.Mkcalendar()) + set + if self.is_async_client: + return self._async_create(path, mkcol, method, name, display_name) + self._query(root=mkcol, query_method=method, url=path, expected_return_value=201) # COMPATIBILITY ISSUE @@ -678,60 +681,17 @@ def _create( exc_info=True, ) - async def _async_create( - self, name=None, id=None, supported_calendar_component_set=None, method=None - ) -> None: - """Async implementation of _create.""" - if id is None: - id = str(uuid.uuid4()) - self.id = id - - if method is None: - if self.client: - supported = self.client.features.is_supported("create-calendar", return_type=dict) - if supported["support"] not in ("full", "fragile", "quirk"): - raise error.MkcalendarError( - "Creation of calendars (allegedly) not supported on this server" - ) - if supported["support"] == "quirk" and supported["behaviour"] == "mkcol-required": - method = "mkcol" - else: - method = "mkcalendar" - else: - method = "mkcalendar" - - path = self.parent.url.join(id + "/") - self.url = path - - prop = dav.Prop() - if name: - display_name = dav.DisplayName(name) - prop += [ - display_name, - ] - if supported_calendar_component_set: - sccs = cdav.SupportedCalendarComponentSet() - for scc in supported_calendar_component_set: - sccs += cdav.Comp(scc) - prop += sccs - if method == "mkcol": - prop += dav.ResourceType() + [dav.Collection(), cdav.Calendar()] - - set = dav.Set() + prop - - mkcol = (dav.Mkcol() if method == "mkcol" else cdav.Mkcalendar()) + set - - await self._async_query( - root=mkcol, query_method=method, url=path, 
expected_return_value=201 - ) + async def _async_create(self, path, mkcol, method, name, display_name) -> None: + """Async implementation of _create (call via _create, not directly).""" + await self._query(root=mkcol, query_method=method, url=path, expected_return_value=201) # COMPATIBILITY ISSUE - try to set display name explicitly if name: try: - await self._async_set_properties([display_name]) + await self.set_properties([display_name]) except Exception: try: - current_display_name = await self._async_get_property(dav.DisplayName()) + current_display_name = await self.get_property(dav.DisplayName()) error.assert_(current_display_name == name) except Exception: log.warning( @@ -745,7 +705,7 @@ def delete(self): For async clients, returns a coroutine that must be awaited. """ if self.is_async_client: - return self._async_calendar_delete() + return self._async_delete() ## TODO: remove quirk handling from the functional tests ## TODO: this needs test code @@ -771,7 +731,7 @@ def delete(self): else: super().delete() - async def _async_calendar_delete(self): + async def _async_delete(self): """Async implementation of Calendar.delete().""" import asyncio @@ -782,7 +742,7 @@ async def _async_calendar_delete(self): # Do some retries on deleting the calendar for _ in range(0, 20): try: - await self._async_delete() + await DAVObject._async_delete(self) except error.DeleteError: pass try: @@ -794,9 +754,9 @@ async def _async_calendar_delete(self): if wipe: for obj in await self.search(): - await obj._async_delete() + await obj.delete() else: - await self._async_delete() + await DAVObject._async_delete(self) def _supported_components_from_response(self, response: Any, with_fallback: bool) -> list[Any]: """Extract supported component types from a propfind DAVResponse. 
@@ -818,7 +778,9 @@ def _supported_components_from_response(self, response: Any, with_fallback: bool rfc_default.append("VJOURNAL") return rfc_default - def get_supported_components(self, with_fallback=True) -> list[Any]: + def get_supported_components( + self, with_fallback=True + ) -> "list[Any] | Coroutine[Any, Any, list[Any]]": """ returns a list of component types supported by the calendar, in string format (typically ['VJOURNAL', 'VTODO', 'VEVENT']) @@ -839,10 +801,12 @@ def get_supported_components(self, with_fallback=True) -> list[Any]: async def _async_get_supported_components(self, with_fallback=True) -> list[Any]: props = [cdav.SupportedCalendarComponentSet()] - response = await self._async_get_properties(props, parse_response_xml=False) + response = await self.get_properties(props, parse_response_xml=False) return self._supported_components_from_response(response, with_fallback) - def save_with_invites(self, ical: str, attendees, **attendeeoptions) -> None: + def save_with_invites( + self, ical: str, attendees, **attendeeoptions + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ sends a schedule request to the server. Equivalent with add_event, add_todo, etc, but the attendees will be added to the ical object before sending it to the server. @@ -895,7 +859,7 @@ def add_object( no_overwrite: bool = False, no_create: bool = False, **ical_data, - ) -> "CalendarResourceObject": + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """Add a new calendar object (event, todo, journal) to the calendar. This method is for adding new content to the calendar. 
To update @@ -938,10 +902,10 @@ async def _async_add_object_finish(self, o, no_overwrite=False, no_create=False) """Async helper for add_object(): awaits save() then handles reverse relations.""" o = await o.save(no_overwrite=no_overwrite, no_create=no_create) if o.url is not None: - await o._async_handle_reverse_relations(fix=True) + await o._handle_reverse_relations(fix=True) return o - def add_event(self, *largs, **kwargs) -> "Event": + def add_event(self, *largs, **kwargs) -> "Event | Coroutine[Any, Any, Event]": """ Add an event to the calendar. @@ -949,7 +913,7 @@ def add_event(self, *largs, **kwargs) -> "Event": """ return self.add_object(Event, *largs, **kwargs) - def add_todo(self, *largs, **kwargs) -> "Todo": + def add_todo(self, *largs, **kwargs) -> "Todo | Coroutine[Any, Any, Todo]": """ Add a todo/task to the calendar. @@ -957,7 +921,7 @@ def add_todo(self, *largs, **kwargs) -> "Todo": """ return self.add_object(Todo, *largs, **kwargs) - def add_journal(self, *largs, **kwargs) -> "Journal": + def add_journal(self, *largs, **kwargs) -> "Journal | Coroutine[Any, Any, Journal]": """ Add a journal entry to the calendar. @@ -968,7 +932,9 @@ def add_journal(self, *largs, **kwargs) -> "Journal": ## Deprecated aliases - use add_* instead ## These will be removed in a future version - def save_object(self, *largs, **kwargs) -> "CalendarResourceObject": + def save_object( + self, *largs, **kwargs + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Deprecated: Use :meth:`add_object` instead. @@ -977,7 +943,7 @@ def save_object(self, *largs, **kwargs) -> "CalendarResourceObject": """ return self.add_object(*largs, **kwargs) - def save_event(self, *largs, **kwargs) -> "Event": + def save_event(self, *largs, **kwargs) -> "Event | Coroutine[Any, Any, Event]": """ Deprecated: Use :meth:`add_event` instead. 
@@ -986,7 +952,7 @@ def save_event(self, *largs, **kwargs) -> "Event": """ return self.add_event(*largs, **kwargs) - def save_todo(self, *largs, **kwargs) -> "Todo": + def save_todo(self, *largs, **kwargs) -> "Todo | Coroutine[Any, Any, Todo]": """ Deprecated: Use :meth:`add_todo` instead. @@ -995,7 +961,7 @@ def save_todo(self, *largs, **kwargs) -> "Todo": """ return self.add_todo(*largs, **kwargs) - def save_journal(self, *largs, **kwargs) -> "Journal": + def save_journal(self, *largs, **kwargs) -> "Journal | Coroutine[Any, Any, Journal]": """ Deprecated: Use :meth:`add_journal` instead. @@ -1004,7 +970,7 @@ def save_journal(self, *largs, **kwargs) -> "Journal": """ return self.add_journal(*largs, **kwargs) - def save(self, method=None): + def save(self, method=None) -> "Calendar | Coroutine[Any, Any, Calendar]": """ The save method for a calendar is only used to create it, for now. We know we have to create it when we don't have a url. @@ -1014,23 +980,18 @@ def save(self, method=None): Returns: * self """ - if self.is_async_client: - return self._async_save(method) - if self.url is None: # Get display name from props cache display_name = self.props.get("{DAV:}displayname") + if self.is_async_client: + return self._async_save(display_name, method) + self._create(id=self.id, name=display_name, method=method, **self.extra_init_options) return self - async def _async_save(self, method=None): + async def _async_save(self, display_name, method=None): """Async implementation of save.""" - if self.url is None: - # Get display name from props cache - display_name = self.props.get("{DAV:}displayname") - await self._async_create( - name=display_name, id=self.id, method=method, **self.extra_init_options - ) + await self._create(name=display_name, id=self.id, method=method, **self.extra_init_options) return self # def data2object_class @@ -1084,7 +1045,7 @@ async def _async_multiget( prop = dav.Prop() + cdav.CalendarData() root = cdav.CalendarMultiGet() + prop + 
[dav.Href(value=u.path) for u in event_urls] - response = await self._async_query(root, None, "report") + response = await self._query(root, None, "report") results = response.expand_simple_props([cdav.CalendarData()]) if raise_notfound: for href in response.statuses: @@ -1174,26 +1135,9 @@ def date_search( ## TODO: this logic has been partly duplicated in calendar_multiget, but ## the code there is much more readable and condensed than this. ## Can code below be refactored? - def _request_report_build_resultlist( - self, xml, comp_class=None, props=None, no_calendardata=False - ): - """ - Takes some input XML, does a report query on a calendar object - and returns the resource objects found. - - For async clients, returns a coroutine that must be awaited. - """ - if self.is_async_client: - return self._async_request_report_build_resultlist( - xml, comp_class, props, no_calendardata - ) - + def _post_request_report_build_resultlist(self, response, comp_class, props_): + """Shared post-processing for _request_report_build_resultlist.""" matches = [] - if props is None: - props_ = [cdav.CalendarData()] - else: - props_ = [cdav.CalendarData()] + props - response = self._query(xml, 1, "report") results = response.expand_simple_props(props_) for r in results: pdata = results[r] @@ -1225,43 +1169,25 @@ def _request_report_build_resultlist( ) return (response, matches) - async def _async_request_report_build_resultlist( + def _request_report_build_resultlist( self, xml, comp_class=None, props=None, no_calendardata=False - ): + ) -> "tuple[Any, list[CalendarObjectResource]] | Coroutine[Any, Any, tuple[Any, list[CalendarObjectResource]]]": + """ + Takes some input XML, does a report query on a calendar object + and returns the resource objects found. + + For async clients, returns a coroutine that must be awaited. 
+ """ + props_ = [cdav.CalendarData()] if props is None else [cdav.CalendarData()] + props + if self.is_async_client: + return self._async_request_report_build_resultlist(xml, comp_class, props_) + response = self._query(xml, 1, "report") + return self._post_request_report_build_resultlist(response, comp_class, props_) + + async def _async_request_report_build_resultlist(self, xml, comp_class, props_): """Async implementation of _request_report_build_resultlist.""" - matches = [] - if props is None: - props_ = [cdav.CalendarData()] - else: - props_ = [cdav.CalendarData()] + props - response = await self._async_query(xml, 1, "report") - results = response.expand_simple_props(props_) - for r in results: - pdata = results[r] - if cdav.CalendarData.tag in pdata: - cdata = pdata.pop(cdav.CalendarData.tag) - comp_class_ = ( - self._calendar_comp_class_by_data(cdata) if comp_class is None else comp_class - ) - else: - cdata = None - if comp_class_ is None: - comp_class_ = CalendarObjectResource - url = URL(r) - if url.hostname is None: - url = quote(r) - if self.url.join(url) == self.url: - continue - matches.append( - comp_class_( - self.client, - url=self.url.join(url), - data=cdata, - parent=self, - props=pdata, - ) - ) - return (response, matches) + response = await self._query(xml, 1, "report") + return self._post_request_report_build_resultlist(response, comp_class, props_) def _populate_searcher(self, my_searcher, searchargs: dict, sort_reverse: bool) -> None: """Populate a CalDAVSearcher from a dict of search keyword arguments. @@ -1335,7 +1261,7 @@ def search( post_filter=None, _hacks=None, **searchargs, - ) -> list[_CC]: + ) -> "list[_CC] | Coroutine[Any, Any, list[_CC]]": """Sends a search request towards the server, processes the results if needed and returns the objects found. 
@@ -1474,7 +1400,7 @@ def get_todos( sort_keys: Sequence[str] = ("due", "priority"), include_completed: bool = False, sort_key: str | None = None, - ) -> list["Todo"]: + ) -> "list[Todo] | Coroutine[Any, Any, list[Todo]]": """ Fetches a list of todo items (this is a wrapper around search). @@ -1499,7 +1425,7 @@ def get_todos( # delay decorators applied to search() are respected return self.search(todo=True, include_completed=include_completed, sort_keys=sort_keys) - def todos(self, *largs, **kwargs) -> list["Todo"]: + def todos(self, *largs, **kwargs) -> "list[Todo] | Coroutine[Any, Any, list[Todo]]": """ Deprecated: Use :meth:`get_todos` instead. @@ -1554,7 +1480,7 @@ def get_object_by_uid( uid: str, comp_filter: cdav.CompFilter | None = None, comp_class: Optional["CalendarObjectResource"] = None, - ) -> "Event": + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Get one calendar object from the calendar by UID. @@ -1608,7 +1534,9 @@ async def _async_get_object_by_uid( error.assert_(len(items_found) == 1) return items_found[0] - def get_todo_by_uid(self, uid: str) -> "CalendarObjectResource": + def get_todo_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Get a task/todo from the calendar by UID. @@ -1617,7 +1545,9 @@ def get_todo_by_uid(self, uid: str) -> "CalendarObjectResource": """ return self.get_object_by_uid(uid, comp_class=Todo) - def get_event_by_uid(self, uid: str) -> "CalendarObjectResource": + def get_event_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Get an event from the calendar by UID. 
@@ -1626,7 +1556,9 @@ def get_event_by_uid(self, uid: str) -> "CalendarObjectResource": """ return self.get_object_by_uid(uid, comp_class=Event) - def get_journal_by_uid(self, uid: str) -> "CalendarObjectResource": + def get_journal_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Get a journal entry from the calendar by UID. @@ -1637,7 +1569,9 @@ def get_journal_by_uid(self, uid: str) -> "CalendarObjectResource": ## Deprecated aliases - use get_*_by_uid instead - def object_by_uid(self, *largs, **kwargs) -> "CalendarObjectResource": + def object_by_uid( + self, *largs, **kwargs + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Deprecated: Use :meth:`get_object_by_uid` instead. @@ -1645,7 +1579,9 @@ def object_by_uid(self, *largs, **kwargs) -> "CalendarObjectResource": """ return self.get_object_by_uid(*largs, **kwargs) - def event_by_uid(self, uid: str) -> "CalendarObjectResource": + def event_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Deprecated: Use :meth:`get_event_by_uid` instead. @@ -1653,7 +1589,9 @@ def event_by_uid(self, uid: str) -> "CalendarObjectResource": """ return self.get_event_by_uid(uid) - def todo_by_uid(self, uid: str) -> "CalendarObjectResource": + def todo_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Deprecated: Use :meth:`get_todo_by_uid` instead. @@ -1661,7 +1599,9 @@ def todo_by_uid(self, uid: str) -> "CalendarObjectResource": """ return self.get_todo_by_uid(uid) - def journal_by_uid(self, uid: str) -> "CalendarObjectResource": + def journal_by_uid( + self, uid: str + ) -> "CalendarObjectResource | Coroutine[Any, Any, CalendarObjectResource]": """ Deprecated: Use :meth:`get_journal_by_uid` instead. 
@@ -1672,7 +1612,7 @@ def journal_by_uid(self, uid: str) -> "CalendarObjectResource": # alias for backward compatibility event = event_by_uid - def get_events(self) -> list["Event"]: + def get_events(self) -> "list[Event] | Coroutine[Any, Any, list[Event]]": """ List all events from the calendar. @@ -1692,7 +1632,7 @@ def get_events(self) -> list["Event"]: # delay decorators applied to search() are respected return self.search(comp_class=Event) - def events(self) -> list["Event"]: + def events(self) -> "list[Event] | Coroutine[Any, Any, list[Event]]": """ Deprecated: Use :meth:`get_events` instead. @@ -1730,9 +1670,11 @@ def get_objects_by_sync_token( sync_token: Any | None = None, load_objects: bool = False, disable_fallback: bool = False, - ) -> "SynchronizableCalendarObjectCollection": + ) -> "SynchronizableCalendarObjectCollection | Coroutine[Any, Any, SynchronizableCalendarObjectCollection]": """get_objects_by_sync_token aka get_objects + For async clients, returns a coroutine that must be awaited. + Do a sync-collection report, ref RFC 6578 and https://github.com/python-caldav/caldav/issues/87 @@ -1757,6 +1699,9 @@ def get_objects_by_sync_token( of falling back to retrieving all objects. This is useful for testing whether the server truly supports sync tokens. 
""" + if self.is_async_client: + return self._async_get_objects_by_sync_token(sync_token, load_objects, disable_fallback) + ## Check if we should attempt to use sync tokens ## (either server supports them, or we haven't checked yet, or this is a fake token) use_sync_token = True @@ -1779,11 +1724,6 @@ def get_objects_by_sync_token( (response, objects) = self._request_report_build_resultlist( root, props=[dav.GetEtag()], no_calendardata=True ) - ## TODO: look more into this, I think sync_token should be directly available through response object - try: - sync_token = response.sync_token - except AttributeError: - sync_token = response.tree.findall(".//" + dav.SyncToken.tag)[0].text ## this is not quite right - the etag we've fetched can already be outdated if load_objects: @@ -1794,7 +1734,7 @@ def get_objects_by_sync_token( ## The object was deleted pass return SynchronizableCalendarObjectCollection( - calendar=self, objects=objects, sync_token=sync_token + calendar=self, objects=objects, sync_token=response.sync_token ) except (error.ReportError, error.DAVError) as e: ## Server doesn't support sync tokens or the sync-collection REPORT failed @@ -1867,7 +1807,9 @@ def get_objects_by_sync_token( calendar=self, objects=all_objects, sync_token=fake_sync_token ) - def objects_by_sync_token(self, *largs, **kwargs) -> "SynchronizableCalendarObjectCollection": + def objects_by_sync_token( + self, *largs, **kwargs + ) -> "SynchronizableCalendarObjectCollection | Coroutine[Any, Any, SynchronizableCalendarObjectCollection]": """ Deprecated: Use :meth:`get_objects_by_sync_token` instead. 
@@ -1878,7 +1820,89 @@ def objects_by_sync_token(self, *largs, **kwargs) -> "SynchronizableCalendarObje objects = objects_by_sync_token get_objects = get_objects_by_sync_token - def get_journals(self) -> list["Journal"]: + async def _async_get_objects_by_sync_token( + self, + sync_token: Any | None = None, + load_objects: bool = False, + disable_fallback: bool = False, + ) -> "SynchronizableCalendarObjectCollection": + """Async implementation of get_objects_by_sync_token.""" + use_sync_token = True + sync_support = self.client.features.is_supported("sync-token", return_type=dict) + if sync_support.get("support") == "unsupported": + if disable_fallback: + raise error.ReportError("Sync tokens are not supported by the server") + use_sync_token = False + if sync_token and isinstance(sync_token, str) and sync_token.startswith("fake-"): + use_sync_token = False + + if use_sync_token: + try: + cmd = dav.SyncCollection() + token = dav.SyncToken(value=sync_token) + level = dav.SyncLevel(value="1") + props = dav.Prop() + dav.GetEtag() + root = cmd + [level, token, props] + (response, objects) = await self._request_report_build_resultlist( + root, props=[dav.GetEtag()], no_calendardata=True + ) + if load_objects: + for obj in objects: + try: + await obj.load() + except error.NotFoundError: + pass + return SynchronizableCalendarObjectCollection( + calendar=self, objects=objects, sync_token=response.sync_token + ) + except (error.ReportError, error.DAVError) as e: + if disable_fallback: + raise + log.info(f"Sync-collection REPORT failed ({e}), falling back to full retrieval") + + log.debug("Using fallback sync mechanism (retrieving all objects)") + + all_objects = list(await self.search()) + + if load_objects: + for obj in all_objects: + if not hasattr(obj, "_data") or obj._data is None: + try: + await obj.load() + except error.NotFoundError: + pass + + if all_objects and ( + not hasattr(all_objects[0], "props") or dav.GetEtag.tag not in all_objects[0].props + ): + try: + 
response = await self._query_properties([dav.GetEtag()], depth=1) + etag_props = response.expand_simple_props([dav.GetEtag()]) + url_to_obj = {str(obj.url.canonical()): obj for obj in all_objects} + log.debug(f"Fallback: Fetching ETags for {len(url_to_obj)} objects") + for url_str, props in etag_props.items(): + canonical_url_str = str(self.url.join(url_str).canonical()) + if canonical_url_str in url_to_obj: + if not hasattr(url_to_obj[canonical_url_str], "props"): + url_to_obj[canonical_url_str].props = {} + url_to_obj[canonical_url_str].props.update(props) + log.debug(f"Fallback: Added ETag to {canonical_url_str}") + except Exception as e: + log.debug(f"Failed to fetch ETags for fallback sync: {e}") + + fake_sync_token = self._generate_fake_sync_token(all_objects) + + if sync_token and isinstance(sync_token, str) and sync_token.startswith("fake-"): + if sync_token == fake_sync_token: + return SynchronizableCalendarObjectCollection( + calendar=self, objects=[], sync_token=fake_sync_token + ) + + return SynchronizableCalendarObjectCollection( + calendar=self, objects=all_objects, sync_token=fake_sync_token + ) + + def get_journals(self) -> "list[Journal] | Coroutine[Any, Any, list[Journal]]": """ List all journals from the calendar. @@ -1887,7 +1911,7 @@ def get_journals(self) -> list["Journal"]: """ return self.search(comp_class=Journal) - def journals(self) -> list["Journal"]: + def journals(self) -> "list[Journal] | Coroutine[Any, Any, list[Journal]]": """ Deprecated: Use :meth:`get_journals` instead. @@ -1952,9 +1976,10 @@ def __init__( "principal has no %s. %s" % (str(self.findprop()), error.ERR_FRAGMENT) # type: ignore ) from None - def get_items(self): - """ - Return all items currently in this scheduling mailbox (inbox or outbox). + def get_items(self) -> "list | Coroutine[Any, Any, list]": + """Return all items currently in this scheduling mailbox (inbox or outbox). + + For async clients, returns a coroutine that must be awaited. 
Unlike regular calendars, schedule mailboxes contain raw iTIP messages
    (METHOD:REQUEST, METHOD:REPLY, METHOD:CANCEL, …) rather than permanent
@@ -1968,13 +1993,16 @@ def get_items(self):
    a plain PROPFIND depth-1 followed by individual GETs. Both paths return
    loaded CalendarObjectResource objects.
 
-    This method does NOT belong on the Calendar super-class: Calendar exposes
-    type-specific accessors (get_events, get_todos, …) and uses search()
-    internally. The mailbox is a different beast — it holds transient,
-    mixed-type scheduling messages and must use children() as its fallback
-    because search() / REPORT queries against a mailbox URL are unreliable
-    across servers.
+    Note: this method deliberately does NOT live on the Calendar
+    super-class. Calendar exposes type-specific accessors
+    (get_events, get_todos, …) and uses search() internally. The
+    mailbox is a different beast — it holds transient, mixed-type
+    scheduling messages and must use children() as its fallback
+    because search() / REPORT queries against a mailbox URL are
+    unreliable across servers.
""" + if self.is_async_client: + return self._async_get_items() def _load_from_children(): items = [CalendarObjectResource(url=x[0], client=self.client) for x in self.children()] @@ -1999,6 +2027,34 @@ def _load_from_children(): self._items = _load_from_children() return self._items + async def _async_get_items(self): + """Async implementation of get_items.""" + + async def _load_from_children(): + items = [ + CalendarObjectResource(url=x[0], client=self.client) for x in await self.children() + ] + for x in items: + await x.load() + return items + + if not self._items: + try: + self._items = await self.objects(load_objects=True) + except Exception: + logging.debug( + "sync-collection REPORT not supported on scheduling mailbox %s; " + "falling back to PROPFIND depth-1", + self.url, + ) + self._items = await _load_from_children() + else: + try: + await self._items._async_sync() + except Exception: + self._items = await _load_from_children() + return self._items + # def get_invites(): # for item in self.get_items(): @@ -2044,32 +2100,66 @@ def objects_by_url(self): self._objects_by_url[obj.url.canonical()] = obj return self._objects_by_url - def sync(self) -> tuple[Any, Any]: + def _post_sync_fallback( + self, current_by_url: dict, old_by_url: dict + ) -> tuple[list[Any], list[Any]]: + """Pure post-processing for the fallback sync path (no I/O). + + Compares current server state against cached state to determine what + changed, then updates internal state. """ - This method will contact the caldav server, - request all changes from it, and sync up the collection. 
+ updated_objs: list[Any] = [] + deleted_objs: list[Any] = [] + + for url, obj in current_by_url.items(): + if url in old_by_url: + old_data = old_by_url[url].data if hasattr(old_by_url[url], "data") else None + new_data = obj.data if hasattr(obj, "data") else None + if old_data != new_data and new_data is not None: + updated_objs.append(obj) + else: + updated_objs.append(obj) - This method transparently falls back to comparing full calendar state - if the server doesn't support sync tokens. + for url in old_by_url: + if url not in current_by_url: + deleted_objs.append(old_by_url[url]) + + self.objects = list(current_by_url.values()) + self._objects_by_url = None + self.sync_token = self.calendar._generate_fake_sync_token(self.objects) + return (updated_objs, deleted_objs) + + def sync( + self, + ) -> "tuple[Any, Any] | Coroutine[Any, Any, tuple[Any, Any]]": + """Contact the server, fetch changes, and update the local collection. + + Falls back to comparing the full calendar state when the server does + not support sync tokens. + + Returns a coroutine for async clients; call with ``await`` in that case. """ - updated_objs = [] - deleted_objs = [] + if self.calendar.is_async_client: + return self._async_sync() + + updated_objs: list[Any] = [] + deleted_objs: list[Any] = [] ## Check if we're using fake sync tokens (fallback mode) is_fake_token = isinstance(self.sync_token, str) and self.sync_token.startswith("fake-") if not is_fake_token: - ## Try to use real sync tokens + ## Try to use real sync tokens. + ## NOTE: the loop below mixes I/O (obj.load()) with data manipulation; + ## any changes here must be mirrored in _async_sync(). 
try: updates = self.calendar.get_objects_by_sync_token( self.sync_token, load_objects=False ) - ## If we got a fake token back, we've fallen back if isinstance(updates.sync_token, str) and updates.sync_token.startswith("fake-"): is_fake_token = True else: - ## Real sync token path obu = self.objects_by_url() for obj in updates: obj.url = obj.url.canonical() @@ -2089,52 +2179,75 @@ def sync(self) -> tuple[Any, Any]: obu.pop(obj.url) self.objects = list(obu.values()) - self._objects_by_url = None ## Invalidate cache + self._objects_by_url = None self.sync_token = updates.sync_token return (updated_objs, deleted_objs) except (error.ReportError, error.DAVError): - ## Sync failed, fall back is_fake_token = True - if is_fake_token: - ## FALLBACK: Compare full calendar state - log.debug("Using fallback sync mechanism (comparing all objects)") + ## FALLBACK: fetch all objects and compare + log.debug("Using fallback sync mechanism (comparing all objects)") + current_objects = list(self.calendar.search()) + for obj in current_objects: + try: + obj.load() + except error.NotFoundError: + pass + current_by_url = {obj.url.canonical(): obj for obj in current_objects} + return self._post_sync_fallback(current_by_url, self.objects_by_url()) - ## Retrieve all current objects from server - current_objects = list(self.calendar.search()) + async def _async_sync(self) -> tuple[Any, Any]: + """Async implementation of sync(). - ## Load them - for obj in current_objects: - try: - obj.load() - except error.NotFoundError: - pass + NOTE: the real-token loop mixes I/O (await obj.load()) with data + manipulation; any changes here must be mirrored in sync(). 
+ """ + updated_objs: list[Any] = [] + deleted_objs: list[Any] = [] - ## Build URL-indexed dicts for comparison - current_by_url = {obj.url.canonical(): obj for obj in current_objects} - old_by_url = self.objects_by_url() - - ## Find updated and new objects - for url, obj in current_by_url.items(): - if url in old_by_url: - ## Object exists in both - check if modified - ## Compare data if available, otherwise consider it unchanged - old_data = old_by_url[url].data if hasattr(old_by_url[url], "data") else None - new_data = obj.data if hasattr(obj, "data") else None - if old_data != new_data and new_data is not None: - updated_objs.append(obj) - else: - ## New object - updated_objs.append(obj) + is_fake_token = isinstance(self.sync_token, str) and self.sync_token.startswith("fake-") - ## Find deleted objects - for url in old_by_url: - if url not in current_by_url: - deleted_objs.append(old_by_url[url]) + if not is_fake_token: + try: + updates = await self.calendar.get_objects_by_sync_token( + self.sync_token, load_objects=False + ) - ## Update internal state - self.objects = list(current_by_url.values()) - self._objects_by_url = None ## Invalidate cache - self.sync_token = self.calendar._generate_fake_sync_token(self.objects) + if isinstance(updates.sync_token, str) and updates.sync_token.startswith("fake-"): + is_fake_token = True + else: + obu = self.objects_by_url() + for obj in updates: + obj.url = obj.url.canonical() + if ( + obj.url in obu + and dav.GetEtag.tag in obu[obj.url].props + and dav.GetEtag.tag in obj.props + ): + if obu[obj.url].props[dav.GetEtag.tag] == obj.props[dav.GetEtag.tag]: + continue + obu[obj.url] = obj + try: + await obj.load() + updated_objs.append(obj) + except error.NotFoundError: + deleted_objs.append(obj) + obu.pop(obj.url) - return (updated_objs, deleted_objs) + self.objects = list(obu.values()) + self._objects_by_url = None + self.sync_token = updates.sync_token + return (updated_objs, deleted_objs) + except (error.ReportError, 
error.DAVError): + is_fake_token = True + + ## FALLBACK: fetch all objects and compare + log.debug("Using fallback sync mechanism (comparing all objects)") + current_objects = list(await self.calendar.search()) + for obj in current_objects: + try: + await obj.load() + except error.NotFoundError: + pass + current_by_url = {obj.url.canonical(): obj for obj in current_objects} + return self._post_sync_fallback(current_by_url, self.objects_by_url()) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 80819102..3f730e63 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -900,9 +900,6 @@ def dotted_feature_set_list(self, compact=False): } xandikos = { - ## We've sometimes been observing internal server errors on freebusy-requests. - ## Should do more research on it next time it shows up. - ## Principal property search returns 403 (not implemented) "principal-search": "ungraceful", @@ -914,6 +911,7 @@ def dotted_feature_set_list(self, compact=False): ## 500 Internal Server Error (OverflowError: date value out of range in icalendar.py ## _expand_rrule_component when computing adjusted_start = start - duration). 
"search.time-range.open.start": {"support": "ungraceful", "behaviour": "500 Internal Server Error (OverflowError in rrule expansion)"}, + "search.time-range.open.start.duration": True, ## this only applies for very simple installations "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, @@ -933,7 +931,6 @@ def dotted_feature_set_list(self, compact=False): "principal-search": {"support": "unsupported"}, ## this only applies for very simple installations "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, - ## freebusy is not supported yet, but on the long-term road map "scheduling": {"support": "unsupported"}, 'old_flags': [ ## extra features not specified in RFC4791 @@ -969,8 +966,7 @@ def dotted_feature_set_list(self, compact=False): 'search.combined-is-logical-and': False, ## Observed with Nextcloud 33: server delivers iTIP notification to the inbox AND ## auto-schedules into the attendee's calendar. - "scheduling.mailbox.inbox-delivery": True, - "scheduling.auto-schedule": True, + 'scheduling.schedule-tag': False, } ## TODO: Latest - mismatch between config and test script in delete-calendar.free-namespace ... and create-calendar.set-displayname? @@ -1017,9 +1013,9 @@ def dotted_feature_set_list(self, compact=False): 'principal-search': "unsupported", ## Zimbra implements server-side automatic scheduling: invitations are ## auto-processed into the attendee's calendar; no iTIP notification appears in the inbox. - "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": False, - "scheduling.auto-schedule": True, + ## TODO: auto-scheduling did not work in the last test? 
Check more around it + #"scheduling.mailbox.inbox-delivery": False, + "scheduling.schedule-tag": False, 'save-load.icalendar.related-to': {'support': 'unsupported'}, 'search.time-range.open.start': {'support': 'broken'}, @@ -1046,6 +1042,12 @@ def dotted_feature_set_list(self, compact=False): bedework = { ## If tests are yielding unexpected results, try to increase this: 'search-cache': {'behaviour': 'delay', 'delay': 3}, + 'scheduling.auto-schedule': {'support': 'unknown'}, + 'scheduling.calendar-user-address-set': {'support': 'full'}, + 'scheduling.freebusy-query': {'support': 'full'}, + 'scheduling.mailbox': {'support': 'full'}, + 'scheduling.mailbox.inbox-delivery': {'support': 'unsupported'}, + 'scheduling.schedule-tag': {'support': 'full'}, 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, 'auto-connect.url': {'basepath': '/ucaldav/'}, @@ -1073,7 +1075,8 @@ def dotted_feature_set_list(self, compact=False): #"search.unlimited-time-range": {"support": "broken"}, ## Bedework uses a pre-built Docker image with no easy way to add users, so ## cross-user scheduling tests cannot be run; inbox-delivery behaviour is unknown. - "scheduling.mailbox": {"support": "unknown"}, + ## (not expected to be working though) + "scheduling": {"support": "unknown"}, ## TODO: play with this and see if it's needed 'save-load.icalendar.related-to': {'support': 'broken', 'behaviour': 'first RELATED-TO line is preserved but subsequent RELATED-TO lines are stripped'}, @@ -1102,9 +1105,7 @@ def dotted_feature_set_list(self, compact=False): baikal = { ## version 0.10.1 # Baikal (sabre/dav) delivers iTIP notifications to the attendee inbox AND auto-schedules # into their calendar. 
- "scheduling.mailbox.inbox-delivery": True, - "scheduling.auto-schedule": True, - "scheduling.mailbox": True, + "scheduling.schedule-tag": False, "http.multiplexing": "fragile", ## ref https://github.com/python-caldav/caldav/issues/564 'search.comp-type.optional': {'support': 'ungraceful'}, 'search.recurrences.expanded.todo': {'support': 'unsupported'}, @@ -1151,9 +1152,6 @@ def dotted_feature_set_list(self, compact=False): # Cyrus implements server-side automatic scheduling: for cross-user invites, # the server both auto-processes the invite into the attendee's calendar # AND delivers an iTIP notification copy to the attendee's schedule-inbox. - "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": True, - "scheduling.auto-schedule": True, } ## See comments on https://github.com/python-caldav/caldav/issues/3 @@ -1175,9 +1173,7 @@ def dotted_feature_set_list(self, compact=False): "http.multiplexing": { "support": "unsupported" }, # DAViCal delivers iTIP notifications to the attendee inbox AND auto-schedules # into their calendar. - "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": True, - "scheduling.auto-schedule": True, + "scheduling.schedule-tag": False, "search.comp-type.optional": { "support": "fragile" }, "search.recurrences.expanded.exception": { "support": "unsupported" }, "search.time-range.alarm": { "support": "unsupported" }, @@ -1196,8 +1192,8 @@ def dotted_feature_set_list(self, compact=False): } sogo = { - ## scheduling.mailbox.inbox-delivery behaviour unknown until cross-user scheduling tests run - "scheduling.mailbox.inbox-delivery": {"support": "unknown"}, + "scheduling.schedule-tag": False, + "scheduling.mailbox.inbox-delivery": False, ## I'm surprised, I'm quite sure this was passing earlier. 
reported unsupported with caldav commit a98d50490b872e9b9d8e93e2e401c936ad193003, caldav server checker commit 3cae24cf99da1702b851b5a74a9b88c8e5317dad 2026-02-15 "search.text.category": False, "search.time-range.event.old-dates": False, @@ -1357,9 +1353,7 @@ def dotted_feature_set_list(self, compact=False): davis = { # Davis uses sabre/dav (same backend as Baikal): delivers iTIP notifications to the # attendee inbox AND auto-schedules into their calendar. - "scheduling.mailbox": True, - "scheduling.mailbox.inbox-delivery": True, - "scheduling.auto-schedule": True, + "scheduling.schedule-tag": False, "search.recurrences.expanded.todo": {"support": "unsupported"}, "search.recurrences.expanded.exception": {"support": "unsupported"}, "search.recurrences.includes-implicit.todo": {"support": "unsupported"}, @@ -1384,6 +1378,7 @@ def dotted_feature_set_list(self, compact=False): "scheduling.freebusy-query": {"support": "ungraceful"}, "scheduling.mailbox.inbox-delivery": True, "scheduling.auto-schedule": True, + "scheduling.schedule-tag.stable-partstat": {"support": "unsupported"}, "save-load.journal": {"support": "unsupported"}, "save-load.todo.mixed-calendar": {"support": "unsupported"}, # CCS enforces unique UIDs across ALL calendars for a user diff --git a/caldav/davclient.py b/caldav/davclient.py index c6dfaf39..a8161b07 100644 --- a/caldav/davclient.py +++ b/caldav/davclient.py @@ -50,7 +50,7 @@ from caldav.lib.python_utilities import to_wire from caldav.lib.url import URL from caldav.requests import HTTPBearerAuth -from caldav.response import BaseDAVResponse +from caldav.response import DAVResponse log = logging.getLogger("caldav") @@ -155,28 +155,6 @@ def _auto_url( return (url, None) -class DAVResponse(BaseDAVResponse): - """ - This class is a response from a DAV request. It is instantiated from - the DAVClient class. End users of the library should not need to - know anything about this class. 
Since we often get XML responses, - it tries to parse it into `self.tree` - """ - - # Protocol-layer parsed results (new interface, replaces find_objects_and_props()) - results: list | None = None - sync_token: str | None = None - - def __init__( - self, - response: Response, - davclient: Optional["DAVClient"] = None, - ) -> None: - self._init_from_response(response, davclient) - - # Response parsing methods are inherited from BaseDAVResponse - - class DAVClient(BaseDAVClient): """ Basic client for webdav, uses the niquests lib; gives access to @@ -451,19 +429,6 @@ def principal(self, *largs, **kwargs): self._principal = Principal(*largs, client=self, **kwargs) return self._principal - def calendar(self, **kwargs): - """Returns a calendar object. - - Typically, a URL should be given as a named parameter (url) - - No network traffic will be initiated by this method. - - If you don't know the URL of the calendar, use - client.principal().calendar(...) instead, or - client.principal().get_calendars() - """ - return Calendar(client=self, **kwargs) - # ==================== High-Level Methods ==================== # These methods mirror the async API for consistency. diff --git a/caldav/davobject.py b/caldav/davobject.py index a9df2c45..98a64102 100644 --- a/caldav/davobject.py +++ b/caldav/davobject.py @@ -9,7 +9,7 @@ if TYPE_CHECKING: from .davclient import DAVClient -from collections.abc import Sequence +from collections.abc import Coroutine, Sequence if sys.version_info < (3, 11): from typing_extensions import Self @@ -110,7 +110,9 @@ def is_async_client(self) -> bool: # Use string check to avoid circular imports return type(self.client).__name__ == "AsyncDAVClient" - def children(self, type: str | None = None) -> list[tuple[URL, Any, Any]]: + def children( + self, type: str | None = None + ) -> "list[tuple[URL, Any, Any]] | Coroutine[Any, Any, list[tuple[URL, Any, Any]]]": """List children, using a propfind (resourcetype) on the parent object, at depth = 1. 
@@ -126,8 +128,6 @@ def children(self, type: str | None = None) -> list[tuple[URL, Any, Any]]: ## Late import to avoid circular imports from .collection import CalendarSet - c = [] - depth = 1 if self.url is None: @@ -136,14 +136,26 @@ def children(self, type: str | None = None) -> list[tuple[URL, Any, Any]]: props = [dav.DisplayName()] multiprops = [dav.ResourceType()] props_multiprops = props + multiprops + + if self.is_async_client: + return self._async_children(type, props_multiprops, depth) + response = self._query_properties(props_multiprops, depth) + return self._children_post_process(type, response) + + def _children_post_process(self, type_, response): + from .collection import CalendarSet + + c = [] + props = [dav.DisplayName()] + multiprops = [dav.ResourceType()] properties = response.expand_simple_props(props=props, multi_value_props=multiprops) for path in properties: resource_types = properties[path][dav.ResourceType.tag] resource_name = properties[path][dav.DisplayName.tag] - if type is None or type in resource_types: + if type_ is None or type_ in resource_types: url = URL(path) if url.hostname is None: # Quote when path is not a full URL @@ -154,7 +166,7 @@ def children(self, type: str | None = None) -> list[tuple[URL, Any, Any]]: # And why is the strip_trailing_slash-method needed? # The collection URL should always end with a slash according # to RFC 2518, section 5.2. 
- if (isinstance(self, CalendarSet) and type == cdav.Calendar.tag) or ( + if (isinstance(self, CalendarSet) and type_ == cdav.Calendar.tag) or ( self.url.canonical().strip_trailing_slash() != self.url.join(path).canonical().strip_trailing_slash() ): @@ -164,6 +176,11 @@ def children(self, type: str | None = None) -> list[tuple[URL, Any, Any]]: ## the properties we've already fetched return c + ## TODO: get the typehints correct + async def _async_children(self, type, props_multiprops, depth): + response = await self._query_properties(props_multiprops, depth) + return self._children_post_process(type, response) + def _build_xml_body(self, root) -> bytes | str: """Serialize a DAV element (or raw bytes/str) to a request body.""" if root: @@ -184,7 +201,9 @@ def _build_propfind_root(self, props): return dav.Propfind() + prop return None - def _query_properties(self, props: Sequence[BaseElement] | None = None, depth: int = 0): + def _query_properties( + self, props: Sequence[BaseElement] | None = None, depth: int = 0 + ) -> "Any | Coroutine[Any, Any, Any]": """ This is an internal method for doing a propfind query. It's a result of code-refactoring work, attempting to consolidate @@ -201,7 +220,7 @@ async def _async_query_properties( self, props: Sequence[BaseElement] | None = None, depth: int = 0 ): """Async implementation of _query_properties.""" - return await self._async_query(self._build_propfind_root(props), depth) + return await self._query(self._build_propfind_root(props), depth) def _query( self, @@ -210,7 +229,7 @@ def _query( query_method="propfind", url=None, expected_return_value=None, - ): + ) -> "Any | Coroutine[Any, Any, Any]": """ This is an internal method for doing a query. 
It's a result of code-refactoring work, attempting to consolidate @@ -261,15 +280,13 @@ async def _async_query( body = to_wire(body) if ret.status == 500 and b"D:getetag" not in body and b" str | None: + ) -> "str | None | Coroutine[Any, Any, str | None]": """ Wrapper for the :class:`get_properties`, when only one property is wanted @@ -278,29 +295,21 @@ def get_property( prop: the property to search for use_cached: don't send anything to the server if we've asked before + TODO: use_cached default is False, should probably be True? + Other parameters are sent directly to the :class:`get_properties` method For async clients, returns a coroutine that must be awaited. """ + if use_cached and prop.tag in self.props: + return self.client._value_or_coroutine(self.props[prop.tag]) if self.is_async_client: - return self._async_get_property(prop, use_cached, **passthrough) - - ## TODO: use_cached should probably be true - if use_cached: - if prop.tag in self.props: - return self.props[prop.tag] - foo = self.get_properties([prop], **passthrough) - return foo.get(prop.tag, None) + return self._async_get_property(prop, **passthrough) + return self.get_properties([prop], **passthrough).get(prop.tag, None) - async def _async_get_property( - self, prop: BaseElement, use_cached: bool = False, **passthrough - ) -> str | None: + async def _async_get_property(self, prop: BaseElement, **passthrough) -> str | None: """Async implementation of get_property.""" - if use_cached: - if prop.tag in self.props: - return self.props[prop.tag] - foo = await self._async_get_properties([prop], **passthrough) - return foo.get(prop.tag, None) + return (await self.get_properties([prop], **passthrough)).get(prop.tag, None) def _resolve_properties(self, properties: dict) -> dict: """Resolve the correct property dict from a PROPFIND response. 
@@ -366,7 +375,7 @@ def get_properties( depth: int = 0, parse_response_xml: bool = True, parse_props: bool = True, - ): + ) -> "dict | Any | Coroutine[Any, Any, dict | Any]": """Get properties (PROPFIND) for this object. With parse_response_xml and parse_props set to True a @@ -390,6 +399,10 @@ def get_properties( return self._async_get_properties(props, depth, parse_response_xml, parse_props) response = self._query_properties(props, depth) + return self._post_get_properties(response, props, parse_response_xml, parse_props) + + def _post_get_properties(self, response, props, parse_response_xml, parse_props): + """Shared post-processing for get_properties and _async_get_properties.""" if not parse_response_xml: return response @@ -415,7 +428,6 @@ def get_properties( properties = response.expand_simple_props(props) error.assert_(properties) - return self._resolve_properties(properties) async def _async_get_properties( @@ -426,36 +438,10 @@ async def _async_get_properties( parse_props: bool = True, ): """Async implementation of get_properties.""" - response = await self._async_query_properties(props, depth) - if not parse_response_xml: - return response + response = await self._query_properties(props, depth) + return self._post_get_properties(response, props, parse_response_xml, parse_props) - # Use protocol layer results when available and parse_props=True - if parse_props and response.results: - # Convert results to the expected {href: {tag: value}} format - properties = {} - for result in response.results: - # Start with None for all requested props (for backward compat) - result_props = {} - if props: - for prop in props: - if prop.tag: - result_props[prop.tag] = None - # Then overlay with actual values from server - result_props.update(result.properties) - properties[result.href] = result_props - elif not parse_props: - # Caller wants raw XML elements - use internal method - properties = response._find_objects_and_props() - else: - # Fallback to 
expand_simple_props for mocked responses - properties = response.expand_simple_props(props) - - error.assert_(properties) - - return self._resolve_properties(properties) - - def set_properties(self, props: Any | None = None) -> Self: + def set_properties(self, props: Any | None = None) -> "Self | Coroutine[Any, Any, Self]": """ Set properties (PROPPATCH) for this object. @@ -466,9 +452,6 @@ def set_properties(self, props: Any | None = None) -> Self: Returns: * self """ - if self.is_async_client: - return self._async_set_properties(props) - props = [] if props is None else props prop = dav.Prop() + props set_elem = dav.Set() + prop @@ -480,32 +463,19 @@ def set_properties(self, props: Any | None = None) -> Self: if self.client is None: raise ValueError("Unexpected value None for self.client") - r = self.client.proppatch(str(self.url), body) + if self.is_async_client: + return self._async_set_properties(body) + return self._post_set_properties(self.client.proppatch(str(self.url), body)) + + def _post_set_properties(self, r) -> Self: if r.status >= 400: raise error.PropsetError(errmsg(r)) - return self - async def _async_set_properties(self, props: Any | None = None) -> Self: + async def _async_set_properties(self, body) -> Self: """Async implementation of set_properties.""" - props = [] if props is None else props - prop = dav.Prop() + props - set_elem = dav.Set() + prop - root = dav.PropertyUpdate() + set_elem - body = self._build_xml_body(root) - - if self.url is None: - raise ValueError("Unexpected value None for self.url") - if self.client is None: - raise ValueError("Unexpected value None for self.client") - - r = await self.client.proppatch(str(self.url), body) - - if r.status >= 400: - raise error.PropsetError(errmsg(r)) - - return self + return self._post_set_properties(await self.client.proppatch(str(self.url), body)) def save(self) -> Self: """ @@ -517,7 +487,7 @@ def save(self) -> Self: """ raise NotImplementedError() - def delete(self) -> None: + def 
delete(self) -> "None | Coroutine[Any, Any, None]": """ Delete the object. @@ -530,28 +500,24 @@ def delete(self) -> None: Example (async): await obj.delete() """ - if self.url is not None: - if self.client is None: - raise ValueError("Unexpected value None for self.client") - - # Delegate to client for dual-mode support - if self.is_async_client: - return self._async_delete() - - r = self.client.delete(str(self.url)) + if self.url is None: + return + if self.client is None: + raise ValueError("Unexpected value None for self.client") + if self.is_async_client: + return self._async_delete() + # TODO: find out why we get 404 + self._post_delete(self.client.delete(str(self.url))) - # TODO: find out why we get 404 - if r.status not in (200, 204, 404): - raise error.DeleteError(errmsg(r)) + def _post_delete(self, r) -> None: + if r.status not in (200, 204, 404): + raise error.DeleteError(errmsg(r)) async def _async_delete(self) -> None: """Async implementation of delete.""" - if self.url is not None and self.client is not None: - r = await self.client.delete(str(self.url)) - if r.status not in (200, 204, 404): - raise error.DeleteError(errmsg(r)) + self._post_delete(await self.client.delete(str(self.url))) - def get_display_name(self): + def get_display_name(self) -> "str | None | Coroutine[Any, Any, str | None]": """ Get display name (calendar, principal, ...more?) 
""" diff --git a/caldav/response.py b/caldav/response.py index b3152ddb..56923f5f 100644 --- a/caldav/response.py +++ b/caldav/response.py @@ -18,13 +18,6 @@ from caldav.elements.base import BaseElement from caldav.lib import error from caldav.lib.python_utilities import to_normal_str -from caldav.protocol.xml_parsers import ( - _normalize_href, - _validate_status, -) -from caldav.protocol.xml_parsers import ( - _strip_to_multistatus as _proto_strip, -) if TYPE_CHECKING: # Protocol for HTTP response objects (works with httpx, niquests, requests) @@ -34,7 +27,320 @@ log = logging.getLogger(__name__) -class BaseDAVResponse: +# --------------------------------------------------------------------------- +# Result dataclasses (previously in protocol/types.py) +# --------------------------------------------------------------------------- + + +@dataclass +class PropfindResult: + """Parsed result of a PROPFIND request for a single resource.""" + + href: str + properties: dict[str, Any] = field(default_factory=dict) + status: int = 200 + + +@dataclass +class CalendarQueryResult: + """Parsed result of a calendar-query or calendar-multiget REPORT for a single object.""" + + href: str + etag: str | None = None + calendar_data: str | None = None + status: int = 200 + + +@dataclass +class SyncCollectionResult: + """Parsed result of a sync-collection REPORT.""" + + changed: list[CalendarQueryResult] = field(default_factory=list) + deleted: list[str] = field(default_factory=list) + sync_token: str | None = None + + +@dataclass +class MultistatusResponse: + """Parsed multi-status (207) response containing multiple PropfindResults.""" + + responses: list[PropfindResult] = field(default_factory=list) + sync_token: str | None = None + + +# --------------------------------------------------------------------------- +# XML parse helpers (previously in protocol/xml_parsers.py) +# --------------------------------------------------------------------------- + + +def _normalize_href(text: 
str) -> str:
+    """Normalize an href string from a DAV response element.
+
+    Handles the Confluence double-encoding bug (%2540 → %40) and converts
+    absolute URLs to path-only strings so callers always work with paths.
+    """
+    # Fix for https://github.com/python-caldav/caldav/issues/471
+    if "%2540" in text:
+        text = text.replace("%2540", "%40")
+    href = unquote(text)
+    # Ref https://github.com/python-caldav/caldav/issues/435
+    if ":" in href:
+        href = unquote(URL(href).path)
+    return href
+
+
+def _validate_status(status: str | None) -> None:
+    """Validate a status string like "HTTP/1.1 404 Not Found".
+
+    200, 201, 207 and 404 are considered acceptable statuses.
+    """
+    if status is None:
+        return
+    if not any(code in status for code in (" 200 ", " 201 ", " 207 ", " 404 ")):
+        raise error.ResponseError(status)
+
+
+def _status_to_code(status: str | None) -> int:
+    """Extract integer status code from a status string like "HTTP/1.1 200 OK"."""
+    if not status:
+        return 200
+    parts = status.split()
+    if len(parts) >= 2:
+        try:
+            return int(parts[1])
+        except ValueError:
+            pass
+    return 200
+
+
+def _strip_to_multistatus(tree: _Element) -> "_Element | list[_Element]":
+    """Strip outer elements to reach the multistatus response children.
+
+    The general format is ``<xml><multistatus><response>...</response></multistatus></xml>``,
+    but sometimes the multistatus and/or xml wrapper is absent.
+    """
+    if tree.tag == "xml" and len(tree) > 0 and tree[0].tag == dav.MultiStatus.tag:
+        return tree[0]
+    if tree.tag == dav.MultiStatus.tag:
+        return tree
+    return [tree]
+
+
+## TODO: _parse_response_element is a simplified version of DAVResponse._parse_response
+## (which adds assertions and handles Stalwart/purelymail quirks). The module-level parse
+## functions (_parse_multistatus etc.) use this simpler version because they are pure
+## functions with no access to a response instance. 
If the parse pipeline were refactored +## to work through the tree already stored on self (avoiding the re-parse in _raw_bytes), +## both of these could be unified into a single method. +def _parse_response_element( + response: _Element, +) -> "tuple[str, list[_Element], str | None]": + """Parse a single DAV:response element into (href, propstats, status).""" + status: str | None = None + href: str | None = None + propstats: list[_Element] = [] + for elem in response: + if elem.tag == dav.Status.tag: + status = elem.text + _validate_status(status) + elif elem.tag == dav.Href.tag: + href = _normalize_href(elem.text or "") + elif elem.tag == dav.PropStat.tag: + propstats.append(elem) + return (href or "", propstats, status) + + +def _extract_properties(propstats: "list[_Element]") -> "dict[str, Any]": + """Extract properties from propstat elements into a flat dict.""" + properties: dict[str, Any] = {} + for propstat in propstats: + status_elem = propstat.find(dav.Status.tag) + if status_elem is not None and status_elem.text and " 404 " in status_elem.text: + continue + prop = propstat.find(dav.Prop.tag) + if prop is None: + continue + for child in prop: + if len(child) == 0: + properties[child.tag] = child.text + else: + properties[child.tag] = _element_to_value(child) + return properties + + +def _element_to_value(elem: _Element) -> Any: + """Convert a complex XML element to a Python value.""" + if len(elem) == 0: + return elem.text + + tag = elem.tag + + if tag == cdav.SupportedCalendarComponentSet.tag: + return [child.get("name") for child in elem if child.get("name")] + + if tag == cdav.CalendarUserAddressSet.tag: + return [child.text for child in elem if child.tag == dav.Href.tag and child.text] + + if tag == cdav.CalendarHomeSet.tag: + hrefs = [child.text for child in elem if child.tag == dav.Href.tag and child.text] + return hrefs[0] if len(hrefs) == 1 else hrefs + + if tag == dav.ResourceType.tag: + return [child.tag for child in elem] + + if tag == 
dav.CurrentUserPrincipal.tag: + for child in elem: + if child.tag == dav.Href.tag and child.text: + return child.text + return None + + children_texts = [] + for child in elem: + if child.text: + children_texts.append(child.text) + elif child.get("name"): + children_texts.append(child.get("name")) + elif len(child) == 0: + children_texts.append(child.tag) + + if len(children_texts) == 1: + return children_texts[0] + elif children_texts: + return children_texts + + return elem + + +def _parse_multistatus(body: bytes, huge_tree: bool = False) -> MultistatusResponse: + """Parse a 207 Multi-Status response body into a MultistatusResponse.""" + parser = etree.XMLParser(huge_tree=huge_tree) + tree = etree.fromstring(body, parser) + + responses: list[PropfindResult] = [] + sync_token: str | None = None + + for elem in _strip_to_multistatus(tree): + if elem.tag == dav.SyncToken.tag: + sync_token = elem.text + continue + if elem.tag != dav.Response.tag: + continue + href, propstats, status = _parse_response_element(elem) + properties = _extract_properties(propstats) + responses.append( + PropfindResult( + href=href, + properties=properties, + status=_status_to_code(status) if status else 200, + ) + ) + + return MultistatusResponse(responses=responses, sync_token=sync_token) + + +def _parse_propfind_response( + body: bytes, status_code: int = 207, huge_tree: bool = False +) -> list[PropfindResult]: + """Parse a PROPFIND response body into a list of PropfindResult objects.""" + if status_code == 404: + return [] + if status_code not in (200, 207): + raise error.ResponseError(f"PROPFIND failed with status {status_code}") + if not body: + return [] + return _parse_multistatus(body, huge_tree=huge_tree).responses + + +def _parse_calendar_query_response( + body: bytes, status_code: int = 207, huge_tree: bool = False +) -> list[CalendarQueryResult]: + """Parse a calendar-query or calendar-multiget REPORT response.""" + if status_code not in (200, 207): + raise 
error.ResponseError(f"REPORT failed with status {status_code}") + if not body: + return [] + + parser = etree.XMLParser(huge_tree=huge_tree) + tree = etree.fromstring(body, parser) + results: list[CalendarQueryResult] = [] + + for elem in _strip_to_multistatus(tree): + if elem.tag != dav.Response.tag: + continue + href, propstats, status = _parse_response_element(elem) + calendar_data: str | None = None + etag: str | None = None + for propstat in propstats: + prop = propstat.find(dav.Prop.tag) + if prop is None: + continue + for child in prop: + if child.tag == cdav.CalendarData.tag: + calendar_data = child.text + elif child.tag == dav.GetEtag.tag: + etag = child.text + results.append( + CalendarQueryResult( + href=href, + etag=etag, + calendar_data=calendar_data, + status=_status_to_code(status) if status else 200, + ) + ) + + return results + + +def _parse_sync_collection_response( + body: bytes, status_code: int = 207, huge_tree: bool = False +) -> SyncCollectionResult: + """Parse a sync-collection REPORT response.""" + if status_code not in (200, 207): + raise error.ResponseError(f"sync-collection failed with status {status_code}") + if not body: + return SyncCollectionResult() + + parser = etree.XMLParser(huge_tree=huge_tree) + tree = etree.fromstring(body, parser) + changed: list[CalendarQueryResult] = [] + deleted: list[str] = [] + sync_token: str | None = None + + for elem in _strip_to_multistatus(tree): + if elem.tag == dav.SyncToken.tag: + sync_token = elem.text + continue + if elem.tag != dav.Response.tag: + continue + href, propstats, status = _parse_response_element(elem) + status_code_elem = _status_to_code(status) if status else 200 + if status_code_elem == 404: + deleted.append(href) + continue + calendar_data = None + etag = None + for propstat in propstats: + prop = propstat.find(dav.Prop.tag) + if prop is None: + continue + for child in prop: + if child.tag == cdav.CalendarData.tag: + calendar_data = child.text + elif child.tag == 
dav.GetEtag.tag: + etag = child.text + changed.append( + CalendarQueryResult( + href=href, + etag=etag, + calendar_data=calendar_data, + status=status_code_elem, + ) + ) + + return SyncCollectionResult(changed=changed, deleted=deleted, sync_token=sync_token) + + +class DAVResponse: """ Base class containing shared response parsing logic. @@ -42,7 +348,6 @@ class BaseDAVResponse: that are common to both sync and async DAV responses. """ - # These attributes should be set by subclass __init__ tree: _Element | None = None headers: Any = None status: int = 0 @@ -50,6 +355,11 @@ class BaseDAVResponse: huge_tree: bool = False reason: str = "" davclient: Any = None + results: list[PropfindResult | CalendarQueryResult] | None = None + _sync_token: str | None = None + + def __init__(self, response: "Response", davclient: Any = None) -> None: + self._init_from_response(response, davclient) def _init_from_response(self, response: "Response", davclient: Any = None) -> None: """ @@ -297,6 +607,19 @@ def _parse_scheduling_response(self, response) -> dict[str, str]: ret[recipient] = calendar_data return ret + @property + def sync_token(self): + try: + sync_token = self._sync_token + except AttributeError: + sync_token = None + if sync_token is None: + ## TODO: this should not be needed? + ## investigate! + tokens = self.tree.findall(".//" + dav.SyncToken.tag) if self.tree is not None else [] + sync_token = tokens[0].text if tokens else None + return sync_token + ## TODO: there is currently quite some overlapping with the ## protocol.xml_parsers we should refactor. I'm not 100% sure the ## protocol.xml_parsers layer is a better approach. 
Look for more @@ -317,7 +640,7 @@ def _find_objects_and_props(self) -> dict[str, dict[str, _Element]]: for r in responses: if r.tag == dav.SyncToken.tag: - self.sync_token = r.text + self._sync_token = r.text continue error.assert_(r.tag == dav.Response.tag) diff --git a/tests/test_async_davclient.py b/tests/test_async_davclient.py index 6b355ed0..7d327563 100644 --- a/tests/test_async_davclient.py +++ b/tests/test_async_davclient.py @@ -11,7 +11,7 @@ import pytest -from caldav.async_davclient import AsyncDAVClient, AsyncDAVResponse, get_davclient +from caldav.async_davclient import AsyncDAVClient, DAVResponse, get_davclient from caldav.lib import error # Sample XML responses for testing @@ -68,8 +68,8 @@ def create_mock_response( return resp -class TestAsyncDAVResponse: - """Tests for AsyncDAVResponse class.""" +class TestDAVResponse: + """Tests for DAVResponse class.""" def test_response_with_xml_content(self) -> None: """Test parsing XML response.""" @@ -80,7 +80,7 @@ def test_response_with_xml_content(self) -> None: headers={"Content-Type": "text/xml; charset=utf-8"}, ) - dav_response = AsyncDAVResponse(resp) + dav_response = DAVResponse(resp) assert dav_response.status == 207 assert dav_response.reason == "Multi-Status" @@ -96,7 +96,7 @@ def test_response_with_empty_content(self) -> None: headers={"Content-Length": "0"}, ) - dav_response = AsyncDAVResponse(resp) + dav_response = DAVResponse(resp) assert dav_response.status == 204 assert dav_response.tree is None @@ -110,7 +110,7 @@ def test_response_with_non_xml_content(self) -> None: headers={"Content-Type": "text/plain"}, ) - dav_response = AsyncDAVResponse(resp) + dav_response = DAVResponse(resp) assert dav_response.status == 200 assert dav_response.tree is None @@ -120,7 +120,7 @@ def test_response_raw_property(self) -> None: """Test raw property returns string.""" resp = create_mock_response(content=b"test content") - dav_response = AsyncDAVResponse(resp) + dav_response = DAVResponse(resp) assert 
isinstance(dav_response.raw, str) assert "test content" in dav_response.raw @@ -129,7 +129,7 @@ def test_response_crlf_normalization(self) -> None: """Test that CRLF is normalized to LF.""" resp = create_mock_response(content=b"line1\r\nline2\r\nline3") - dav_response = AsyncDAVResponse(resp) + dav_response = DAVResponse(resp) assert b"\r\n" not in dav_response._raw assert b"\n" in dav_response._raw @@ -289,7 +289,7 @@ async def test_request_method(self) -> None: response = await client.request("/test/path", "GET") - assert isinstance(response, AsyncDAVResponse) + assert isinstance(response, DAVResponse) assert response.status == 207 client.session.request.assert_called_once() @@ -562,7 +562,7 @@ async def test_get_davclient_basic(self) -> None: status_code=200, headers=SAMPLE_OPTIONS_HEADERS, ) - mock_response_obj = AsyncDAVResponse(mock_response) + mock_response_obj = DAVResponse(mock_response) mock_options.return_value = mock_response_obj client = await get_davclient( diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index 77e9da38..a32f3a9d 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -567,8 +567,6 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: """send a calendar invite via save_with_invites and verify delivery. Async counterpart of _TestSchedulingBase.testInviteAndRespond. - NOTE: inbox listing uses get_events() as a workaround since - ScheduleMailbox.get_items() does not yet have async support. 
""" import uuid @@ -580,9 +578,9 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: inbox0 = await principals[0].schedule_inbox() inbox1 = await principals[1].schedule_inbox() inbox_urls_before: set[Any] = set() - for item in await inbox0.get_events(): + for item in await inbox0.get_items(): inbox_urls_before.add(item.url) - for item in await inbox1.get_events(): + for item in await inbox1.get_items(): inbox_urls_before.add(item.url) ## Send the invite @@ -610,12 +608,14 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: auto_scheduled = False for _ in range(30): new_attendee_inbox_items = [ - item for item in await inbox1.get_events() if item.url not in inbox_urls_before + item for item in await inbox1.get_items() if item.url not in inbox_urls_before ] ## Check whether the server auto-scheduled the event directly into ## the attendee's calendar. The event may land in any calendar, ## so search all attendee calendars for the event UID. - if not new_attendee_inbox_items: + ## Always check even when inbox items were found: some servers (e.g. + ## Davis/sabre/dav) deliver iTIP to the inbox AND auto-schedule. + if not auto_scheduled: for cal in await principals[1].calendars(): for event in await cal.get_events(): if event.id == event_uid: @@ -640,23 +640,19 @@ async def test_invite_and_respond(self, scheduling_setup: Any) -> None: ## Normal inbox-delivery flow (RFC6638 section 4.1). ## No new inbox items expected for principals[0] yet - for item in await inbox0.get_events(): + for item in await inbox0.get_items(): assert item.url in inbox_urls_before assert len(new_attendee_inbox_items) == 1 assert new_attendee_inbox_items[0].is_invite_request() - ## Approving the invite. accept_invite() is not yet implemented for - ## async clients; skip rather than fail so the test can be extended later. 
- try: - new_attendee_inbox_items[0].accept_invite(calendar=calendars[1]) - except NotImplementedError: - pytest.skip("accept_invite() not yet supported for async clients") + ## Approving the invite. + await new_attendee_inbox_items[0].accept_invite(calendar=calendars[1]) ## principals[0] should now have a notification in the inbox that the ## calendar invite was accepted new_organizer_inbox_items = [ - item for item in await inbox0.get_events() if item.url not in inbox_urls_before + item for item in await inbox0.get_items() if item.url not in inbox_urls_before ] assert len(new_organizer_inbox_items) == 1 assert new_organizer_inbox_items[0].is_invite_reply() @@ -735,13 +731,12 @@ async def test_schedule_tag_stable_on_partstate_update(self, scheduling_setup: A """PARTSTAT-only update must not change the Schedule-Tag. Async counterpart of testScheduleTagStableOnPartstateUpdate. - Expected to fail: accept_invite() raises NotImplementedError for - async clients. """ import uuid clients, principals, calendars, auto_uids = scheduling_setup self._skip_unless_support("scheduling.schedule-tag") + self._skip_unless_support("scheduling.schedule-tag.stable-partstat") if len(principals) < 2: pytest.skip("need 2 principals") if not clients[1].features.is_supported("scheduling.mailbox.inbox-delivery"): @@ -783,8 +778,7 @@ async def test_schedule_tag_stable_on_partstate_update(self, scheduling_setup: A if not invite: pytest.skip("Invite not delivered to attendee inbox; cannot test PARTSTAT stability") - ## accept_invite is not yet implemented for async clients - invite.accept_invite(calendar=attendee_cal) + await invite.accept_invite(calendar=attendee_cal) ## Find the attendee's copy attendee_event = None @@ -804,8 +798,10 @@ async def test_schedule_tag_stable_on_partstate_update(self, scheduling_setup: A tag_before = attendee_event.schedule_tag assert tag_before is not None, "No Schedule-Tag on attendee's calendar event after accept" - ## PARTSTAT-only change — tag must not move 
- attendee_event.change_attendee_status(partstat="TENTATIVE") + ## PARTSTAT-only change — tag must not move. + ## Pass attendee_addr explicitly: without an arg, change_attendee_status() resolves + ## the principal via self.client.principal(), which returns a coroutine in async mode. + attendee_event.change_attendee_status(str(attendee_addr), partstat="TENTATIVE") await attendee_event.save() await attendee_event.load() tag_after = attendee_event.schedule_tag diff --git a/tests/test_caldav.py b/tests/test_caldav.py index b2ff8207..79d68126 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -985,6 +985,7 @@ def testScheduleTagStableOnPartstateUpdate(self): The tag before and after a PARTSTAT-only PUT must be identical. """ self._skip_unless_support("scheduling.schedule-tag") + self._skip_unless_support("scheduling.schedule-tag.stable-partstat") if len(self.principals) < 2: pytest.skip("need 2 principals") if not self.clients[1].features.is_supported("scheduling.mailbox.inbox-delivery"): @@ -1577,7 +1578,8 @@ def testSupport(self): self.skip_on_compatibility_flag("dav_not_supported") assert self.caldav.check_dav_support() assert self.caldav.check_cdav_support() - assert self.caldav.check_scheduling_support() == self.is_supported("scheduling") + if self.is_supported("scheduling", str) != "unknown": + assert self.caldav.check_scheduling_support() == self.is_supported("scheduling") def testSchedulingInfo(self): self.skip_unless_support("scheduling.calendar-user-address-set") diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index 3df2449c..1dc515fb 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -2189,12 +2189,17 @@ def test_set_relation_returns_coroutine_for_async_client(self): ) result.close() - def test_accept_invite_raises_not_implemented_for_async_client(self): - """accept_invite() must raise Notimplemented for async clients (not silently fail).""" + def 
test_accept_invite_returns_coroutine_for_async_client(self):
+        """accept_invite() must return a coroutine for async clients."""
+        import asyncio
+
         client, calendar = self._make_async_client_and_calendar()
         event = Event(client=client, url="/calendar/ev1.ics", data=ev1, parent=calendar)
-        with pytest.raises(NotImplementedError):
-            event.accept_invite()
+        result = event.accept_invite()
+        assert asyncio.iscoroutine(result), (
+            f"expected coroutine from accept_invite(), got {type(result)}"
+        )
+        result.close()

     def test_add_organizer_explicit_arg_is_sync_safe_for_async_client(self):
         """add_organizer(explicit_arg) is pure in-memory: no network call, no await needed.

From ef29a86dce6a8f14d4e1a7faa04f343245724e23 Mon Sep 17 00:00:00 2001
From: Tobias Brox
Date: Wed, 22 Apr 2026 10:51:03 +0200
Subject: [PATCH 14/17] refactor: scrap operations and protocol

After an audit of caldav/operations/ and caldav/protocol/ (documented in docs/design/OPERATIONS_PROTOCOL_AUDIT.md), both directories are deleted; the code that was in use has been moved elsewhere.

The code changes are predominantly AI-written. Tedious refactoring work, chances for mistakes are bigger when doing it by hand than by AI. I've been looking through the changes, and I trust the tests to uncover any errors slipping through.

prompt: During the great attempt on Sans-IO refactoring, a directory `caldav/operations/` was made. Please check up how much code is duplicated and/or dead there, and come with recommendations on whether to keep "operations" there or not. Same with the protocols folder.

followup-prompt: Save the analysis to the docs/design folder

followup-prompt: Kill the operations directory ref the document

followup-prompt: All response-related logic in the protocol directory should be moved back to the response class. Make sure there is no duplicated code or logic. 
followup-prompt: move xml builders to the dav base client, and ensure sync and async code paths uses the same builder methods prompt: Deal with the code duplication in response.py followup-prompt: It seems like the last commit, with purpose "remove code duplication in response.py" has more code additions than removed code? Co-Authored-By: Claude Sonnet 4.6 --- caldav/async_davclient.py | 50 +-- caldav/base_client.py | 187 +++++++- caldav/calendarobjectresource.py | 12 +- caldav/collection.py | 112 ++++- caldav/davclient.py | 22 +- caldav/operations/__init__.py | 62 --- caldav/operations/base.py | 189 -------- caldav/operations/calendar_ops.py | 261 ----------- caldav/operations/calendarobject_ops.py | 540 ----------------------- caldav/operations/calendarset_ops.py | 245 ---------- caldav/operations/davobject_ops.py | 293 ------------ caldav/operations/principal_ops.py | 162 ------- caldav/operations/search_ops.py | 453 ------------------- caldav/protocol/__init__.py | 44 -- caldav/protocol/types.py | 221 ---------- caldav/protocol/xml_builders.py | 346 --------------- caldav/protocol/xml_parsers.py | 468 -------------------- caldav/response.py | 296 ++++++------- caldav/search.py | 214 ++++++++- docs/design/OPERATIONS_PROTOCOL_AUDIT.md | 165 +++++++ tests/test_caldav_unit.py | 16 +- tests/test_operations_base.py | 192 -------- tests/test_operations_calendar.py | 329 -------------- tests/test_operations_calendarobject.py | 529 ---------------------- tests/test_operations_calendarset.py | 277 ------------ tests/test_operations_davobject.py | 288 ------------ tests/test_operations_principal.py | 242 ---------- tests/test_protocol.py | 115 ++--- 28 files changed, 838 insertions(+), 5492 deletions(-) delete mode 100644 caldav/operations/__init__.py delete mode 100644 caldav/operations/base.py delete mode 100644 caldav/operations/calendar_ops.py delete mode 100644 caldav/operations/calendarobject_ops.py delete mode 100644 caldav/operations/calendarset_ops.py delete mode 
100644 caldav/operations/davobject_ops.py delete mode 100644 caldav/operations/principal_ops.py delete mode 100644 caldav/operations/search_ops.py delete mode 100644 caldav/protocol/__init__.py delete mode 100644 caldav/protocol/types.py delete mode 100644 caldav/protocol/xml_builders.py delete mode 100644 caldav/protocol/xml_parsers.py create mode 100644 docs/design/OPERATIONS_PROTOCOL_AUDIT.md delete mode 100644 tests/test_operations_base.py delete mode 100644 tests/test_operations_calendar.py delete mode 100644 tests/test_operations_calendarobject.py delete mode 100644 tests/test_operations_calendarset.py delete mode 100644 tests/test_operations_davobject.py delete mode 100644 tests/test_operations_principal.py diff --git a/caldav/async_davclient.py b/caldav/async_davclient.py index 3b76cad3..7cef0cc5 100644 --- a/caldav/async_davclient.py +++ b/caldav/async_davclient.py @@ -71,6 +71,8 @@ def auth_flow(self, request): from caldav.compatibility_hints import FeatureSet from caldav.lib import error from caldav.lib.python_utilities import to_wire +from caldav.lib.url import URL + from caldav.requests import HTTPBearerAuth from caldav.response import CalendarQueryResult, DAVResponse, PropfindResult @@ -517,19 +519,13 @@ async def propfind( """ # Use protocol layer to build XML if props provided if props is not None and not body: - body = _build_propfind_body(props).decode("utf-8") + body = self._build_propfind_body(props).decode("utf-8") final_headers = self._build_method_headers("PROPFIND", depth, headers) response = await self.request(url or str(self.url), "PROPFIND", body, final_headers) - # Parse response using protocol layer if response.status in (200, 207) and response._raw: - raw_bytes = ( - response._raw if isinstance(response._raw, bytes) else response._raw.encode("utf-8") - ) - response.results = _parse_propfind_response( - raw_bytes, response.status, response.huge_tree - ) + response.results = response.parse_propfind() return response @@ -725,7 +721,7 @@ 
async def calendar_query( DAVResponse with results containing List[CalendarQueryResult]. """ - body, _ = _build_calendar_query_body( + body, _ = self._build_calendar_query_body( start=start, end=end, event=event, @@ -739,14 +735,8 @@ async def calendar_query( url or str(self.url), "REPORT", body.decode("utf-8"), final_headers ) - # Parse response using protocol layer if response.status in (200, 207) and response._raw: - raw_bytes = ( - response._raw if isinstance(response._raw, bytes) else response._raw.encode("utf-8") - ) - response.results = _parse_calendar_query_response( - raw_bytes, response.status, response.huge_tree - ) + response.results = response.parse_calendar_query() return response @@ -769,21 +759,15 @@ async def calendar_multiget( Returns: DAVResponse with results containing List[CalendarQueryResult]. """ - body = _build_calendar_multiget_body(hrefs or []) + body = self._build_calendar_multiget_body(hrefs or []) final_headers = self._build_method_headers("REPORT", depth, headers) response = await self.request( url or str(self.url), "REPORT", body.decode("utf-8"), final_headers ) - # Parse response using protocol layer if response.status in (200, 207) and response._raw: - raw_bytes = ( - response._raw if isinstance(response._raw, bytes) else response._raw.encode("utf-8") - ) - response.results = _parse_calendar_query_response( - raw_bytes, response.status, response.huge_tree - ) + response.results = response.parse_calendar_query() return response @@ -808,21 +792,15 @@ async def sync_collection( Returns: DAVResponse with results containing SyncCollectionResult. 
""" - body = _build_sync_collection_body(sync_token=sync_token, props=props) + body = self._build_sync_collection_body(sync_token=sync_token, props=props) final_headers = self._build_method_headers("REPORT", depth, headers) response = await self.request( url or str(self.url), "REPORT", body.decode("utf-8"), final_headers ) - # Parse response using protocol layer if response.status in (200, 207) and response._raw: - raw_bytes = ( - response._raw if isinstance(response._raw, bytes) else response._raw.encode("utf-8") - ) - sync_result = _parse_sync_collection_response( - raw_bytes, response.status, response.huge_tree - ) + sync_result = response.parse_sync_collection() response.results = sync_result.changed response.sync_token = sync_result.sync_token @@ -934,12 +912,12 @@ async def get_calendars(self, principal: Optional["Principal"] = None) -> list[" print(f"Calendar: {cal.get_display_name()}") """ from caldav.collection import Calendar - from caldav.operations.calendarset_ops import ( - _extract_calendars_from_propfind_results as extract_calendars, - ) - from caldav.operations.principal_ops import ( + from caldav.collection import ( _extract_calendar_home_set_from_results as extract_home_set, ) + from caldav.collection import ( + _extract_calendars_from_propfind_results as extract_calendars, + ) if principal is None: principal = await self.get_principal() diff --git a/caldav/base_client.py b/caldav/base_client.py index 0afeab64..d1974bb7 100644 --- a/caldav/base_client.py +++ b/caldav/base_client.py @@ -10,8 +10,13 @@ import logging from abc import ABC, abstractmethod from collections.abc import Mapping +from datetime import datetime from typing import TYPE_CHECKING, Any, NoReturn +from lxml import etree + +from caldav.elements import cdav, dav +from caldav.elements.base import BaseElement from caldav.lib import error from caldav.lib.auth import extract_auth_types, select_auth_type from caldav.lib.python_utilities import to_normal_str @@ -26,6 +31,42 @@ ICALH = 
{"Content-Type": 'text/calendar; charset="utf-8"'} +def _prop_name_to_element(name: str, value: Any | None = None) -> BaseElement | None: + """Convert a property name string (plain or Clark-notation) to a DAV element object.""" + dav_props: dict[str, Any] = { + "displayname": dav.DisplayName, + "resourcetype": dav.ResourceType, + "getetag": dav.GetEtag, + "current-user-principal": dav.CurrentUserPrincipal, + "owner": dav.Owner, + "sync-token": dav.SyncToken, + "supported-report-set": dav.SupportedReportSet, + } + caldav_props: dict[str, Any] = { + "calendar-data": cdav.CalendarData, + "calendar-home-set": cdav.CalendarHomeSet, + "calendar-user-address-set": cdav.CalendarUserAddressSet, + "calendar-user-type": cdav.CalendarUserType, + "calendar-description": cdav.CalendarDescription, + "calendar-timezone": cdav.CalendarTimeZone, + "supported-calendar-component-set": cdav.SupportedCalendarComponentSet, + "schedule-inbox-url": cdav.ScheduleInboxURL, + "schedule-outbox-url": cdav.ScheduleOutboxURL, + } + # Strip Clark-notation namespace prefix: "{DAV:}displayname" → "displayname" + if name.startswith("{") and "}" in name: + name = name.split("}", 1)[1] + name_lower = name.lower().replace("_", "-") + for props_dict in (dav_props, caldav_props): + if name_lower in props_dict: + cls = props_dict[name_lower] + try: + return cls(value) if value is not None else cls() + except TypeError: + return cls() + return None + + class BaseDAVClient(ABC): """ Base class for DAV clients providing shared authentication and configuration logic. 
@@ -214,12 +255,148 @@ def _raise_authorization_error(self, url_str: str, reason_source: Any) -> NoRetu reason = "None given" raise error.AuthorizationError(url=url_str, reason=reason) - def _build_principal_search_query(self, name: str | None) -> bytes: - """Build the XML body for a principal-property-search REPORT.""" - from lxml import etree - - from caldav.elements import cdav, dav + # ── XML builders ────────────────────────────────────────────────────────── + # All methods are static: no I/O, no server interaction, pure data + # transformation. Both DAVClient and AsyncDAVClient inherit these so + # every code path that builds request XML uses the same implementation. + + @staticmethod + def _build_propfind_body( + props: list[str] | None = None, + allprop: bool = False, + ) -> bytes: + """Build PROPFIND request body XML.""" + if allprop: + propfind = dav.Propfind() + dav.Allprop() + elif props: + prop_elements = [e for name in props if (e := _prop_name_to_element(name)) is not None] + propfind = dav.Propfind() + (dav.Prop() + prop_elements) + else: + propfind = dav.Propfind() + dav.Prop() + return etree.tostring(propfind.xmlelement(), encoding="utf-8", xml_declaration=True) + + @staticmethod + def _build_proppatch_body(set_props: dict[str, Any] | None = None) -> bytes: + """Build PROPPATCH request body for setting properties.""" + propertyupdate = dav.PropertyUpdate() + if set_props: + set_elements = [ + e + for name, value in set_props.items() + if (e := _prop_name_to_element(name, value)) is not None + ] + if set_elements: + propertyupdate += dav.Set() + (dav.Prop() + set_elements) + return etree.tostring(propertyupdate.xmlelement(), encoding="utf-8", xml_declaration=True) + + @staticmethod + def _build_calendar_query_body( + start: datetime | None = None, + end: datetime | None = None, + expand: bool = False, + comp_filter: str | None = None, + event: bool = False, + todo: bool = False, + journal: bool = False, + props: list[BaseElement] | None = None, + 
filters: list[BaseElement] | None = None, + ) -> tuple[bytes, str | None]: + """Build calendar-query REPORT request body. + + Returns (XML bytes, component type name or None). + """ + data = cdav.CalendarData() + if expand: + if not start or not end: + raise error.ReportError("can't expand without a date range") + data += cdav.Expand(start, end) + + props_list: list[BaseElement] = [data] + (list(props) if props else []) + prop = dav.Prop() + props_list + + vcalendar = cdav.CompFilter("VCALENDAR") + comp_type = comp_filter or ( + "VEVENT" if event else "VTODO" if todo else "VJOURNAL" if journal else None + ) + filter_list: list[BaseElement] = list(filters) if filters else [] + if start or end: + filter_list.append(cdav.TimeRange(start, end)) + + if comp_type: + comp_filter_elem = cdav.CompFilter(comp_type) + if filter_list: + comp_filter_elem += filter_list + vcalendar += comp_filter_elem + elif filter_list: + vcalendar += filter_list + + root = cdav.CalendarQuery() + [prop, cdav.Filter() + vcalendar] + return ( + etree.tostring(root.xmlelement(), encoding="utf-8", xml_declaration=True), + comp_type, + ) + @staticmethod + def _build_calendar_multiget_body( + hrefs: list[str], + include_data: bool = True, + ) -> bytes: + """Build calendar-multiget REPORT request body.""" + elements: list[BaseElement] = [] + if include_data: + elements.append(dav.Prop() + cdav.CalendarData()) + for href in hrefs: + elements.append(dav.Href(href)) + multiget = cdav.CalendarMultiGet() + elements + return etree.tostring(multiget.xmlelement(), encoding="utf-8", xml_declaration=True) + + @staticmethod + def _build_sync_collection_body( + sync_token: str | None = None, + props: list[str] | None = None, + sync_level: str = "1", + ) -> bytes: + """Build sync-collection REPORT request body.""" + elements: list[BaseElement] = [ + dav.SyncToken(value=sync_token or ""), + dav.SyncLevel(value=sync_level), + ] + if props: + prop_elements = [e for name in props if (e := _prop_name_to_element(name)) 
is not None] + if prop_elements: + elements.append(dav.Prop() + prop_elements) + else: + elements.append(dav.Prop() + [dav.GetEtag(), cdav.CalendarData()]) + sync_collection = dav.SyncCollection() + elements + return etree.tostring(sync_collection.xmlelement(), encoding="utf-8", xml_declaration=True) + + @staticmethod + def _build_mkcalendar_body( + displayname: str | None = None, + description: str | None = None, + timezone: str | None = None, + supported_components: list[str] | None = None, + ) -> bytes: + """Build MKCALENDAR request body.""" + prop = dav.Prop() + if displayname: + prop += dav.DisplayName(displayname) + if description: + prop += cdav.CalendarDescription(description) + if timezone: + prop += cdav.CalendarTimeZone(timezone) + if supported_components: + sccs = cdav.SupportedCalendarComponentSet() + for comp in supported_components: + sccs += cdav.Comp(comp) + prop += sccs + prop += dav.ResourceType() + [dav.Collection(), cdav.Calendar()] + mkcalendar = cdav.Mkcalendar() + (dav.Set() + prop) + return etree.tostring(mkcalendar.xmlelement(), encoding="utf-8", xml_declaration=True) + + @staticmethod + def _build_principal_search_query(name: str | None) -> bytes: + """Build the XML body for a principal-property-search REPORT.""" name_filter = ( [dav.PropertySearch() + [dav.Prop() + [dav.DisplayName()]] + dav.Match(value=name)] if name diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py index 369d5314..ffdca8b8 100644 --- a/caldav/calendarobjectresource.py +++ b/caldav/calendarobjectresource.py @@ -18,7 +18,7 @@ from collections import defaultdict from datetime import datetime, timedelta, timezone from typing import TYPE_CHECKING, Any, ClassVar, Optional -from urllib.parse import ParseResult, SplitResult +from urllib.parse import ParseResult, SplitResult, quote import icalendar from dateutil.rrule import rrulestr @@ -47,11 +47,19 @@ from .lib.error import errmsg from .lib.python_utilities import to_normal_str, to_unicode, 
to_wire from .lib.url import URL -from .operations.calendarobject_ops import _quote_uid log = logging.getLogger("caldav") +def _quote_uid(uid: str) -> str: + """URL-quote a UID for use in a CalDAV object URL. + + Slashes are double-quoted (replaced with %2F before percent-encoding) + per https://github.com/python-caldav/caldav/issues/143. + """ + return quote(uid.replace("/", "%2F")) + + class CalendarObjectResource(DAVObject): """Ref RFC 4791, section 4.1, a "Calendar Object Resource" can be an event, a todo-item, a journal entry, or a free/busy entry diff --git a/caldav/collection.py b/caldav/collection.py index 3e7df983..b16d1d20 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -13,11 +13,12 @@ import logging import uuid import warnings +from dataclasses import dataclass from datetime import date as _date from datetime import datetime, timezone from time import sleep from typing import TYPE_CHECKING, Any, Optional, TypeVar -from urllib.parse import ParseResult, SplitResult, quote, unquote +from urllib.parse import ParseResult, SplitResult, quote, unquote, urlparse, urlunparse import icalendar @@ -48,6 +49,89 @@ log = logging.getLogger("caldav") +# --------------------------------------------------------------------------- +# Helpers for extracting calendar / principal info from PROPFIND results. +# These were previously in caldav/operations/calendarset_ops.py and +# caldav/operations/principal_ops.py. 
+# --------------------------------------------------------------------------- + + +@dataclass +class CalendarInfo: + """Data for a calendar extracted from a PROPFIND response.""" + + url: str + cal_id: str | None + name: str | None + resource_types: list[str] + + +def _extract_calendar_id_from_url(url: str) -> str | None: + try: + parts = str(url).rstrip("/").split("/") + if parts: + cal_id = parts[-1] + if cal_id: + return cal_id + except Exception: + log.error(f"Calendar has unexpected url {url}") + return None + + +def _quote_url_path(url: str) -> str: + """Quote the path component of a URL to handle unencoded spaces (e.g. Zimbra).""" + parsed = urlparse(url) + quoted_path = quote(unquote(parsed.path), safe="/@") + return urlunparse(parsed._replace(path=quoted_path)) + + +def _is_calendar_resource(properties: dict[str, Any]) -> bool: + rt = properties.get("{DAV:}resourcetype", []) + if not isinstance(rt, list): + rt = [rt] if rt else [] + return "{urn:ietf:params:xml:ns:caldav}calendar" in rt + + +def _extract_calendars_from_propfind_results(results: list[Any] | None) -> list[CalendarInfo]: + """Extract CalendarInfo objects from a list of PropfindResult objects.""" + calendars = [] + for result in results or []: + if not _is_calendar_resource(result.properties): + continue + url = _quote_url_path(result.href) + name = result.properties.get("{DAV:}displayname") + cal_id = _extract_calendar_id_from_url(url) + if not cal_id: + continue + calendars.append( + CalendarInfo( + url=url, + cal_id=cal_id, + name=name, + resource_types=result.properties.get("{DAV:}resourcetype", []), + ) + ) + return calendars + + +def _sanitize_calendar_home_set_url(url: str | None) -> str | None: + """Quote @ in owncloud-style URLs that are not full URLs.""" + if url is None: + return None + if "@" in url and "://" not in url and "%40" not in url: + return quote(url) + return url + + +def _extract_calendar_home_set_from_results(results: list[Any] | None) -> str | None: + """Extract 
calendar-home-set URL from a list of PropfindResult objects.""" + for result in results or []: + home_set = result.properties.get("{urn:ietf:params:xml:ns:caldav}calendar-home-set") + if home_set: + return _sanitize_calendar_home_set_url(home_set) + return None + + class CalendarSet(DAVObject): """ A CalendarSet is a set of calendars. @@ -55,10 +139,6 @@ class CalendarSet(DAVObject): def _calendars_from_results(self, results) -> list["Calendar"]: """Convert PropfindResult list into Calendar objects.""" - from caldav.operations.calendarset_ops import ( - _extract_calendars_from_propfind_results, - ) - calendar_infos = _extract_calendars_from_propfind_results(results) return [ Calendar(client=self.client, url=info.url, name=info.name, id=info.cal_id, parent=self) @@ -1700,6 +1780,8 @@ def get_objects_by_sync_token( the server truly supports sync tokens. """ if self.is_async_client: + ## TODO: lots of code duplication here. It's difficult, since there is a lot of + ## forth and back between the client and the server in this method. return self._async_get_objects_by_sync_token(sync_token, load_objects, disable_fallback) ## Check if we should attempt to use sync tokens @@ -1716,11 +1798,9 @@ def get_objects_by_sync_token( if use_sync_token: try: - cmd = dav.SyncCollection() - token = dav.SyncToken(value=sync_token) - level = dav.SyncLevel(value="1") - props = dav.Prop() + dav.GetEtag() - root = cmd + [level, token, props] + root = self.client._build_sync_collection_body( + sync_token=sync_token, props=["getetag"] + ) (response, objects) = self._request_report_build_resultlist( root, props=[dav.GetEtag()], no_calendardata=True ) @@ -1827,6 +1907,10 @@ async def _async_get_objects_by_sync_token( disable_fallback: bool = False, ) -> "SynchronizableCalendarObjectCollection": """Async implementation of get_objects_by_sync_token.""" + + ## TODO: lots of code duplication here. 
It's difficult, since there is a lot of + ## forth and back between the client and the server in this method. + use_sync_token = True sync_support = self.client.features.is_supported("sync-token", return_type=dict) if sync_support.get("support") == "unsupported": @@ -1838,11 +1922,9 @@ async def _async_get_objects_by_sync_token( if use_sync_token: try: - cmd = dav.SyncCollection() - token = dav.SyncToken(value=sync_token) - level = dav.SyncLevel(value="1") - props = dav.Prop() + dav.GetEtag() - root = cmd + [level, token, props] + root = self.client._build_sync_collection_body( + sync_token=sync_token, props=["getetag"] + ) (response, objects) = await self._request_report_build_resultlist( root, props=[dav.GetEtag()], no_calendardata=True ) diff --git a/caldav/davclient.py b/caldav/davclient.py index a8161b07..11e772d1 100644 --- a/caldav/davclient.py +++ b/caldav/davclient.py @@ -466,12 +466,12 @@ def get_calendars(self, principal: Principal | None = None) -> list[Calendar]: for cal in calendars: print(f"Calendar: {cal.get_display_name()}") """ - from caldav.operations.calendarset_ops import ( - _extract_calendars_from_propfind_results as extract_calendars, - ) - from caldav.operations.principal_ops import ( + from caldav.collection import ( _extract_calendar_home_set_from_results as extract_home_set, ) + from caldav.collection import ( + _extract_calendars_from_propfind_results as extract_calendars, + ) if principal is None: principal = self.principal() @@ -668,13 +668,11 @@ def propfind( ------- DAVResponse """ - from caldav.protocol.xml_builders import _build_propfind_body - # Handle both old interface (props=xml_string) and new interface (props=list) body = "" if props is not None: if isinstance(props, list): - body = _build_propfind_body(props).decode("utf-8") + body = self._build_propfind_body(props).decode("utf-8") else: body = props # Old interface: props is XML string @@ -682,16 +680,8 @@ def propfind( headers = {"Depth": str(depth)} response = 
self.request(url or str(self.url), "PROPFIND", body, headers) - # Parse response using protocol layer if response.status in (200, 207) and response._raw: - from caldav.protocol.xml_parsers import _parse_propfind_response - - raw_bytes = ( - response._raw if isinstance(response._raw, bytes) else response._raw.encode("utf-8") - ) - response.results = _parse_propfind_response( - raw_bytes, response.status, response.huge_tree - ) + response.results = response.parse_propfind() return response def proppatch(self, url: str, body: str, dummy: None = None) -> DAVResponse: diff --git a/caldav/operations/__init__.py b/caldav/operations/__init__.py deleted file mode 100644 index 008610b1..00000000 --- a/caldav/operations/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Operations Layer - Sans-I/O Business Logic for CalDAV. - -This package contains pure functions that implement CalDAV business logic -without performing any network I/O. Both sync (DAVClient) and async -(AsyncDAVClient) clients use these same functions. - -Architecture: - ┌─────────────────────────────────────┐ - │ DAVClient / AsyncDAVClient │ - │ (handles I/O) │ - ├─────────────────────────────────────┤ - │ Operations Layer (this package) │ - │ - _build_*() -> QuerySpec │ - │ - _process_*() -> Result data │ - │ - Pure functions, no I/O │ - ├─────────────────────────────────────┤ - │ Protocol Layer (caldav.protocol) │ - │ - XML building and parsing │ - └─────────────────────────────────────┘ - -The functions in this layer are private (prefixed with _) and should be -imported directly from the submodules when needed. Only data types are -exported from this package. 
- -Modules: - base: Common utilities and base types - davobject_ops: DAVObject operations (properties, children, delete) - calendarobject_ops: CalendarObjectResource operations (load, save, ical manipulation) - principal_ops: Principal operations (discovery, calendar home set) - calendarset_ops: CalendarSet operations (list calendars, make calendar) - calendar_ops: Calendar operations (search, multiget, sync) - search_ops: Search operations (query building, filtering, strategy) -""" - -from caldav.operations.base import PropertyData, QuerySpec -from caldav.operations.calendar_ops import CalendarObjectInfo -from caldav.operations.calendarobject_ops import CalendarObjectData -from caldav.operations.calendarset_ops import CalendarInfo -from caldav.operations.davobject_ops import ChildData, ChildrenQuery, PropertiesResult -from caldav.operations.principal_ops import PrincipalData -from caldav.operations.search_ops import SearchStrategy - -__all__ = [ - # Base types - "QuerySpec", - "PropertyData", - # DAVObject types - "ChildrenQuery", - "ChildData", - "PropertiesResult", - # CalendarObjectResource types - "CalendarObjectData", - # Principal types - "PrincipalData", - # CalendarSet types - "CalendarInfo", - # Calendar types - "CalendarObjectInfo", - # Search types - "SearchStrategy", -] diff --git a/caldav/operations/base.py b/caldav/operations/base.py deleted file mode 100644 index ab4d684d..00000000 --- a/caldav/operations/base.py +++ /dev/null @@ -1,189 +0,0 @@ -""" -Base utilities for the operations layer. - -This module provides foundational types and utilities used by all -operations modules. The operations layer contains pure functions -(Sans-I/O) that handle business logic without performing any network I/O. 
- -Design principles: -- All functions are pure: same inputs always produce same outputs -- No network I/O - that's the client's responsibility -- Request specs describe WHAT to request, not HOW -- Response processors transform parsed data into domain-friendly formats -""" - -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import Any - -from caldav.lib.url import URL - - -@dataclass(frozen=True) -class QuerySpec: - """ - Base specification for a DAV query. - - This is an immutable description of what to request from the server. - The client uses this to construct and execute the actual HTTP request. - - Attributes: - url: The URL to query - method: HTTP method (PROPFIND, REPORT, etc.) - depth: DAV depth header (0, 1, or infinity) - props: Properties to request - body: Optional pre-built XML body (if complex) - """ - - url: str - method: str = "PROPFIND" - depth: int = 0 - props: tuple[str, ...] = () - body: bytes | None = None - - def with_url(self, new_url: str) -> QuerySpec: - """Return a copy with a different URL.""" - return QuerySpec( - url=new_url, - method=self.method, - depth=self.depth, - props=self.props, - body=self.body, - ) - - -@dataclass -class PropertyData: - """ - Generic property data extracted from a DAV response. - - Used when we need to pass around arbitrary properties - without knowing their specific structure. - """ - - href: str - properties: dict[str, Any] = field(default_factory=dict) - status: int = 200 - - -def _normalize_href(href: str, base_url: str | None = None) -> str: - """ - Normalize an href to a consistent format. - - Handles relative URLs, double slashes, and other common issues. 
- - Args: - href: The href from the server response - base_url: Optional base URL to resolve relative hrefs against - - Returns: - Normalized href string - """ - if not href: - return href - - # Handle double slashes - while "//" in href and not href.startswith("http"): - href = href.replace("//", "/") - - # Resolve relative URLs if base provided - if base_url and not href.startswith("http"): - try: - base = URL.objectify(base_url) - if base: - return str(base.join(href)) - except Exception: - pass - - return href - - -def _extract_resource_type(properties: dict[str, Any]) -> list[str]: - """ - Extract resource types from properties dict. - - Args: - properties: Dict of property tag -> value - - Returns: - List of resource type tags (e.g., ['{DAV:}collection', '{urn:ietf:params:xml:ns:caldav}calendar']) - """ - resource_type_key = "{DAV:}resourcetype" - rt = properties.get(resource_type_key, []) - - if isinstance(rt, list): - return rt - elif rt is None: - return [] - else: - # Single value - return [rt] if rt else [] - - -def _is_calendar_resource(properties: dict[str, Any]) -> bool: - """ - Check if properties indicate a calendar resource. - - Args: - properties: Dict of property tag -> value - - Returns: - True if this is a calendar collection - """ - resource_types = _extract_resource_type(properties) - calendar_tag = "{urn:ietf:params:xml:ns:caldav}calendar" - return calendar_tag in resource_types - - -def _is_collection_resource(properties: dict[str, Any]) -> bool: - """ - Check if properties indicate a collection resource. - - Args: - properties: Dict of property tag -> value - - Returns: - True if this is a collection - """ - resource_types = _extract_resource_type(properties) - collection_tag = "{DAV:}collection" - return collection_tag in resource_types - - -def _get_property_value( - properties: dict[str, Any], - prop_name: str, - default: Any = None, -) -> Any: - """ - Get a property value, handling both namespaced and simple keys. 
- - Tries the full namespaced key first, then common namespace prefixes. - - Args: - properties: Dict of property tag -> value - prop_name: Property name (e.g., 'displayname' or '{DAV:}displayname') - default: Default value if not found - - Returns: - Property value or default - """ - # Try exact key first - if prop_name in properties: - return properties[prop_name] - - # Try with common namespaces - namespaces = [ - "{DAV:}", - "{urn:ietf:params:xml:ns:caldav}", - "{http://calendarserver.org/ns/}", - "{http://apple.com/ns/ical/}", - ] - - for ns in namespaces: - full_key = f"{ns}{prop_name}" - if full_key in properties: - return properties[full_key] - - return default diff --git a/caldav/operations/calendar_ops.py b/caldav/operations/calendar_ops.py deleted file mode 100644 index 07630b46..00000000 --- a/caldav/operations/calendar_ops.py +++ /dev/null @@ -1,261 +0,0 @@ -""" -Calendar operations - Sans-I/O business logic for Calendar objects. - -This module contains pure functions for Calendar operations like -component class detection, sync token generation, and result processing. -Both sync and async clients use these same functions. -""" - -from __future__ import annotations - -import hashlib -from dataclasses import dataclass -from typing import Any -from urllib.parse import quote - -# Component type to class name mapping -COMPONENT_CLASS_MAP = { - "BEGIN:VEVENT": "Event", - "BEGIN:VTODO": "Todo", - "BEGIN:VJOURNAL": "Journal", - "BEGIN:VFREEBUSY": "FreeBusy", -} - - -@dataclass -class CalendarObjectInfo: - """Information about a calendar object extracted from server response.""" - - url: str - data: str | None - etag: str | None - component_type: str | None # "Event", "Todo", "Journal", "FreeBusy" - extra_props: dict - - -def _detect_component_type_from_string(data: str) -> str | None: - """ - Detect the component type (Event, Todo, etc.) from iCalendar string data. 
- - Args: - data: iCalendar data as string - - Returns: - Component type name ("Event", "Todo", "Journal", "FreeBusy") or None - """ - for line in data.split("\n"): - line = line.strip() - if line in COMPONENT_CLASS_MAP: - return COMPONENT_CLASS_MAP[line] - return None - - -def _detect_component_type_from_icalendar(ical_obj: Any) -> str | None: - """ - Detect the component type from an icalendar object. - - Args: - ical_obj: icalendar.Calendar or similar object with subcomponents - - Returns: - Component type name ("Event", "Todo", "Journal", "FreeBusy") or None - """ - import icalendar - - ical2name = { - icalendar.Event: "Event", - icalendar.Todo: "Todo", - icalendar.Journal: "Journal", - icalendar.FreeBusy: "FreeBusy", - } - - if not hasattr(ical_obj, "subcomponents"): - return None - - if not len(ical_obj.subcomponents): - return None - - for sc in ical_obj.subcomponents: - if sc.__class__ in ical2name: - return ical2name[sc.__class__] - - return None - - -def _detect_component_type(data: Any) -> str | None: - """ - Detect the component type from iCalendar data (string or object). - - Args: - data: iCalendar data as string, bytes, or icalendar object - - Returns: - Component type name ("Event", "Todo", "Journal", "FreeBusy") or None - """ - if data is None: - return None - - # Try string detection first - if hasattr(data, "split"): - return _detect_component_type_from_string(data) - - # Try icalendar object detection - if hasattr(data, "subcomponents"): - return _detect_component_type_from_icalendar(data) - - return None - - -def _generate_fake_sync_token(etags_and_urls: list[tuple[str | None, str]]) -> str: - """ - Generate a fake sync token for servers without sync support. - - Uses a hash of all ETags/URLs to detect changes. This allows clients - to use the sync token API even when the server doesn't support it. - - Args: - etags_and_urls: List of (etag, url) tuples. ETag may be None. 
- - Returns: - A fake sync token string prefixed with "fake-" - """ - parts = [] - for etag, url in etags_and_urls: - if etag: - parts.append(str(etag)) - else: - # Use URL as fallback identifier - parts.append(str(url)) - - parts.sort() # Consistent ordering - combined = "|".join(parts) - hash_value = hashlib.md5(combined.encode(), usedforsecurity=False).hexdigest() - return f"fake-{hash_value}" - - -def _is_fake_sync_token(token: str | None) -> bool: - """ - Check if a sync token is a fake one generated by the client. - - Args: - token: Sync token string - - Returns: - True if this is a fake sync token - """ - return token is not None and isinstance(token, str) and token.startswith("fake-") - - -def _normalize_result_url(result_url: str, parent_url: str) -> str: - """ - Normalize a URL from search/report results. - - Handles quoting for relative URLs and ensures proper joining with parent. - - Args: - result_url: URL from server response (may be relative or absolute) - parent_url: Parent calendar URL - - Returns: - Normalized URL string ready for joining with parent - """ - # If it's a full URL, return as-is - if "://" in result_url: - return result_url - - # Quote relative paths - return quote(result_url) - - -def _should_skip_calendar_self_reference(result_url: str, calendar_url: str) -> bool: - """ - Check if a result URL should be skipped because it's the calendar itself. - - iCloud and some other servers return the calendar URL along with - calendar item URLs. This function helps filter those out. 
- - Args: - result_url: URL from server response - calendar_url: The calendar's URL - - Returns: - True if this URL should be skipped (it's the calendar itself) - """ - # Normalize both URLs for comparison - result_normalized = result_url.rstrip("/") - calendar_normalized = calendar_url.rstrip("/") - - # Check if they're the same - return result_normalized == calendar_normalized - - -def _process_report_results( - results: dict, - calendar_url: str, - calendar_data_tag: str = "{urn:ietf:params:xml:ns:caldav}calendar-data", - etag_tag: str = "{DAV:}getetag", -) -> list[CalendarObjectInfo]: - """ - Process REPORT response results into CalendarObjectInfo objects. - - Args: - results: Dict mapping href -> properties dict - calendar_url: URL of the calendar (to filter out self-references) - calendar_data_tag: XML tag for calendar data property - etag_tag: XML tag for etag property - - Returns: - List of CalendarObjectInfo objects - """ - objects = [] - calendar_url_normalized = calendar_url.rstrip("/") - - for href, props in results.items(): - # Skip calendar self-reference - if _should_skip_calendar_self_reference(href, calendar_url_normalized): - continue - - # Extract calendar data - data = props.pop(calendar_data_tag, None) - - # Extract etag - etag = props.get(etag_tag) - - # Detect component type - component_type = _detect_component_type(data) - - # Normalize URL - normalized_url = _normalize_result_url(href, calendar_url) - - objects.append( - CalendarObjectInfo( - url=normalized_url, - data=data, - etag=etag, - component_type=component_type, - extra_props=props, - ) - ) - - return objects - - -def _build_calendar_object_url( - calendar_url: str, - object_id: str, -) -> str: - """ - Build a URL for a calendar object from calendar URL and object ID. 
- - Args: - calendar_url: URL of the parent calendar - object_id: ID of the calendar object (typically UID.ics) - - Returns: - Full URL for the calendar object - """ - calendar_url = str(calendar_url).rstrip("/") - object_id = quote(str(object_id)) - if not object_id.endswith(".ics"): - object_id += ".ics" - return f"{calendar_url}/{object_id}" diff --git a/caldav/operations/calendarobject_ops.py b/caldav/operations/calendarobject_ops.py deleted file mode 100644 index 8b60a34a..00000000 --- a/caldav/operations/calendarobject_ops.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -CalendarObjectResource operations - Sans-I/O business logic. - -This module contains pure functions for working with calendar objects -(events, todos, journals) without performing any network I/O. -Both sync and async clients use these same functions. - -These functions work on icalendar component objects or raw data strings. -""" - -from __future__ import annotations - -import re -import uuid -from dataclasses import dataclass -from datetime import datetime, timedelta, timezone -from typing import Any -from urllib.parse import quote - -import icalendar -from dateutil.rrule import rrulestr - -# Relation type reverse mapping (RFC 9253) -RELTYPE_REVERSE_MAP = { - "PARENT": "CHILD", - "CHILD": "PARENT", - "SIBLING": "SIBLING", - "DEPENDS-ON": "FINISHTOSTART", - "FINISHTOSTART": "DEPENDENT", -} - - -@dataclass -class CalendarObjectData: - """Data extracted from a calendar object.""" - - uid: str | None - url: str | None - etag: str | None - data: str | None - - -def _generate_uid() -> str: - """Generate a new UID for a calendar object.""" - return str(uuid.uuid4()) - - -def _quote_uid(uid: str) -> str: - """ - URL-quote a UID for use in a CalDAV object URL. - - Slashes are double-quoted (replaced with %2F before percent-encoding) - per https://github.com/python-caldav/caldav/issues/143. 
- """ - return quote(uid.replace("/", "%2F")) - - -def _generate_url(parent_url: str, uid: str) -> str: - """ - Generate a URL for a calendar object based on its UID. - - Handles special characters in UID by proper quoting. - - Args: - parent_url: URL of the parent calendar (must end with /) - uid: The UID of the calendar object - - Returns: - Full URL for the calendar object - """ - quoted_uid = _quote_uid(uid) - if not parent_url.endswith("/"): - parent_url += "/" - return f"{parent_url}{quoted_uid}.ics" - - -def _extract_uid_from_path(path: str) -> str | None: - """ - Extract UID from a .ics file path. - - Args: - path: Path like "/calendars/user/calendar/event-uid.ics" - - Returns: - The UID portion, or None if not found - """ - if not path.endswith(".ics"): - return None - match = re.search(r"(/|^)([^/]*).ics$", path) - if match: - return match.group(2) - return None - - -def _find_id_and_path( - component: Any, # icalendar component - given_id: str | None = None, - given_path: str | None = None, - existing_id: str | None = None, -) -> tuple[str, str]: - """ - Determine the UID and path for a calendar object. - - This is Sans-I/O logic extracted from CalendarObjectResource._find_id_path(). - - Priority: - 1. given_id parameter - 2. existing_id (from object) - 3. UID from component - 4. UID extracted from path - 5. Generate new UID - - Args: - component: icalendar component (VEVENT, VTODO, etc.) 
- given_id: Explicitly provided ID - given_path: Explicitly provided path - existing_id: ID already set on the object - - Returns: - Tuple of (uid, relative_path) - """ - uid = given_id or existing_id - - if not uid: - # Try to get UID from component - uid_prop = component.get("UID") - if uid_prop: - uid = str(uid_prop) - - if not uid and given_path and given_path.endswith(".ics"): - # Extract from path - uid = _extract_uid_from_path(given_path) - - if not uid: - # Generate new UID - uid = _generate_uid() - - # Set UID in component (remove old one first) - if "UID" in component: - component.pop("UID") - component.add("UID", uid) - - # Determine path - if given_path: - path = given_path - else: - path = _quote_uid(uid) + ".ics" - - return uid, path - - -def _get_duration( - component: Any, # icalendar component - end_param: str = "DTEND", -) -> timedelta: - """ - Get duration from a calendar component. - - According to the RFC, either DURATION or DTEND/DUE should be set, - but never both. This function calculates duration from whichever is present. - - Args: - component: icalendar component (VEVENT, VTODO, etc.) 
- end_param: The end parameter name ("DTEND" for events, "DUE" for todos) - - Returns: - Duration as timedelta - """ - if "DURATION" in component: - return component["DURATION"].dt - - if "DTSTART" in component and end_param in component: - end = component[end_param].dt - start = component["DTSTART"].dt - - # Handle date vs datetime mismatch - if isinstance(end, datetime) != isinstance(start, datetime): - # Convert both to datetime for comparison - if not isinstance(start, datetime): - start = datetime(start.year, start.month, start.day) - if not isinstance(end, datetime): - end = datetime(end.year, end.month, end.day) - - return end - start - - # Default: if only DTSTART and it's a date (not datetime), assume 1 day - if "DTSTART" in component: - dtstart = component["DTSTART"].dt - if not isinstance(dtstart, datetime): - return timedelta(days=1) - - return timedelta(0) - - -def _get_due(component: Any) -> datetime | None: - """ - Get due date from a VTODO component. - - Handles DUE, DTEND, or DURATION+DTSTART. - - Args: - component: icalendar VTODO component - - Returns: - Due date/datetime, or None if not set - """ - if "DUE" in component: - return component["DUE"].dt - elif "DTEND" in component: - return component["DTEND"].dt - elif "DURATION" in component and "DTSTART" in component: - return component["DTSTART"].dt + component["DURATION"].dt - return None - - -def _set_duration( - component: Any, # icalendar component - duration: timedelta, - movable_attr: str = "DTSTART", -) -> None: - """ - Set duration on a component, adjusting other properties as needed. - - If both DTSTART and DUE/DTEND are set, one must be moved. 
- - Args: - component: icalendar component to modify - duration: New duration - movable_attr: Which attribute to move ("DTSTART" or "DUE") - """ - has_due = "DUE" in component or "DURATION" in component - has_start = "DTSTART" in component - - if has_due and has_start: - component.pop(movable_attr, None) - if movable_attr == "DUE": - component.pop("DURATION", None) - if movable_attr == "DTSTART": - component.add("DTSTART", component["DUE"].dt - duration) - elif movable_attr == "DUE": - component.add("DUE", component["DTSTART"].dt + duration) - elif "DUE" in component: - component.add("DTSTART", component["DUE"].dt - duration) - elif "DTSTART" in component: - component.add("DUE", component["DTSTART"].dt + duration) - else: - if "DURATION" in component: - component.pop("DURATION") - component.add("DURATION", duration) - - -def _is_task_pending(component: Any) -> bool: - """ - Check if a VTODO component is pending (not completed). - - Args: - component: icalendar VTODO component - - Returns: - True if task is pending, False if completed/cancelled - """ - if component.get("COMPLETED") is not None: - return False - - status = component.get("STATUS", "NEEDS-ACTION") - if status in ("NEEDS-ACTION", "IN-PROCESS"): - return True - if status in ("CANCELLED", "COMPLETED"): - return False - - # Unknown status - treat as pending - return True - - -def _mark_task_completed( - component: Any, # icalendar VTODO component - completion_timestamp: datetime | None = None, -) -> None: - """ - Mark a VTODO component as completed. - - Modifies the component in place. 
- - Args: - component: icalendar VTODO component - completion_timestamp: When the task was completed (defaults to now) - """ - if completion_timestamp is None: - completion_timestamp = datetime.now(timezone.utc) - - component.pop("STATUS", None) - component.add("STATUS", "COMPLETED") - component.add("COMPLETED", completion_timestamp) - - -def _mark_task_uncompleted(component: Any) -> None: - """ - Mark a VTODO component as not completed. - - Args: - component: icalendar VTODO component - """ - component.pop("status", None) - component.pop("STATUS", None) - component.add("STATUS", "NEEDS-ACTION") - component.pop("completed", None) - component.pop("COMPLETED", None) - - -def _calculate_next_recurrence( - component: Any, # icalendar VTODO component - completion_timestamp: datetime | None = None, - rrule: Any | None = None, - dtstart: datetime | None = None, - use_fixed_deadlines: bool | None = None, - ignore_count: bool = True, -) -> datetime | None: - """ - Calculate the next DTSTART for a recurring task after completion. - - This implements the logic from Todo._next(). - - Args: - component: icalendar VTODO component with RRULE - completion_timestamp: When the task was completed - rrule: Override RRULE (default: from component) - dtstart: Override DTSTART (default: calculated based on use_fixed_deadlines) - use_fixed_deadlines: If True, preserve DTSTART from component. - If False, use completion time minus duration. - If None, auto-detect from BY* parameters in rrule. 
- ignore_count: If True, ignore COUNT in RRULE - - Returns: - Next DTSTART datetime, or None if no more recurrences - """ - if rrule is None: - rrule = component.get("RRULE") - if rrule is None: - return None - - # Determine if we should use fixed deadlines - if use_fixed_deadlines is None: - use_fixed_deadlines = any(x for x in rrule if x.startswith("BY")) - - # Determine starting point for calculation - if dtstart is None: - if use_fixed_deadlines: - if "DTSTART" in component: - dtstart = component["DTSTART"].dt - else: - dtstart = completion_timestamp or datetime.now(timezone.utc) - else: - duration = _get_duration(component, "DUE") - dtstart = (completion_timestamp or datetime.now(timezone.utc)) - duration - - # Normalize to UTC for comparison - if hasattr(dtstart, "astimezone"): - dtstart = dtstart.astimezone(timezone.utc) - - ts = completion_timestamp or dtstart - - # Optionally ignore COUNT - if ignore_count and "COUNT" in rrule: - rrule = rrule.copy() - rrule.pop("COUNT") - - # Parse and calculate next occurrence - rrule_obj = rrulestr(rrule.to_ical().decode("utf-8"), dtstart=dtstart) - return rrule_obj.after(ts) - - -def _reduce_rrule_count(component: Any) -> bool: - """ - Reduce the COUNT in an RRULE by 1. - - Args: - component: icalendar component with RRULE - - Returns: - False if COUNT was 1 (task should end), True otherwise - """ - if "RRULE" not in component: - return True - - rrule = component["RRULE"] - count = rrule.get("COUNT", None) - if count is not None: - # COUNT is stored as a list in vRecur - count_val = count[0] if isinstance(count, list) else count - if count_val == 1: - return False - if isinstance(count, list): - count[0] = count_val - 1 - else: - rrule["COUNT"] = count_val - 1 - - return True - - -def _is_calendar_data_loaded( - data: str | None, - vobject_instance: Any, - icalendar_instance: Any, -) -> bool: - """ - Check if calendar object data is loaded. 
- - Args: - data: Raw iCalendar data string - vobject_instance: vobject instance (if any) - icalendar_instance: icalendar instance (if any) - - Returns: - True if data is loaded - """ - return bool((data and data.count("BEGIN:") > 1) or vobject_instance or icalendar_instance) - - -def _has_calendar_component(data: str | None) -> bool: - """ - Check if data contains VEVENT, VTODO, or VJOURNAL. - - Args: - data: Raw iCalendar data string - - Returns: - True if a calendar component is present - """ - if not data: - return False - - return ( - data.count("BEGIN:VEVENT") + data.count("BEGIN:VTODO") + data.count("BEGIN:VJOURNAL") - ) > 0 - - -def _get_non_timezone_subcomponents( - icalendar_instance: Any, -) -> list[Any]: - """ - Get all subcomponents except VTIMEZONE. - - Args: - icalendar_instance: icalendar.Calendar instance - - Returns: - List of non-timezone subcomponents - """ - return [x for x in icalendar_instance.subcomponents if not isinstance(x, icalendar.Timezone)] - - -def _get_primary_component(icalendar_instance: Any) -> Any | None: - """ - Get the primary (non-timezone) component from a calendar. - - For events/todos/journals, there should be exactly one. - For recurrence sets, returns the master component. - - Args: - icalendar_instance: icalendar.Calendar instance - - Returns: - The primary component (VEVENT, VTODO, VJOURNAL, or VFREEBUSY) - """ - components = _get_non_timezone_subcomponents(icalendar_instance) - if not components: - return None - - for comp in components: - if isinstance( - comp, - icalendar.Event | icalendar.Todo | icalendar.Journal | icalendar.FreeBusy, - ): - return comp - - return None - - -def _copy_component_with_new_uid( - component: Any, - new_uid: str | None = None, -) -> Any: - """ - Create a copy of a component with a new UID. 
- - Args: - component: icalendar component to copy - new_uid: New UID (generated if not provided) - - Returns: - Copy of the component with new UID - """ - new_comp = component.copy() - new_comp.pop("UID", None) - new_comp.add("UID", new_uid or _generate_uid()) - return new_comp - - -def _get_reverse_reltype(reltype: str) -> str | None: - """ - Get the reverse relation type for a given relation type. - - Args: - reltype: Relation type (e.g., "PARENT", "CHILD") - - Returns: - Reverse relation type, or None if not defined - """ - return RELTYPE_REVERSE_MAP.get(reltype.upper()) - - -def _extract_relations( - component: Any, - reltypes: set | None = None, -) -> dict[str, set]: - """ - Extract RELATED-TO relations from a component. - - Args: - component: icalendar component - reltypes: Optional set of relation types to filter - - Returns: - Dict mapping reltype -> set of UIDs - """ - from collections import defaultdict - - result = defaultdict(set) - relations = component.get("RELATED-TO", []) - - if not isinstance(relations, list): - relations = [relations] - - for rel in relations: - reltype = rel.params.get("RELTYPE", "PARENT") - if reltypes and reltype not in reltypes: - continue - result[reltype].add(str(rel)) - - return dict(result) diff --git a/caldav/operations/calendarset_ops.py b/caldav/operations/calendarset_ops.py deleted file mode 100644 index e7f6fe03..00000000 --- a/caldav/operations/calendarset_ops.py +++ /dev/null @@ -1,245 +0,0 @@ -""" -CalendarSet operations - Sans-I/O business logic for CalendarSet objects. - -This module contains pure functions for CalendarSet operations like -extracting calendar IDs and building calendar URLs. Both sync and async -clients use these same functions. 
-""" - -from __future__ import annotations - -import logging -from dataclasses import dataclass -from typing import Any -from urllib.parse import quote, unquote, urlparse, urlunparse - -log = logging.getLogger("caldav") - - -@dataclass -class CalendarInfo: - """Data for a calendar extracted from PROPFIND response.""" - - url: str - cal_id: str | None - name: str | None - resource_types: list[str] - - -def _extract_calendar_id_from_url(url: str) -> str | None: - """ - Extract calendar ID from a calendar URL. - - Calendar URLs typically look like: /calendars/user/calendar-id/ - The calendar ID is the second-to-last path segment. - - Args: - url: Calendar URL - - Returns: - Calendar ID, or None if extraction fails - """ - try: - # Split and get second-to-last segment (last is empty due to trailing /) - parts = str(url).rstrip("/").split("/") - if len(parts) >= 1: - cal_id = parts[-1] - if cal_id: - return cal_id - except Exception: - log.error(f"Calendar has unexpected url {url}") - return None - - -def _process_calendar_list( - children_data: list[tuple[str, list[str], str | None]], -) -> list[CalendarInfo]: - """ - Process children data into CalendarInfo objects. - - Args: - children_data: List of (url, resource_types, display_name) tuples - from children() call - - Returns: - List of CalendarInfo objects with extracted calendar IDs - """ - calendars = [] - for c_url, c_types, c_name in children_data: - cal_id = _extract_calendar_id_from_url(c_url) - if not cal_id: - continue - calendars.append( - CalendarInfo( - url=c_url, - cal_id=cal_id, - name=c_name, - resource_types=c_types, - ) - ) - return calendars - - -def _resolve_calendar_url( - cal_id: str, - parent_url: str, - client_base_url: str, -) -> str: - """ - Resolve a calendar URL from a calendar ID. - - Handles different formats: - - Full URLs (https://...) - - Absolute paths (/calendars/...) 
- - Relative IDs (just the calendar name) - - Args: - cal_id: Calendar ID or URL - parent_url: URL of the calendar set - client_base_url: Base URL of the client - - Returns: - Resolved calendar URL - """ - # Normalize URLs for comparison - client_canonical = str(client_base_url).rstrip("/") - cal_id_str = str(cal_id) - - # Check if cal_id is already a full URL under the client base - if cal_id_str.startswith(client_canonical): - # It's a full URL, just join to handle any path adjustments - return _join_url(client_base_url, cal_id) - - # Check if it's a full URL (http:// or https://) - if cal_id_str.startswith("https://") or cal_id_str.startswith("http://"): - # Join with parent URL - return _join_url(parent_url, cal_id) - - # It's a relative ID - quote it and append trailing slash - quoted_id = quote(cal_id) - if not quoted_id.endswith("/"): - quoted_id += "/" - - return _join_url(parent_url, quoted_id) - - -def _join_url(base: str, path: str) -> str: - """ - Simple URL join - concatenates base and path. - - This is a placeholder that the actual URL class will handle. - Returns a string representation for the operations layer. - - Args: - base: Base URL - path: Path to join - - Returns: - Joined URL string - """ - # Basic implementation - real code uses URL.join() - base = str(base).rstrip("/") - path = str(path).lstrip("/") - return f"{base}/{path}" - - -def _find_calendar_by_name( - calendars: list[CalendarInfo], - name: str, -) -> CalendarInfo | None: - """ - Find a calendar by display name. - - Args: - calendars: List of CalendarInfo objects - name: Display name to search for - - Returns: - CalendarInfo if found, None otherwise - """ - for cal in calendars: - if cal.name == name: - return cal - return None - - -def _find_calendar_by_id( - calendars: list[CalendarInfo], - cal_id: str, -) -> CalendarInfo | None: - """ - Find a calendar by ID. 
- - Args: - calendars: List of CalendarInfo objects - cal_id: Calendar ID to search for - - Returns: - CalendarInfo if found, None otherwise - """ - for cal in calendars: - if cal.cal_id == cal_id: - return cal - return None - - -def _quote_url_path(url: str) -> str: - """ - Quote the path component of a URL to handle spaces and special characters. - - Some servers (e.g., Zimbra) return URLs with unencoded spaces in the path. - This function ensures the path is properly percent-encoded. - - Args: - url: URL string that may contain unencoded characters in path - - Returns: - URL with properly encoded path - """ - parsed = urlparse(url) - # quote the path, but unquote first to avoid double-encoding - quoted_path = quote(unquote(parsed.path), safe="/@") - return urlunparse(parsed._replace(path=quoted_path)) - - -def _extract_calendars_from_propfind_results( - results: list[Any] | None, -) -> list[CalendarInfo]: - """ - Extract calendar information from PROPFIND results. - - This pure function processes propfind results to identify calendar - resources and extract their metadata. 
- - Args: - results: List of PropfindResult objects from parse_propfind_response - - Returns: - List of CalendarInfo objects for calendar resources found - """ - from caldav.operations.base import _is_calendar_resource as is_calendar_resource - - calendars = [] - for result in results or []: - # Check if this is a calendar resource - if not is_calendar_resource(result.properties): - continue - - # Extract calendar info - quote URL path to handle spaces - url = _quote_url_path(result.href) - name = result.properties.get("{DAV:}displayname") - cal_id = _extract_calendar_id_from_url(url) - - if not cal_id: - continue - - calendars.append( - CalendarInfo( - url=url, - cal_id=cal_id, - name=name, - resource_types=result.properties.get("{DAV:}resourcetype", []), - ) - ) - - return calendars diff --git a/caldav/operations/davobject_ops.py b/caldav/operations/davobject_ops.py deleted file mode 100644 index 406ef978..00000000 --- a/caldav/operations/davobject_ops.py +++ /dev/null @@ -1,293 +0,0 @@ -""" -DAVObject operations - Sans-I/O business logic for DAV objects. - -This module contains pure functions for DAVObject operations like -getting/setting properties, listing children, and deleting resources. -Both sync and async clients use these same functions. -""" - -from __future__ import annotations - -import logging -from dataclasses import dataclass -from typing import Any -from urllib.parse import quote, unquote - -log = logging.getLogger("caldav") - - -# Property tags used in operations -DAV_DISPLAYNAME = "{DAV:}displayname" -DAV_RESOURCETYPE = "{DAV:}resourcetype" -CALDAV_CALENDAR = "{urn:ietf:params:xml:ns:caldav}calendar" - - -@dataclass(frozen=True) -class ChildrenQuery: - """Query specification for listing children.""" - - url: str - depth: int = 1 - props: tuple[str, ...] 
= (DAV_DISPLAYNAME, DAV_RESOURCETYPE) - - -@dataclass -class ChildData: - """Data for a child resource.""" - - url: str - resource_types: list[str] - display_name: str | None - - -@dataclass -class PropertiesResult: - """Result of extracting properties for a specific object.""" - - properties: dict[str, Any] - matched_path: str - - -def _build_children_query(url: str) -> ChildrenQuery: - """ - Build query for listing children of a collection. - - Args: - url: URL of the parent collection - - Returns: - ChildrenQuery specification - """ - return ChildrenQuery(url=url) - - -def _process_children_response( - properties_by_href: dict[str, dict[str, Any]], - parent_url: str, - filter_type: str | None = None, - is_calendar_set: bool = False, -) -> list[ChildData]: - """ - Process PROPFIND response into list of children. - - This is Sans-I/O - works on already-parsed response data. - - Args: - properties_by_href: Dict mapping href -> properties dict - parent_url: URL of the parent collection (to exclude from results) - filter_type: Optional resource type to filter by (e.g., CALDAV_CALENDAR) - is_calendar_set: True if parent is a CalendarSet (affects filtering logic) - - Returns: - List of ChildData for matching children - """ - children = [] - - # Normalize parent URL for comparison - parent_canonical = _canonical_path(parent_url) - - for path, props in properties_by_href.items(): - resource_types = props.get(DAV_RESOURCETYPE, []) - if isinstance(resource_types, str): - resource_types = [resource_types] - elif resource_types is None: - resource_types = [] - - display_name = props.get(DAV_DISPLAYNAME) - - # Filter by type if specified - if filter_type is not None and filter_type not in resource_types: - continue - - # Build URL, quoting if it's a relative path - url_obj_path = path - if not path.startswith("http"): - url_obj_path = quote(path) - - # Determine child's canonical path for comparison - child_canonical = _canonical_path(path) - - # Skip the parent itself - # 
Special case for CalendarSet filtering for calendars - if is_calendar_set and filter_type == CALDAV_CALENDAR: - # Include if it's a calendar (already filtered above) - children.append( - ChildData( - url=url_obj_path, - resource_types=resource_types, - display_name=display_name, - ) - ) - elif parent_canonical != child_canonical: - children.append( - ChildData( - url=url_obj_path, - resource_types=resource_types, - display_name=display_name, - ) - ) - - return children - - -def _canonical_path(url: str) -> str: - """Get canonical path for comparison, stripping trailing slashes.""" - # Extract path from URL - if "://" in url: - # Full URL - extract path - from urllib.parse import urlparse - - parsed = urlparse(url) - path = parsed.path - else: - path = url - - # Strip trailing slash for comparison - return path.rstrip("/") - - -def _find_object_properties( - properties_by_href: dict[str, dict[str, Any]], - object_url: str, - is_principal: bool = False, -) -> PropertiesResult: - """ - Find properties for a specific object from a PROPFIND response. - - Handles various server quirks like trailing slash mismatches, - iCloud path issues, and double slashes. 
- - Args: - properties_by_href: Dict mapping href -> properties dict - object_url: URL of the object we're looking for - is_principal: True if object is a Principal (affects warning behavior) - - Returns: - PropertiesResult with the found properties - - Raises: - ValueError: If no matching properties found - """ - path = unquote(object_url) if "://" not in object_url else unquote(_extract_path(object_url)) - - # Try with and without trailing slash - if path.endswith("/"): - exchange_path = path[:-1] - else: - exchange_path = path + "/" - - # Try exact path match - if path in properties_by_href: - return PropertiesResult(properties=properties_by_href[path], matched_path=path) - - # Try with/without trailing slash - if exchange_path in properties_by_href: - if not is_principal: - log.warning( - f"The path {path} was not found in the properties, but {exchange_path} was. " - "This may indicate a server bug or a trailing slash issue." - ) - return PropertiesResult( - properties=properties_by_href[exchange_path], matched_path=exchange_path - ) - - # Try full URL as key - if object_url in properties_by_href: - return PropertiesResult(properties=properties_by_href[object_url], matched_path=object_url) - - # iCloud workaround - /principal/ path - if "/principal/" in properties_by_href and path.endswith("/principal/"): - log.warning("Applying iCloud workaround for /principal/ path mismatch") - return PropertiesResult( - properties=properties_by_href["/principal/"], matched_path="/principal/" - ) - - # Double slash workaround - if "//" in path: - normalized = path.replace("//", "/") - if normalized in properties_by_href: - log.warning(f"Path contained double slashes: {path} -> {normalized}") - return PropertiesResult( - properties=properties_by_href[normalized], matched_path=normalized - ) - - # Last resort: if only one result, use it - if len(properties_by_href) == 1: - only_path = list(properties_by_href.keys())[0] - log.warning( - f"Possibly the server has a path handling 
problem, possibly the URL configured is wrong. " - f"Path expected: {path}, path found: {only_path}. " - "Continuing, probably everything will be fine" - ) - return PropertiesResult(properties=properties_by_href[only_path], matched_path=only_path) - - # No match found - raise ValueError( - f"Could not find properties for {path}. Available paths: {list(properties_by_href.keys())}" - ) - - -def _extract_path(url: str) -> str: - """Extract path component from a URL.""" - if "://" not in url: - return url - from urllib.parse import urlparse - - return urlparse(url).path - - -def _convert_protocol_results_to_properties( - results: list[Any], # List[PropfindResult] - requested_props: list[str] | None = None, -) -> dict[str, dict[str, Any]]: - """ - Convert protocol layer results to the {href: {tag: value}} format. - - Args: - results: List of PropfindResult from protocol layer - requested_props: Optional list of property tags that were requested - (used to initialize missing props to None) - - Returns: - Dict mapping href -> properties dict - """ - properties = {} - for result in results: - result_props = {} - # Initialize requested props to None for backward compat - if requested_props: - for prop in requested_props: - result_props[prop] = None - # Overlay with actual values - result_props.update(result.properties) - properties[result.href] = result_props - return properties - - -def _validate_delete_response(status: int) -> None: - """ - Validate DELETE response status. - - Args: - status: HTTP status code - - Raises: - ValueError: If status indicates failure - """ - # 200 OK, 204 No Content, 404 Not Found (already deleted) are all acceptable - if status not in (200, 204, 404): - raise ValueError(f"Delete failed with status {status}") - - -def _validate_proppatch_response(status: int) -> None: - """ - Validate PROPPATCH response status. 
- - Args: - status: HTTP status code - - Raises: - ValueError: If status indicates failure - """ - if status >= 400: - raise ValueError(f"PROPPATCH failed with status {status}") diff --git a/caldav/operations/principal_ops.py b/caldav/operations/principal_ops.py deleted file mode 100644 index 8c10a98f..00000000 --- a/caldav/operations/principal_ops.py +++ /dev/null @@ -1,162 +0,0 @@ -""" -Principal operations - Sans-I/O business logic for Principal objects. - -This module contains pure functions for Principal operations like -URL sanitization and vCalAddress creation. Both sync and async clients -use these same functions. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import Any -from urllib.parse import quote - - -@dataclass -class PrincipalData: - """Data extracted from a principal.""" - - url: str | None - display_name: str | None - calendar_home_set_url: str | None - calendar_user_addresses: list[str] - - -def _sanitize_calendar_home_set_url(url: str | None) -> str | None: - """ - Sanitize calendar home set URL, handling server quirks. - - OwnCloud returns URLs like /remote.php/dav/calendars/tobixen@e.email/ - where the @ should be quoted. Some servers return already-quoted URLs. - - Args: - url: Calendar home set URL from server - - Returns: - Sanitized URL with @ properly quoted (if not already) - """ - if url is None: - return None - - # Quote @ in URLs that aren't full URLs (owncloud quirk) - # Don't double-quote if already quoted - if "@" in url and "://" not in url and "%40" not in url: - return quote(url) - - return url - - -def _sort_calendar_user_addresses(addresses: list[Any]) -> list[Any]: - """ - Sort calendar user addresses by preference. - - The 'preferred' attribute is possibly iCloud-specific but we honor - it when present. 
- - Args: - addresses: List of address elements (lxml elements with text and attributes) - - Returns: - Sorted list (highest preference first) - """ - return sorted(addresses, key=lambda x: -int(x.get("preferred", 0))) - - -def _extract_calendar_user_addresses(addresses: list[Any]) -> list[str | None]: - """ - Extract calendar user address strings from XML elements. - - Args: - addresses: List of DAV:href elements - - Returns: - List of address strings (sorted by preference) - """ - sorted_addresses = _sort_calendar_user_addresses(addresses) - return [x.text for x in sorted_addresses] - - -def _create_vcal_address( - display_name: str | None, - address: str, - calendar_user_type: str | None = None, -) -> Any: - """ - Create an icalendar vCalAddress object from principal properties. - - Args: - display_name: The principal's display name (CN parameter) - address: The primary calendar user address - calendar_user_type: CalendarUserType (CUTYPE parameter) - - Returns: - icalendar.vCalAddress object - """ - from icalendar import vCalAddress, vText - - vcal_addr = vCalAddress(address) - if display_name: - vcal_addr.params["cn"] = vText(display_name) - if calendar_user_type: - vcal_addr.params["cutype"] = vText(calendar_user_type) - - return vcal_addr - - -def _extract_calendar_home_set_from_results( - results: list[Any] | None, -) -> str | None: - """ - Extract calendar-home-set URL from PROPFIND results. - - This pure function processes propfind results to find the - calendar-home-set property, handling URL sanitization. 
- - Args: - results: List of PropfindResult objects from parse_propfind_response - - Returns: - Calendar home set URL, or None if not found - """ - if not results: - return None - - for result in results: - home_set = result.properties.get("{urn:ietf:params:xml:ns:caldav}calendar-home-set") - if home_set: - return _sanitize_calendar_home_set_url(home_set) - - return None - - -def _should_update_client_base_url( - calendar_home_set_url: str | None, - client_hostname: str | None, -) -> bool: - """ - Check if client base URL should be updated for load-balanced systems. - - iCloud and others use load-balanced systems where each principal - resides on one named host. If the calendar home set URL has a different - hostname, we may need to update the client's base URL. - - Args: - calendar_home_set_url: The sanitized calendar home set URL - client_hostname: The current client hostname - - Returns: - True if client URL should be updated - """ - if calendar_home_set_url is None: - return False - - # Check if it's a full URL with a different host - if "://" in calendar_home_set_url: - from urllib.parse import urlparse - - parsed = urlparse(calendar_home_set_url) - if parsed.hostname and parsed.hostname != client_hostname: - return True - - return False diff --git a/caldav/operations/search_ops.py b/caldav/operations/search_ops.py deleted file mode 100644 index 00bdcf99..00000000 --- a/caldav/operations/search_ops.py +++ /dev/null @@ -1,453 +0,0 @@ -""" -Search operations - Sans-I/O business logic for calendar search. - -This module contains pure functions that implement search logic -without performing any network I/O. Both sync (CalDAVSearcher.search) -and async (CalDAVSearcher.async_search) use these same functions. 
- -Key functions: -- build_search_xml_query(): Build CalDAV REPORT XML query -- filter_search_results(): Client-side filtering of search results -- determine_search_strategy(): Analyze server features and return search plan -- _collation_to_caldav(): Map collation enum to CalDAV identifier -""" - -from copy import deepcopy -from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any - -from icalendar import Timezone -from icalendar_searcher.collation import Collation - -from caldav.elements import cdav, dav -from caldav.lib import error - -if TYPE_CHECKING: - from icalendar_searcher import Searcher - - from caldav.calendarobjectresource import CalendarObjectResource - from caldav.compatibility_hints import FeatureSet - - -def _collation_to_caldav(collation: Collation, case_sensitive: bool = True) -> str: - """Map icalendar-searcher Collation enum to CalDAV collation identifier. - - CalDAV supports collation identifiers from RFC 4790. The default is "i;ascii-casemap" - and servers must support at least "i;ascii-casemap" and "i;octet". 
- - :param collation: icalendar-searcher Collation enum value - :param case_sensitive: Whether the collation should be case-sensitive - :return: CalDAV collation identifier string - """ - if collation == Collation.SIMPLE: - # SIMPLE collation maps to CalDAV's basic collations - if case_sensitive: - return "i;octet" - else: - return "i;ascii-casemap" - elif collation == Collation.UNICODE: - # Unicode Collation Algorithm - not all servers support this - # Note: "i;unicode-casemap" is case-insensitive by definition - # For case-sensitive Unicode, we fall back to i;octet (binary) - if case_sensitive: - return "i;octet" - else: - return "i;unicode-casemap" - elif collation == Collation.LOCALE: - # Locale-specific collation - not widely supported in CalDAV - # Fallback to i;ascii-casemap as most servers don't support locale-specific - return "i;ascii-casemap" - else: - # Default to binary/octet for unknown collations - return "i;octet" - - -@dataclass -class SearchStrategy: - """Encapsulates the search strategy decisions based on server capabilities. - - This dataclass holds all the decisions about how to execute a search, - allowing the same logic to be shared between sync and async implementations. 
- """ - - # Whether to apply client-side post-filtering - post_filter: bool | None = None - - # Hack mode for server compatibility - hacks: str | None = None - - # Whether to split expanded recurrences into separate objects - split_expanded: bool = True - - # Properties to remove from server query (for client-side filtering) - remove_properties: set[str] = field(default_factory=set) - - # Whether category filters should be removed (server doesn't support them) - remove_category_filter: bool = False - - # Whether we need to do multiple searches for pending todos - pending_todo_multi_search: bool = False - - # Whether to retry with individual component types - retry_with_comptypes: bool = False - - -def _determine_post_filter_needed( - searcher: "Searcher", - features: "FeatureSet", - comp_type_support: str | None, - current_hacks: str | None, - current_post_filter: bool | None, -) -> tuple[bool | None, str | None]: - """Determine if post-filtering is needed based on searcher state and server features. - - Returns (post_filter, hacks) tuple with potentially updated values. - - This is a Sans-I/O function - it only examines data and makes decisions. 
- """ - post_filter = current_post_filter - hacks = current_hacks - - # Handle servers with broken component-type filtering (e.g., Bedework) - if ( - ( - searcher.comp_class - or getattr(searcher, "todo", False) - or getattr(searcher, "event", False) - or getattr(searcher, "journal", False) - ) - and comp_type_support == "broken" - and not hacks - and post_filter is not False - ): - hacks = "no_comp_filter" - post_filter = True - - # Setting default value for post_filter based on various conditions - if post_filter is None and ( - (getattr(searcher, "todo", False) and not searcher.include_completed) - or searcher.expand - or "categories" in searcher._property_filters - or "category" in searcher._property_filters - or not features.is_supported("search.text.case-sensitive") - or not features.is_supported("search.time-range.accurate") - ): - post_filter = True - - return post_filter, hacks - - -def _should_remove_category_filter( - searcher: "Searcher", - features: "FeatureSet", - post_filter: bool | None, -) -> bool: - """Check if category filters should be removed from server query. - - Returns True if categories/category are in property filters but server - doesn't support category search properly. - """ - return ( - not features.is_supported("search.text.category") - and ("categories" in searcher._property_filters or "category" in searcher._property_filters) - and post_filter is not False - ) - - -def _get_explicit_contains_properties( - searcher: "Searcher", - features: "FeatureSet", - post_filter: bool | None, -) -> list[str]: - """Get list of properties with explicit 'contains' operator that server doesn't support. - - These properties should be removed from server query and applied client-side. 
- """ - if features.is_supported("search.text.substring") or post_filter is False: - return [] - - explicit_operators = getattr(searcher, "_explicit_operators", set()) - return [ - prop - for prop in searcher._property_operator - if prop in explicit_operators and searcher._property_operator[prop] == "contains" - ] - - -def _should_remove_property_filters_for_combined( - searcher: "Searcher", - features: "FeatureSet", -) -> bool: - """Check if property filters should be removed due to combined search issues. - - Some servers don't handle combined time-range + property filters properly. - """ - if features.is_supported("search.combined-is-logical-and"): - return False - return bool((searcher.start or searcher.end) and searcher._property_filters) - - -def _needs_pending_todo_multi_search( - searcher: "Searcher", - features: "FeatureSet", -) -> bool: - """Check if we need multiple searches for pending todos. - - Returns True if searching for pending todos and server supports the - necessary features for multi-search approach. - """ - if not (getattr(searcher, "todo", False) and searcher.include_completed is False): - return False - - return ( - features.is_supported("search.text") - and features.is_supported("search.combined-is-logical-and") - and ( - not features.is_supported("search.recurrences.includes-implicit.todo") - or features.is_supported("search.recurrences.includes-implicit.todo.pending") - ) - ) - - -def _filter_search_results( - objects: list["CalendarObjectResource"], - searcher: "Searcher", - post_filter: bool | None = None, - split_expanded: bool = True, - server_expand: bool = False, -) -> list["CalendarObjectResource"]: - """Apply client-side filtering and handle recurrence expansion/splitting. - - This is a Sans-I/O function - it only processes data without network I/O. 
- - :param objects: List of Event/Todo/Journal objects to filter - :param searcher: The CalDAVSearcher with filter criteria - :param post_filter: Whether to apply the searcher's filter logic. - - True: Always apply filters (check_component) - - False: Never apply filters, only handle splitting - - None: Use default behavior (depends on searcher.expand and other flags) - :param split_expanded: Whether to split recurrence sets into multiple - separate CalendarObjectResource objects. If False, a recurrence set - will be contained in a single object with multiple subcomponents. - :param server_expand: Indicates that the server was supposed to expand - recurrences. If True and split_expanded is True, splitting will be - performed even without searcher.expand being set. - :return: Filtered and/or split list of CalendarObjectResource objects - """ - if not (post_filter or searcher.expand or (split_expanded and server_expand)): - return objects - - result = [] - for o in objects: - if searcher.expand or post_filter: - try: - filtered = searcher.check_component(o, expand_only=not post_filter) - except ValueError: - ## Server returned data with invalid recurrence structure - ## (e.g. after compatibility hacks stripped DURATION). - ## Include the object unfiltered rather than crashing. 
- filtered = [ - x for x in o.icalendar_instance.subcomponents if not isinstance(x, Timezone) - ] - if not filtered: - continue - else: - filtered = [ - x for x in o.icalendar_instance.subcomponents if not isinstance(x, Timezone) - ] - - i = o.icalendar_instance - tz_ = [x for x in i.subcomponents if isinstance(x, Timezone)] - i.subcomponents = tz_ - - for comp in filtered: - if isinstance(comp, Timezone): - continue - if split_expanded: - new_obj = o.copy(keep_uid=True) - new_i = new_obj.icalendar_instance - new_i.subcomponents = [] - for tz in tz_: - new_i.add_component(tz) - result.append(new_obj) - else: - new_i = i - new_i.add_component(comp) - - if not split_expanded: - result.append(o) - - return result - - -def _build_search_xml_query( - searcher: "Searcher", - server_expand: bool = False, - props: list[Any] | None = None, - filters: Any = None, - _hacks: str | None = None, -) -> tuple[Any, type | None]: - """Build a CalDAV calendar-query XML request. - - This is a Sans-I/O function - it only builds XML without network I/O. 
- - :param searcher: CalDAVSearcher instance with search parameters - :param server_expand: Ask server to expand recurrences - :param props: Additional CalDAV properties to request - :param filters: Pre-built filter elements (or None to build from searcher) - :param _hacks: Compatibility hack mode - :return: Tuple of (xml_element, comp_class) - """ - # Import here to avoid circular imports at module level - from caldav.calendarobjectresource import Event, Journal, Todo - - # With dual-mode classes, Async* are now aliases to the sync classes - # Keep the aliases for backward compatibility in type checks - AsyncEvent = Event - AsyncTodo = Todo - AsyncJournal = Journal - - # Build the request - data = cdav.CalendarData() - if server_expand: - if not searcher.start or not searcher.end: - raise error.ReportError("can't expand without a date range") - data += cdav.Expand(searcher.start, searcher.end) - - if props is None: - props_ = [data] - else: - props_ = [data] + list(props) - prop = dav.Prop() + props_ - vcalendar = cdav.CompFilter("VCALENDAR") - - comp_filter = None - comp_class = searcher.comp_class - - if filters: - # Deep copy to avoid mutating the original - filters = deepcopy(filters) - if hasattr(filters, "tag") and filters.tag == cdav.CompFilter.tag: - comp_filter = filters - filters = [] - else: - filters = [] - - # Build status filters for pending todos - vNotCompleted = cdav.TextMatch("COMPLETED", negate=True) - vNotCancelled = cdav.TextMatch("CANCELLED", negate=True) - vNeedsAction = cdav.TextMatch("NEEDS-ACTION") - vStatusNotCompleted = cdav.PropFilter("STATUS") + vNotCompleted - vStatusNotCancelled = cdav.PropFilter("STATUS") + vNotCancelled - vStatusNeedsAction = cdav.PropFilter("STATUS") + vNeedsAction - vStatusNotDefined = cdav.PropFilter("STATUS") + cdav.NotDefined() - vNoCompleteDate = cdav.PropFilter("COMPLETED") + cdav.NotDefined() - - if _hacks == "ignore_completed1": - # Query in line with RFC 4791 section 7.8.9 - 
filters.extend([vNoCompleteDate, vStatusNotCompleted, vStatusNotCancelled]) - elif _hacks == "ignore_completed2": - # Handle servers that return false on negated TextMatch for undefined fields - filters.extend([vNoCompleteDate, vStatusNotDefined]) - elif _hacks == "ignore_completed3": - # Handle recurring tasks with NEEDS-ACTION status - filters.extend([vStatusNeedsAction]) - - if searcher.start or searcher.end: - filters.append(cdav.TimeRange(searcher.start, searcher.end)) - - if searcher.alarm_start or searcher.alarm_end: - filters.append( - cdav.CompFilter("VALARM") + cdav.TimeRange(searcher.alarm_start, searcher.alarm_end) - ) - - # Map component flags/classes to comp_filter - comp_mappings = [ - ("event", "VEVENT", Event, AsyncEvent), - ("todo", "VTODO", Todo, AsyncTodo), - ("journal", "VJOURNAL", Journal, AsyncJournal), - ] - - for flag, comp_name, sync_class, async_class in comp_mappings: - comp_classes = (sync_class,) if async_class is None else (sync_class, async_class) - flagged = getattr(searcher, flag, False) - - if flagged: - if comp_class is not None and comp_class not in comp_classes: - raise error.ConsistencyError( - f"inconsistent search parameters - comp_class = {comp_class}, want {sync_class}" - ) - comp_class = sync_class - - if comp_filter and comp_filter.attributes.get("name") == comp_name: - comp_class = sync_class - if ( - flag == "todo" - and not getattr(searcher, "todo", False) - and searcher.include_completed is None - ): - searcher.include_completed = True - setattr(searcher, flag, True) - - if comp_class in comp_classes: - if comp_filter: - assert comp_filter.attributes.get("name") == comp_name - else: - comp_filter = cdav.CompFilter(comp_name) - setattr(searcher, flag, True) - - if comp_class and not comp_filter: - raise error.ConsistencyError(f"unsupported comp class {comp_class} for search") - - # Special hack for bedework - no comp_filter, do client-side filtering - # Keep comp_class so the caller knows what type to filter for 
client-side - # and to prevent _search_with_comptypes from being triggered again - if _hacks == "no_comp_filter": - comp_filter = None - - # Add property filters - for property in searcher._property_operator: - if searcher._property_operator[property] == "undef": - match = cdav.NotDefined() - filters.append(cdav.PropFilter(property.upper()) + match) - else: - value = searcher._property_filters[property] - property_ = property.upper() - if property.lower() == "category": - property_ = "CATEGORIES" - if property.lower() == "categories": - values = value.cats - else: - values = [value] - - for value in values: - if hasattr(value, "to_ical"): - value = value.to_ical() - - # Get collation setting for this property if available - collation_str = "i;octet" # Default to binary - if ( - hasattr(searcher, "_property_collation") - and property in searcher._property_collation - ): - case_sensitive = searcher._property_case_sensitive.get(property, True) - collation_str = _collation_to_caldav( - searcher._property_collation[property], case_sensitive - ) - - match = cdav.TextMatch(value, collation=collation_str) - filters.append(cdav.PropFilter(property_) + match) - - # Assemble the query - if comp_filter and filters: - comp_filter += filters - vcalendar += comp_filter - elif comp_filter: - vcalendar += comp_filter - elif filters: - vcalendar += filters - - filter_elem = cdav.Filter() + vcalendar - root = cdav.CalendarQuery() + [prop, filter_elem] - - return (root, comp_class) diff --git a/caldav/protocol/__init__.py b/caldav/protocol/__init__.py deleted file mode 100644 index ddf68acc..00000000 --- a/caldav/protocol/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Sans-I/O CalDAV protocol implementation. - -This module provides protocol-level operations without any I/O. -It builds requests and parses responses as pure data transformations. 
- -The protocol layer is organized into: -- types: Core data structures (DAVRequest, DAVResponse, result types) -- xml_builders: Internal functions to build XML request bodies -- xml_parsers: Internal functions to parse XML response bodies - -Both DAVClient (sync) and AsyncDAVClient (async) use these shared -functions for XML building and parsing, ensuring consistent behavior. - -Note: The xml_builders and xml_parsers functions are internal implementation -details and should not be used directly. Use the client methods instead. -""" - -from .types import ( - CalendarQueryResult, - DAVMethod, - DAVRequest, - DAVResponse, - MultiGetResult, - MultistatusResponse, - PrincipalInfo, - PropfindResult, - SyncCollectionResult, -) - -__all__ = [ - # Enums - "DAVMethod", - # Request/Response - "DAVRequest", - "DAVResponse", - # Result types - "CalendarQueryResult", - "MultiGetResult", - "MultistatusResponse", - "PrincipalInfo", - "PropfindResult", - "SyncCollectionResult", -] diff --git a/caldav/protocol/types.py b/caldav/protocol/types.py deleted file mode 100644 index 13697e1a..00000000 --- a/caldav/protocol/types.py +++ /dev/null @@ -1,221 +0,0 @@ -""" -Core protocol types for Sans-I/O CalDAV implementation. - -These dataclasses represent HTTP requests and responses at the protocol level, -independent of any I/O implementation. -""" - -from dataclasses import dataclass, field -from enum import Enum -from typing import Any - - -class DAVMethod(Enum): - """WebDAV/CalDAV HTTP methods.""" - - GET = "GET" - PUT = "PUT" - DELETE = "DELETE" - PROPFIND = "PROPFIND" - PROPPATCH = "PROPPATCH" - REPORT = "REPORT" - MKCALENDAR = "MKCALENDAR" - MKCOL = "MKCOL" - OPTIONS = "OPTIONS" - HEAD = "HEAD" - MOVE = "MOVE" - COPY = "COPY" - POST = "POST" - - -@dataclass(frozen=True) -class DAVRequest: - """ - Represents an HTTP request to be made. - - This is a pure data structure with no I/O. It describes what request - should be made, but does not make it. 
- - Attributes: - method: HTTP method (GET, PUT, PROPFIND, etc.) - url: Full URL for the request - headers: HTTP headers as dict - body: Request body as bytes (optional) - """ - - method: DAVMethod - url: str - headers: dict[str, str] = field(default_factory=dict) - body: bytes | None = None - - def with_header(self, name: str, value: str) -> "DAVRequest": - """Return new request with additional header.""" - new_headers = {**self.headers, name: value} - return DAVRequest( - method=self.method, - url=self.url, - headers=new_headers, - body=self.body, - ) - - def with_body(self, body: bytes) -> "DAVRequest": - """Return new request with body.""" - return DAVRequest( - method=self.method, - url=self.url, - headers=self.headers, - body=body, - ) - - -@dataclass(frozen=True) -class DAVResponse: - """ - Represents an HTTP response received. - - This is a pure data structure with no I/O. It contains the response - data but does not fetch it. - - Attributes: - status: HTTP status code - headers: HTTP headers as dict - body: Response body as bytes - """ - - status: int - headers: dict[str, str] - body: bytes - - @property - def ok(self) -> bool: - """True if status indicates success (2xx).""" - return 200 <= self.status < 300 - - @property - def is_multistatus(self) -> bool: - """True if this is a 207 Multi-Status response.""" - return self.status == 207 - - @property - def reason(self) -> str: - """Return a reason phrase for the status code.""" - reasons = { - 200: "OK", - 201: "Created", - 204: "No Content", - 207: "Multi-Status", - 301: "Moved Permanently", - 302: "Found", - 304: "Not Modified", - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 405: "Method Not Allowed", - 409: "Conflict", - 412: "Precondition Failed", - 415: "Unsupported Media Type", - 500: "Internal Server Error", - 501: "Not Implemented", - 502: "Bad Gateway", - 503: "Service Unavailable", - } - return reasons.get(self.status, "Unknown") - - -@dataclass -class 
PropfindResult: - """ - Parsed result of a PROPFIND request for a single resource. - - Attributes: - href: URL/path of the resource - properties: Dict of property name -> value - status: HTTP status for this resource (default 200) - """ - - href: str - properties: dict[str, Any] = field(default_factory=dict) - status: int = 200 - - -@dataclass -class CalendarQueryResult: - """ - Parsed result of a calendar-query REPORT for a single object. - - Attributes: - href: URL/path of the calendar object - etag: ETag of the object (for conditional updates) - calendar_data: iCalendar data as string - status: HTTP status for this resource (default 200) - """ - - href: str - etag: str | None = None - calendar_data: str | None = None - status: int = 200 - - -@dataclass -class MultiGetResult: - """ - Parsed result of a calendar-multiget REPORT for a single object. - - Same structure as CalendarQueryResult but semantically different operation. - """ - - href: str - etag: str | None = None - calendar_data: str | None = None - status: int = 200 - - -@dataclass -class SyncCollectionResult: - """ - Parsed result of a sync-collection REPORT. - - Attributes: - changed: List of changed/new resources - deleted: List of deleted resource hrefs - sync_token: New sync token for next sync - """ - - changed: list[CalendarQueryResult] = field(default_factory=list) - deleted: list[str] = field(default_factory=list) - sync_token: str | None = None - - -@dataclass -class MultistatusResponse: - """ - Parsed multi-status response containing multiple results. - - This is the raw parsed form of a 207 Multi-Status response. - - Attributes: - responses: List of individual response results - sync_token: Sync token if present (for sync-collection) - """ - - responses: list[PropfindResult] = field(default_factory=list) - sync_token: str | None = None - - -@dataclass -class PrincipalInfo: - """ - Information about a CalDAV principal. 
- - Attributes: - url: Principal URL - calendar_home_set: URL of calendar home - displayname: Display name of principal - calendar_user_address_set: Set of calendar user addresses (email-like) - """ - - url: str - calendar_home_set: str | None = None - displayname: str | None = None - calendar_user_address_set: list[str] = field(default_factory=list) diff --git a/caldav/protocol/xml_builders.py b/caldav/protocol/xml_builders.py deleted file mode 100644 index 6009ca83..00000000 --- a/caldav/protocol/xml_builders.py +++ /dev/null @@ -1,346 +0,0 @@ -""" -Pure functions for building CalDAV XML request bodies. - -All functions in this module are pure - they take data in and return XML out, -with no side effects or I/O. -""" - -from datetime import datetime -from typing import Any - -from lxml import etree - -from caldav.elements import cdav, dav -from caldav.elements.base import BaseElement - - -def _build_propfind_body( - props: list[str] | None = None, - allprop: bool = False, -) -> bytes: - """ - Build PROPFIND request body XML. - - Args: - props: List of property names to retrieve. If None and allprop=False, - returns minimal propfind. - allprop: If True, request all properties. - - Returns: - UTF-8 encoded XML bytes - """ - if allprop: - propfind = dav.Propfind() + dav.Allprop() - elif props: - prop_elements = [] - for prop_name in props: - prop_element = _prop_name_to_element(prop_name) - if prop_element is not None: - prop_elements.append(prop_element) - propfind = dav.Propfind() + (dav.Prop() + prop_elements) - else: - propfind = dav.Propfind() + dav.Prop() - - return etree.tostring(propfind.xmlelement(), encoding="utf-8", xml_declaration=True) - - -def _build_proppatch_body( - set_props: dict[str, Any] | None = None, -) -> bytes: - """ - Build PROPPATCH request body for setting properties. 
- - Args: - set_props: Properties to set (name -> value) - - Returns: - UTF-8 encoded XML bytes - """ - propertyupdate = dav.PropertyUpdate() - - if set_props: - set_elements = [] - for name, value in set_props.items(): - prop_element = _prop_name_to_element(name, value) - if prop_element is not None: - set_elements.append(prop_element) - if set_elements: - set_element = dav.Set() + (dav.Prop() + set_elements) - propertyupdate += set_element - - return etree.tostring(propertyupdate.xmlelement(), encoding="utf-8", xml_declaration=True) - - -def _build_calendar_query_body( - start: datetime | None = None, - end: datetime | None = None, - expand: bool = False, - comp_filter: str | None = None, - event: bool = False, - todo: bool = False, - journal: bool = False, - props: list[BaseElement] | None = None, - filters: list[BaseElement] | None = None, -) -> tuple[bytes, str | None]: - """ - Build calendar-query REPORT request body. - - This is the core CalDAV search operation for retrieving calendar objects - matching specified criteria. 
- - Args: - start: Start of time range filter - end: End of time range filter - expand: Whether to expand recurring events - comp_filter: Component type filter name (VEVENT, VTODO, VJOURNAL) - event: Include VEVENT components (sets comp_filter if not specified) - todo: Include VTODO components (sets comp_filter if not specified) - journal: Include VJOURNAL components (sets comp_filter if not specified) - props: Additional CalDAV properties to include - filters: Additional filters to apply - - Returns: - Tuple of (UTF-8 encoded XML bytes, component type name or None) - """ - # Build calendar-data element with optional expansion - data = cdav.CalendarData() - if expand: - if not start or not end: - from caldav.lib import error - - raise error.ReportError("can't expand without a date range") - data += cdav.Expand(start, end) - - # Build props - props_list: list[BaseElement] = [data] - if props: - props_list.extend(props) - prop = dav.Prop() + props_list - - # Build VCALENDAR filter - vcalendar = cdav.CompFilter("VCALENDAR") - - # Determine component filter from flags - comp_type = comp_filter - if not comp_type: - if event: - comp_type = "VEVENT" - elif todo: - comp_type = "VTODO" - elif journal: - comp_type = "VJOURNAL" - - # Build filter list - filter_list: list[BaseElement] = [] - if filters: - filter_list.extend(filters) - - # Add time range filter if specified - if start or end: - filter_list.append(cdav.TimeRange(start, end)) - - # Build component filter - if comp_type: - comp_filter_elem = cdav.CompFilter(comp_type) - if filter_list: - comp_filter_elem += filter_list - vcalendar += comp_filter_elem - elif filter_list: - vcalendar += filter_list - - # Build final query - filter_elem = cdav.Filter() + vcalendar - root = cdav.CalendarQuery() + [prop, filter_elem] - - return ( - etree.tostring(root.xmlelement(), encoding="utf-8", xml_declaration=True), - comp_type, - ) - - -def _build_calendar_multiget_body( - hrefs: list[str], - include_data: bool = True, -) -> 
bytes: - """ - Build calendar-multiget REPORT request body. - - Used to retrieve multiple calendar objects by their URLs in a single request. - - Args: - hrefs: List of calendar object URLs to retrieve - include_data: Include calendar-data in response - - Returns: - UTF-8 encoded XML bytes - """ - elements: list[BaseElement] = [] - - if include_data: - prop = dav.Prop() + cdav.CalendarData() - elements.append(prop) - - for href in hrefs: - elements.append(dav.Href(href)) - - multiget = cdav.CalendarMultiGet() + elements - - return etree.tostring(multiget.xmlelement(), encoding="utf-8", xml_declaration=True) - - -def _build_sync_collection_body( - sync_token: str | None = None, - props: list[str] | None = None, - sync_level: str = "1", -) -> bytes: - """ - Build sync-collection REPORT request body. - - Used for efficient synchronization - only returns changed items since - the given sync token. - - Args: - sync_token: Previous sync token (empty string for initial sync) - props: Property names to include in response - sync_level: Sync level (usually "1") - - Returns: - UTF-8 encoded XML bytes - """ - elements: list[BaseElement] = [] - - # Sync token (empty for initial sync) - token_elem = dav.SyncToken(sync_token or "") - elements.append(token_elem) - - # Sync level - level_elem = dav.SyncLevel(sync_level) - elements.append(level_elem) - - # Properties to return - if props: - prop_elements = [] - for prop_name in props: - prop_element = _prop_name_to_element(prop_name) - if prop_element is not None: - prop_elements.append(prop_element) - if prop_elements: - elements.append(dav.Prop() + prop_elements) - else: - # Default: return etag and calendar-data - elements.append(dav.Prop() + [dav.GetEtag(), cdav.CalendarData()]) - - sync_collection = dav.SyncCollection() + elements - - return etree.tostring(sync_collection.xmlelement(), encoding="utf-8", xml_declaration=True) - - -def _build_mkcalendar_body( - displayname: str | None = None, - description: str | None = None, - 
timezone: str | None = None, - supported_components: list[str] | None = None, -) -> bytes: - """ - Build MKCALENDAR request body. - - Args: - displayname: Calendar display name - description: Calendar description - timezone: VTIMEZONE component data - supported_components: List of supported component types (VEVENT, VTODO, etc.) - - Returns: - UTF-8 encoded XML bytes - """ - prop = dav.Prop() - - if displayname: - prop += dav.DisplayName(displayname) - - if description: - prop += cdav.CalendarDescription(description) - - if timezone: - prop += cdav.CalendarTimeZone(timezone) - - if supported_components: - sccs = cdav.SupportedCalendarComponentSet() - for comp in supported_components: - sccs += cdav.Comp(comp) - prop += sccs - - # Add resource type - prop += dav.ResourceType() + [dav.Collection(), cdav.Calendar()] - - set_elem = dav.Set() + prop - mkcalendar = cdav.Mkcalendar() + set_elem - - return etree.tostring(mkcalendar.xmlelement(), encoding="utf-8", xml_declaration=True) - - -# Property name to element mapping - - -def _prop_name_to_element(name: str, value: Any | None = None) -> BaseElement | None: - """ - Convert property name string to element object. 
- - Args: - name: Property name (case-insensitive) - value: Optional value for valued elements - - Returns: - BaseElement instance or None if unknown property - """ - # DAV properties (only those that exist in dav.py) - dav_props: dict[str, Any] = { - "displayname": dav.DisplayName, - "resourcetype": dav.ResourceType, - "getetag": dav.GetEtag, - "current-user-principal": dav.CurrentUserPrincipal, - "owner": dav.Owner, - "sync-token": dav.SyncToken, - "supported-report-set": dav.SupportedReportSet, - } - - # CalDAV properties - caldav_props: dict[str, Any] = { - "calendar-data": cdav.CalendarData, - "calendar-home-set": cdav.CalendarHomeSet, - "calendar-user-address-set": cdav.CalendarUserAddressSet, - "calendar-user-type": cdav.CalendarUserType, - "calendar-description": cdav.CalendarDescription, - "calendar-timezone": cdav.CalendarTimeZone, - "supported-calendar-component-set": cdav.SupportedCalendarComponentSet, - "schedule-inbox-url": cdav.ScheduleInboxURL, - "schedule-outbox-url": cdav.ScheduleOutboxURL, - } - - # Strip Clark notation namespace prefix if present (e.g., "{DAV:}displayname" -> "displayname") - if name.startswith("{") and "}" in name: - name = name.split("}", 1)[1] - - name_lower = name.lower().replace("_", "-") - - # Check DAV properties - if name_lower in dav_props: - cls = dav_props[name_lower] - if value is not None: - try: - return cls(value) - except TypeError: - return cls() - return cls() - - # Check CalDAV properties - if name_lower in caldav_props: - cls = caldav_props[name_lower] - if value is not None: - try: - return cls(value) - except TypeError: - return cls() - return cls() - - return None diff --git a/caldav/protocol/xml_parsers.py b/caldav/protocol/xml_parsers.py deleted file mode 100644 index f6e267a7..00000000 --- a/caldav/protocol/xml_parsers.py +++ /dev/null @@ -1,468 +0,0 @@ -""" -Pure functions for parsing CalDAV XML responses. 
- -All functions in this module are pure - they take XML bytes in and return -structured data out, with no side effects or I/O. -""" - -import logging -from typing import Any -from urllib.parse import unquote - -from lxml import etree -from lxml.etree import _Element - -from caldav.elements import cdav, dav -from caldav.lib import error -from caldav.lib.url import URL - -from .types import CalendarQueryResult, MultistatusResponse, PropfindResult, SyncCollectionResult - -log = logging.getLogger(__name__) - - -def _parse_multistatus( - body: bytes, - huge_tree: bool = False, -) -> MultistatusResponse: - """ - Parse a 207 Multi-Status response body. - - Args: - body: Raw XML response bytes - huge_tree: Allow parsing very large XML documents - - Returns: - Structured MultistatusResponse with parsed results - - Raises: - XMLSyntaxError: If body is not valid XML - ResponseError: If response indicates an error - """ - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - - responses: list[PropfindResult] = [] - sync_token: str | None = None - - # Strip to multistatus content - response_elements = _strip_to_multistatus(tree) - - for elem in response_elements: - if elem.tag == dav.SyncToken.tag: - sync_token = elem.text - continue - - if elem.tag != dav.Response.tag: - continue - - href, propstats, status = _parse_response_element(elem) - properties = _extract_properties(propstats) - status_code = _status_to_code(status) if status else 200 - - responses.append( - PropfindResult( - href=href, - properties=properties, - status=status_code, - ) - ) - - return MultistatusResponse(responses=responses, sync_token=sync_token) - - -def _parse_propfind_response( - body: bytes, - status_code: int = 207, - huge_tree: bool = False, -) -> list[PropfindResult]: - """ - Parse a PROPFIND response. 
- - Args: - body: Raw XML response bytes - status_code: HTTP status code of the response - huge_tree: Allow parsing very large XML documents - - Returns: - List of PropfindResult with properties for each resource - """ - if status_code == 404: - return [] - - if status_code not in (200, 207): - raise error.ResponseError(f"PROPFIND failed with status {status_code}") - - if not body: - return [] - - result = _parse_multistatus(body, huge_tree=huge_tree) - return result.responses - - -def _parse_calendar_query_response( - body: bytes, - status_code: int = 207, - huge_tree: bool = False, -) -> list[CalendarQueryResult]: - """ - Parse a calendar-query REPORT response. - - Args: - body: Raw XML response bytes - status_code: HTTP status code of the response - huge_tree: Allow parsing very large XML documents - - Returns: - List of CalendarQueryResult with calendar data - """ - if status_code not in (200, 207): - raise error.ResponseError(f"REPORT failed with status {status_code}") - - if not body: - return [] - - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - - results: list[CalendarQueryResult] = [] - response_elements = _strip_to_multistatus(tree) - - for elem in response_elements: - if elem.tag != dav.Response.tag: - continue - - href, propstats, status = _parse_response_element(elem) - status_code_elem = _status_to_code(status) if status else 200 - - calendar_data: str | None = None - etag: str | None = None - - # Extract properties from propstats - for propstat in propstats: - prop = propstat.find(dav.Prop.tag) - if prop is None: - continue - - for child in prop: - if child.tag == cdav.CalendarData.tag: - calendar_data = child.text - elif child.tag == dav.GetEtag.tag: - etag = child.text - - results.append( - CalendarQueryResult( - href=href, - etag=etag, - calendar_data=calendar_data, - status=status_code_elem, - ) - ) - - return results - - -## TODO: the purpose of the xml_parsers was to consolidate common code to be used by 
sync and async code paths, to avoid duplicated code. Why cannot this code snippet be used for async? The code here is very similar to _parse_calendar_query_response - we should consolidate common code -def _parse_sync_collection_response( - body: bytes, - status_code: int = 207, - huge_tree: bool = False, -) -> SyncCollectionResult: - """ - Parse a sync-collection REPORT response. - - Args: - body: Raw XML response bytes - status_code: HTTP status code of the response - huge_tree: Allow parsing very large XML documents - - Returns: - SyncCollectionResult with changed items, deleted hrefs, and new sync token - """ - if status_code not in (200, 207): - raise error.ResponseError(f"sync-collection failed with status {status_code}") - - if not body: - return SyncCollectionResult() - - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - - changed: list[CalendarQueryResult] = [] - deleted: list[str] = [] - sync_token: str | None = None - - response_elements = _strip_to_multistatus(tree) - - for elem in response_elements: - if elem.tag == dav.SyncToken.tag: - sync_token = elem.text - continue - - if elem.tag != dav.Response.tag: - continue - - href, propstats, status = _parse_response_element(elem) - status_code_elem = _status_to_code(status) if status else 200 - - # 404 means deleted - if status_code_elem == 404: - deleted.append(href) - continue - - calendar_data: str | None = None - etag: str | None = None - - for propstat in propstats: - prop = propstat.find(dav.Prop.tag) - if prop is None: - continue - - for child in prop: - if child.tag == cdav.CalendarData.tag: - calendar_data = child.text - elif child.tag == dav.GetEtag.tag: - etag = child.text - - changed.append( - CalendarQueryResult( - href=href, - etag=etag, - calendar_data=calendar_data, - status=status_code_elem, - ) - ) - - return SyncCollectionResult( - changed=changed, - deleted=deleted, - sync_token=sync_token, - ) - - -def _parse_calendar_multiget_response( - body: 
bytes, - status_code: int = 207, - huge_tree: bool = False, -) -> list[CalendarQueryResult]: - """ - Parse a calendar-multiget REPORT response. - - This is the same format as calendar-query, so we delegate to that parser. - - Args: - body: Raw XML response bytes - status_code: HTTP status code of the response - huge_tree: Allow parsing very large XML documents - - Returns: - List of CalendarQueryResult with calendar data - """ - return _parse_calendar_query_response(body, status_code, huge_tree) - - -# Helper functions - - -def _normalize_href(text: str) -> str: - """ - Normalize an href string from a DAV response element. - - Handles the Confluence double-encoding bug (%2540 → %40) and converts - absolute URLs to path-only strings so callers always work with paths. - """ - # Fix for https://github.com/python-caldav/caldav/issues/471 - # Confluence server quotes the user email twice. - if "%2540" in text: - text = text.replace("%2540", "%40") - href = unquote(text) - # Ref https://github.com/python-caldav/caldav/issues/435 - # Some servers return absolute URLs; convert to path. - if ":" in href: - href = unquote(URL(href).path) - return href - - -def _strip_to_multistatus(tree: _Element) -> _Element | list[_Element]: - """ - Strip outer elements to get to the multistatus content. - - The general format is: - - ... - ... - - - But sometimes multistatus and/or xml element is missing. - Returns the element(s) containing responses. - """ - if tree.tag == "xml" and len(tree) > 0 and tree[0].tag == dav.MultiStatus.tag: - return tree[0] - if tree.tag == dav.MultiStatus.tag: - return tree - return [tree] - - -def _parse_response_element( - response: _Element, -) -> tuple[str, list[_Element], str | None]: - """ - Parse a single DAV:response element. 
- - Returns: - Tuple of (href, propstat elements list, status string) - """ - status: str | None = None - href: str | None = None - propstats: list[_Element] = [] - - for elem in response: - if elem.tag == dav.Status.tag: - status = elem.text - _validate_status(status) - elif elem.tag == dav.Href.tag: - href = _normalize_href(elem.text or "") - elif elem.tag == dav.PropStat.tag: - propstats.append(elem) - - return (href or "", propstats, status) - - -def _extract_properties(propstats: list[_Element]) -> dict[str, Any]: - """ - Extract properties from propstat elements into a dict. - - Args: - propstats: List of propstat elements - - Returns: - Dict mapping property tag to value (text or element) - """ - properties: dict[str, Any] = {} - - for propstat in propstats: - # Check status - skip 404 properties - status_elem = propstat.find(dav.Status.tag) - if status_elem is not None and status_elem.text: - if " 404 " in status_elem.text: - continue - - # Find prop element - prop = propstat.find(dav.Prop.tag) - if prop is None: - continue - - # Extract each property - for child in prop: - tag = child.tag - # Get simple text value or store element for complex values - if len(child) == 0: - properties[tag] = child.text - else: - # For complex elements, store the element itself - # or extract nested text values - properties[tag] = _element_to_value(child) - - return properties - - -def _element_to_value(elem: _Element) -> Any: - """ - Convert an XML element to a Python value. - - For simple elements, returns text content. - For complex elements with children, returns dict or list. - Handles special CalDAV elements like supported-calendar-component-set. 
- """ - if len(elem) == 0: - return elem.text - - # Special handling for known complex properties - tag = elem.tag - - # supported-calendar-component-set: extract comp names - if tag == cdav.SupportedCalendarComponentSet.tag: - return [child.get("name") for child in elem if child.get("name")] - - # calendar-user-address-set: extract href texts - if tag == cdav.CalendarUserAddressSet.tag: - return [child.text for child in elem if child.tag == dav.Href.tag and child.text] - - # calendar-home-set: extract href text (usually single) - if tag == cdav.CalendarHomeSet.tag: - hrefs = [child.text for child in elem if child.tag == dav.Href.tag and child.text] - return hrefs[0] if len(hrefs) == 1 else hrefs - - # resourcetype: extract child tag names (e.g., collection, calendar) - if tag == dav.ResourceType.tag: - return [child.tag for child in elem] - - # current-user-principal: extract href - if tag == dav.CurrentUserPrincipal.tag: - for child in elem: - if child.tag == dav.Href.tag and child.text: - return child.text - return None - - # Generic handling for elements with children - children_texts = [] - for child in elem: - if child.text: - children_texts.append(child.text) - elif child.get("name"): - # Elements with name attribute (like comp) - children_texts.append(child.get("name")) - elif len(child) == 0: - # Empty element - use tag name - children_texts.append(child.tag) - - if len(children_texts) == 1: - return children_texts[0] - elif children_texts: - return children_texts - - # Fallback: return the element for further processing - return elem - - -def _validate_status(status: str | None) -> None: - """ - Validate a status string like "HTTP/1.1 404 Not Found". - - 200, 201, 207, and 404 are considered acceptable statuses. 
- - Args: - status: Status string from response - - Raises: - ResponseError: If status indicates an error - """ - if status is None: - return - - acceptable = (" 200 ", " 201 ", " 207 ", " 404 ") - if not any(code in status for code in acceptable): - raise error.ResponseError(status) - - -def _status_to_code(status: str | None) -> int: - """ - Extract status code from status string like "HTTP/1.1 200 OK". - - Args: - status: Status string - - Returns: - Integer status code (defaults to 200 if parsing fails) - """ - if not status: - return 200 - - parts = status.split() - if len(parts) >= 2: - try: - return int(parts[1]) - except ValueError: - pass - - return 200 diff --git a/caldav/response.py b/caldav/response.py index 56923f5f..c7279dff 100644 --- a/caldav/response.py +++ b/caldav/response.py @@ -1,14 +1,13 @@ """ -Base class for DAV response parsing. - -This module contains the shared logic between DAVResponse (sync) and -AsyncDAVResponse (async) to eliminate code duplication. +DAV response parsing: base class, result types and XML parse functions. 
""" import logging import warnings from collections.abc import Iterable +from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, cast +from urllib.parse import unquote from lxml import etree from lxml.etree import _Element @@ -18,10 +17,9 @@ from caldav.elements.base import BaseElement from caldav.lib import error from caldav.lib.python_utilities import to_normal_str +from caldav.lib.url import URL if TYPE_CHECKING: - # Protocol for HTTP response objects (works with httpx, niquests, requests) - # Using Any as the type hint to avoid strict protocol matching Response = Any log = logging.getLogger(__name__) @@ -60,16 +58,8 @@ class SyncCollectionResult: sync_token: str | None = None -@dataclass -class MultistatusResponse: - """Parsed multi-status (207) response containing multiple PropfindResults.""" - - responses: list[PropfindResult] = field(default_factory=list) - sync_token: str | None = None - - # --------------------------------------------------------------------------- -# XML parse helpers (previously in protocol/xml_parsers.py) +# XML parse helpers # --------------------------------------------------------------------------- @@ -126,30 +116,6 @@ def _strip_to_multistatus(tree: _Element) -> "_Element | list[_Element]": return [tree] -## TODO: _parse_response_element is a simplified version of DAVResponse._parse_response -## (which adds assertions and handles Stalwart/purelymail quirks). The module-level parse -## functions (_parse_multistatus etc.) use this simpler version because they are pure -## functions with no access to a response instance. If the parse pipeline were refactored -## to work through the tree already stored on self (avoiding the re-parse in _raw_bytes), -## both of these could be unified into a single method. 
-def _parse_response_element( - response: _Element, -) -> "tuple[str, list[_Element], str | None]": - """Parse a single DAV:response element into (href, propstats, status).""" - status: str | None = None - href: str | None = None - propstats: list[_Element] = [] - for elem in response: - if elem.tag == dav.Status.tag: - status = elem.text - _validate_status(status) - elif elem.tag == dav.Href.tag: - href = _normalize_href(elem.text or "") - elif elem.tag == dav.PropStat.tag: - propstats.append(elem) - return (href or "", propstats, status) - - def _extract_properties(propstats: "list[_Element]") -> "dict[str, Any]": """Extract properties from propstat elements into a flat dict.""" properties: dict[str, Any] = {} @@ -211,135 +177,6 @@ def _element_to_value(elem: _Element) -> Any: return elem -def _parse_multistatus(body: bytes, huge_tree: bool = False) -> MultistatusResponse: - """Parse a 207 Multi-Status response body into a MultistatusResponse.""" - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - - responses: list[PropfindResult] = [] - sync_token: str | None = None - - for elem in _strip_to_multistatus(tree): - if elem.tag == dav.SyncToken.tag: - sync_token = elem.text - continue - if elem.tag != dav.Response.tag: - continue - href, propstats, status = _parse_response_element(elem) - properties = _extract_properties(propstats) - responses.append( - PropfindResult( - href=href, - properties=properties, - status=_status_to_code(status) if status else 200, - ) - ) - - return MultistatusResponse(responses=responses, sync_token=sync_token) - - -def _parse_propfind_response( - body: bytes, status_code: int = 207, huge_tree: bool = False -) -> list[PropfindResult]: - """Parse a PROPFIND response body into a list of PropfindResult objects.""" - if status_code == 404: - return [] - if status_code not in (200, 207): - raise error.ResponseError(f"PROPFIND failed with status {status_code}") - if not body: - return [] - return 
_parse_multistatus(body, huge_tree=huge_tree).responses - - -def _parse_calendar_query_response( - body: bytes, status_code: int = 207, huge_tree: bool = False -) -> list[CalendarQueryResult]: - """Parse a calendar-query or calendar-multiget REPORT response.""" - if status_code not in (200, 207): - raise error.ResponseError(f"REPORT failed with status {status_code}") - if not body: - return [] - - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - results: list[CalendarQueryResult] = [] - - for elem in _strip_to_multistatus(tree): - if elem.tag != dav.Response.tag: - continue - href, propstats, status = _parse_response_element(elem) - calendar_data: str | None = None - etag: str | None = None - for propstat in propstats: - prop = propstat.find(dav.Prop.tag) - if prop is None: - continue - for child in prop: - if child.tag == cdav.CalendarData.tag: - calendar_data = child.text - elif child.tag == dav.GetEtag.tag: - etag = child.text - results.append( - CalendarQueryResult( - href=href, - etag=etag, - calendar_data=calendar_data, - status=_status_to_code(status) if status else 200, - ) - ) - - return results - - -def _parse_sync_collection_response( - body: bytes, status_code: int = 207, huge_tree: bool = False -) -> SyncCollectionResult: - """Parse a sync-collection REPORT response.""" - if status_code not in (200, 207): - raise error.ResponseError(f"sync-collection failed with status {status_code}") - if not body: - return SyncCollectionResult() - - parser = etree.XMLParser(huge_tree=huge_tree) - tree = etree.fromstring(body, parser) - changed: list[CalendarQueryResult] = [] - deleted: list[str] = [] - sync_token: str | None = None - - for elem in _strip_to_multistatus(tree): - if elem.tag == dav.SyncToken.tag: - sync_token = elem.text - continue - if elem.tag != dav.Response.tag: - continue - href, propstats, status = _parse_response_element(elem) - status_code_elem = _status_to_code(status) if status else 200 - if 
status_code_elem == 404: - deleted.append(href) - continue - calendar_data = None - etag = None - for propstat in propstats: - prop = propstat.find(dav.Prop.tag) - if prop is None: - continue - for child in prop: - if child.tag == cdav.CalendarData.tag: - calendar_data = child.text - elif child.tag == dav.GetEtag.tag: - etag = child.text - changed.append( - CalendarQueryResult( - href=href, - etag=etag, - calendar_data=calendar_data, - status=status_code_elem, - ) - ) - - return SyncCollectionResult(changed=changed, deleted=deleted, sync_token=sync_token) - - class DAVResponse: """ Base class containing shared response parsing logic. @@ -361,6 +198,31 @@ class DAVResponse: def __init__(self, response: "Response", davclient: Any = None) -> None: self._init_from_response(response, davclient) + @classmethod + def from_bytes( + cls, body: bytes, status_code: int = 207, huge_tree: bool = False + ) -> "DAVResponse": + """Construct a DAVResponse from raw bytes — intended for tests.""" + + class _FakeResponse: + headers: dict = {} + status_code = 200 + content = b"" + text = "" + reason = "OK" + + fake = _FakeResponse() + fake.status_code = status_code + fake.content = body + fake.text = body.decode("utf-8", errors="replace") if body else "" + obj = cls.__new__(cls) + obj.huge_tree = huge_tree + obj.davclient = None + obj.results = None + obj._sync_token = None + obj._init_from_response(fake) + return obj + def _init_from_response(self, response: "Response", davclient: Any = None) -> None: """ Initialize response from an HTTP response object. 
@@ -481,7 +343,7 @@ def _strip_to_multistatus(self) -> _Element | list[_Element]: (The equivalent of this method could probably be found with a simple XPath query, but I'm not much into XPath) """ - return _proto_strip(self.tree) + return _strip_to_multistatus(self.tree) def validate_status(self, status: str) -> None: """ @@ -494,6 +356,102 @@ def validate_status(self, status: str) -> None: """ _validate_status(status) + def _extract_calendar_query_props( + self, propstats: "list[_Element]" + ) -> "tuple[str | None, str | None]": + """Extract (etag, calendar_data) from a list of propstat elements.""" + etag: str | None = None + calendar_data: str | None = None + for propstat in propstats: + prop = propstat.find(dav.Prop.tag) + if prop is None: + continue + for child in prop: + if child.tag == cdav.CalendarData.tag: + calendar_data = child.text + elif child.tag == dav.GetEtag.tag: + etag = child.text + return etag, calendar_data + + def parse_propfind(self) -> "list[PropfindResult]": + """Parse the response body as a PROPFIND multi-status reply.""" + if self.status == 404: + return [] + if self.status not in (200, 207): + raise error.ResponseError(f"PROPFIND failed with status {self.status}") + if self.tree is None: + return [] + results: list[PropfindResult] = [] + for elem in self._strip_to_multistatus(): + if elem.tag == dav.SyncToken.tag: + self._sync_token = elem.text + continue + if elem.tag != dav.Response.tag: + continue + href, propstats, status = self._parse_response(elem) + results.append( + PropfindResult( + href=href, + properties=_extract_properties(propstats), + status=_status_to_code(status) if status else 200, + ) + ) + return results + + def parse_calendar_query(self) -> "list[CalendarQueryResult]": + """Parse the response body as a calendar-query or calendar-multiget REPORT reply.""" + if self.status not in (200, 207): + raise error.ResponseError(f"REPORT failed with status {self.status}") + if self.tree is None: + return [] + results: 
list[CalendarQueryResult] = [] + for elem in self._strip_to_multistatus(): + if elem.tag != dav.Response.tag: + continue + href, propstats, status = self._parse_response(elem) + etag, calendar_data = self._extract_calendar_query_props(propstats) + results.append( + CalendarQueryResult( + href=href, + etag=etag, + calendar_data=calendar_data, + status=_status_to_code(status) if status else 200, + ) + ) + return results + + def parse_sync_collection(self) -> "SyncCollectionResult": + """Parse the response body as a sync-collection REPORT reply.""" + if self.status not in (200, 207): + raise error.ResponseError(f"sync-collection failed with status {self.status}") + if self.tree is None: + return SyncCollectionResult() + changed: list[CalendarQueryResult] = [] + deleted: list[str] = [] + sync_token: str | None = None + for elem in self._strip_to_multistatus(): + if elem.tag == dav.SyncToken.tag: + sync_token = elem.text + self._sync_token = elem.text + continue + if elem.tag != dav.Response.tag: + continue + href, propstats, status_str = self._parse_response(elem) + status_code = _status_to_code(status_str) if status_str else 200 + if status_code == 404: + deleted.append(href) + continue + etag, calendar_data = self._extract_calendar_query_props(propstats) + changed.append( + CalendarQueryResult( + href=href, + etag=etag, + calendar_data=calendar_data, + status=status_code, + ) + ) + return SyncCollectionResult(changed=changed, deleted=deleted, sync_token=sync_token) + def _parse_response(self, response: _Element) -> tuple[str, list[_Element], Any | None]: """ One response should contain one or zero status children, one diff --git a/caldav/search.py b/caldav/search.py index d5216de5..7557df35 100644 --- a/caldav/search.py +++ b/caldav/search.py @@ -1,21 +1,20 @@ import inspect import logging +from copy import deepcopy from dataclasses import dataclass, field, replace from datetime import datetime, timezone from enum import Enum, auto from typing import TYPE_CHECKING, 
Any, Optional +from icalendar import Timezone from icalendar.prop import TypesFactory from icalendar_searcher import Searcher from icalendar_searcher.collation import Collation from .calendarobjectresource import CalendarObjectResource, Event, Journal, Todo from .collection import Calendar -from .elements import cdav +from .elements import cdav, dav from .lib import error -from .operations.search_ops import _build_search_xml_query -from .operations.search_ops import _collation_to_caldav as collation_to_caldav -from .operations.search_ops import _filter_search_results as filter_search_results if TYPE_CHECKING: from .calendarobjectresource import ( @@ -26,8 +25,211 @@ _icalendar_types = TypesFactory() -# Re-export for backward compatibility -_collation_to_caldav = collation_to_caldav +def _collation_to_caldav(collation: Collation, case_sensitive: bool = True) -> str: + """Map a Collation enum value to a CalDAV collation identifier string.""" + if collation == Collation.SIMPLE: + return "i;octet" if case_sensitive else "i;ascii-casemap" + elif collation == Collation.UNICODE: + return "i;octet" if case_sensitive else "i;unicode-casemap" + elif collation == Collation.LOCALE: + return "i;ascii-casemap" + return "i;octet" + + +# backward-compat alias used inside this module +collation_to_caldav = _collation_to_caldav + + +def _filter_search_results( + objects: list["CalendarObjectResource"], + searcher: "Searcher", + post_filter: bool | None = None, + split_expanded: bool = True, + server_expand: bool = False, +) -> list["CalendarObjectResource"]: + """Apply client-side filtering and handle recurrence expansion/splitting.""" + if not (post_filter or searcher.expand or (split_expanded and server_expand)): + return objects + + result = [] + for o in objects: + if searcher.expand or post_filter: + try: + filtered = searcher.check_component(o, expand_only=not post_filter) + except ValueError: + filtered = [ + x for x in o.icalendar_instance.subcomponents if not isinstance(x, 
Timezone) + ] + if not filtered: + continue + else: + filtered = [ + x for x in o.icalendar_instance.subcomponents if not isinstance(x, Timezone) + ] + + i = o.icalendar_instance + tz_ = [x for x in i.subcomponents if isinstance(x, Timezone)] + i.subcomponents = tz_ + + for comp in filtered: + if isinstance(comp, Timezone): + continue + if split_expanded: + new_obj = o.copy(keep_uid=True) + new_i = new_obj.icalendar_instance + new_i.subcomponents = [] + for tz in tz_: + new_i.add_component(tz) + result.append(new_obj) + else: + new_i = i + new_i.add_component(comp) + + if not split_expanded: + result.append(o) + + return result + + +# backward-compat alias +filter_search_results = _filter_search_results + + +def _build_search_xml_query( + searcher: "Searcher", + server_expand: bool = False, + props: list[Any] | None = None, + filters: Any = None, + _hacks: str | None = None, +) -> tuple[Any, type | None]: + """Build a CalDAV calendar-query XML request body.""" + data = cdav.CalendarData() + if server_expand: + if not searcher.start or not searcher.end: + raise error.ReportError("can't expand without a date range") + data += cdav.Expand(searcher.start, searcher.end) + + props_ = [data] if props is None else [data] + list(props) + prop = dav.Prop() + props_ + vcalendar = cdav.CompFilter("VCALENDAR") + + comp_filter = None + comp_class = searcher.comp_class + + if filters: + filters = deepcopy(filters) + if hasattr(filters, "tag") and filters.tag == cdav.CompFilter.tag: + comp_filter = filters + filters = [] + else: + filters = [] + + vNotCompleted = cdav.TextMatch("COMPLETED", negate=True) + vNotCancelled = cdav.TextMatch("CANCELLED", negate=True) + vNeedsAction = cdav.TextMatch("NEEDS-ACTION") + vStatusNotCompleted = cdav.PropFilter("STATUS") + vNotCompleted + vStatusNotCancelled = cdav.PropFilter("STATUS") + vNotCancelled + vStatusNeedsAction = cdav.PropFilter("STATUS") + vNeedsAction + vStatusNotDefined = cdav.PropFilter("STATUS") + cdav.NotDefined() + 
vNoCompleteDate = cdav.PropFilter("COMPLETED") + cdav.NotDefined() + + if _hacks == "ignore_completed1": + filters.extend([vNoCompleteDate, vStatusNotCompleted, vStatusNotCancelled]) + elif _hacks == "ignore_completed2": + filters.extend([vNoCompleteDate, vStatusNotDefined]) + elif _hacks == "ignore_completed3": + filters.extend([vStatusNeedsAction]) + + if searcher.start or searcher.end: + filters.append(cdav.TimeRange(searcher.start, searcher.end)) + + if searcher.alarm_start or searcher.alarm_end: + filters.append( + cdav.CompFilter("VALARM") + cdav.TimeRange(searcher.alarm_start, searcher.alarm_end) + ) + + comp_mappings = [ + ("event", "VEVENT", Event), + ("todo", "VTODO", Todo), + ("journal", "VJOURNAL", Journal), + ] + + for flag, comp_name, sync_class in comp_mappings: + flagged = getattr(searcher, flag, False) + + if flagged: + if comp_class is not None and comp_class is not sync_class: + raise error.ConsistencyError( + f"inconsistent search parameters - comp_class = {comp_class}, want {sync_class}" + ) + comp_class = sync_class + + if comp_filter and comp_filter.attributes.get("name") == comp_name: + comp_class = sync_class + if ( + flag == "todo" + and not getattr(searcher, "todo", False) + and searcher.include_completed is None + ): + searcher.include_completed = True + setattr(searcher, flag, True) + + if comp_class is sync_class: + if comp_filter: + assert comp_filter.attributes.get("name") == comp_name + else: + comp_filter = cdav.CompFilter(comp_name) + setattr(searcher, flag, True) + + if comp_class and not comp_filter: + raise error.ConsistencyError(f"unsupported comp class {comp_class} for search") + + if _hacks == "no_comp_filter": + comp_filter = None + + for property in searcher._property_operator: + if searcher._property_operator[property] == "undef": + match = cdav.NotDefined() + filters.append(cdav.PropFilter(property.upper()) + match) + else: + value = searcher._property_filters[property] + property_ = property.upper() + if 
property.lower() == "category": + property_ = "CATEGORIES" + if property.lower() == "categories": + values = value.cats + else: + values = [value] + + for value in values: + if hasattr(value, "to_ical"): + value = value.to_ical() + + collation_str = "i;octet" + if ( + hasattr(searcher, "_property_collation") + and property in searcher._property_collation + ): + case_sensitive = searcher._property_case_sensitive.get(property, True) + collation_str = _collation_to_caldav( + searcher._property_collation[property], case_sensitive + ) + + match = cdav.TextMatch(value, collation=collation_str) + filters.append(cdav.PropFilter(property_) + match) + + if comp_filter and filters: + comp_filter += filters + vcalendar += comp_filter + elif comp_filter: + vcalendar += comp_filter + elif filters: + vcalendar += filters + + filter_elem = cdav.Filter() + vcalendar + root = cdav.CalendarQuery() + [prop, filter_elem] + + return (root, comp_class) def _is_not_defined_supported(features: Any, prop: str) -> bool: diff --git a/docs/design/OPERATIONS_PROTOCOL_AUDIT.md b/docs/design/OPERATIONS_PROTOCOL_AUDIT.md new file mode 100644 index 00000000..7f842b81 --- /dev/null +++ b/docs/design/OPERATIONS_PROTOCOL_AUDIT.md @@ -0,0 +1,165 @@ +# Audit: `operations/` and `protocol/` directories + +**Date:** 2026-04-22 +**Context:** Both directories were created during an attempted sans-IO refactoring +that was never completed. This document assesses what is dead, what is duplicated, +and what should happen next. 
+ +--- + +## `caldav/operations/` — ~85% dead code + +### What is there + +| File | Lines | Status | +|---|---|---| +| `base.py` | 189 | ~30 active (`_normalize_href`), rest test-only | +| `davobject_ops.py` | 293 | Entirely dead in production | +| `calendarobject_ops.py` | 540 | ~10 active (`_quote_uid`), rest dead/duplicate | +| `principal_ops.py` | 162 | ~60 active (`_extract_calendar_home_set_from_results`) | +| `calendarset_ops.py` | 245 | ~40 active (`_extract_calendars_from_propfind_results`) | +| `calendar_ops.py` | 261 | Entirely dead in production | +| `search_ops.py` | 453 | ~180 active, ~270 dead | +| `__init__.py` | 62 | Re-exports unused dataclasses | +| **Total** | **2,205** | **~340 active, ~1,860 dead** | + +### The handful of functions that are actually used + +| Function | Current home | Natural home | +|---|---|---| +| `_quote_uid` | `calendarobject_ops.py` | `calendarobjectresource.py` (already imported there) | +| `_extract_calendars_from_propfind_results` | `calendarset_ops.py` | `collection.py` (called from `_calendars_from_results`) | +| `_extract_calendar_home_set_from_results` | `principal_ops.py` | `davclient.py` / `async_davclient.py` | +| `_normalize_href` | `base.py` | `response.py` (already imported there; also duplicated in `protocol/xml_parsers.py`) | +| `_build_search_xml_query` | `search_ops.py` | `search.py` (reasonable as module-level helper) | +| `_filter_search_results` | `search_ops.py` | `search.py` | +| `_collation_to_caldav` | `search_ops.py` | `search.py` | + +### What is dead + +- `davobject_ops.py` (293 lines): no production callers; logic is reimplemented in the + classes. Tests exercise the functions in isolation, but the main codebase never + calls them. +- `calendar_ops.py` (261 lines): no production callers. `_generate_fake_sync_token` + is duplicated verbatim in `Collection._generate_fake_sync_token()` in `collection.py`, + which is what production code calls. 
+- ~530 lines of `calendarobject_ops.py`: duplicates class methods in + `calendarobjectresource.py`. `_get_duration`, `_set_duration`, `_find_id_and_path`, + `_calculate_next_recurrence`, `_mark_task_completed`, etc. all have counterparts + in the class that are the ones actually called. +- ~270 lines of `search_ops.py`: `_determine_post_filter_needed`, + `_should_remove_category_filter`, `_get_explicit_contains_properties`, + `_needs_pending_todo_multi_search`, `SearchStrategy` — never called from anywhere, + not even from tests. +- `QuerySpec`, `PropertyData`, `ChildrenQuery`, `ChildData`, `PropertiesResult`, + `CalendarObjectData`, `PrincipalData`, `CalendarObjectInfo` exported from + `__init__.py` — no production callers. +- 6 `test_operations_*.py` test files (2,176 lines total): they only exercise the dead + functions. Actual behavior is covered by integration tests against the classes. + +### What happened + +The intended pattern was: extract logic from classes into pure functions → test in +isolation → re-wire classes to call functions. The extraction happened; the re-wiring +did not. So we now have two implementations of the same logic: the original class +methods (which production uses) and the extracted functions (which only unit tests use). + +### Recommendation: delete the directory + +1. Move the 7 live functions into their natural homes (listed above). +2. Delete `caldav/operations/` entirely. +3. Delete all `tests/test_operations_*.py` files. + +The logic belongs in the classes, not as bare functions. Whether the future direction +is sans-IO or async-first-with-generated-sync, the `operations/` approach adds a +layer of indirection without being the authoritative implementation. + +--- + +## `caldav/protocol/` — foundation of async client, but with duplication + +### What is there + +| File | Lines | Notes | +|---|---|---| +| `types.py` | 221 | `PropfindResult`, `CalendarQueryResult`, etc. 
|
+| `xml_builders.py` | 346 | XML request body builders |
+| `xml_parsers.py` | 468 | XML response parsers + 3 utility functions |
+| `__init__.py` | 44 | Re-exports `types.py` |
+| **Total** | **1,079** | |
+
+### Who uses it
+
+`async_davclient.py` is the primary consumer — it uses the builders for all its HTTP
+request bodies and the parsers to populate `response.results` with typed objects.
+
+`davclient.py` (sync) barely touches it:
+- `_build_propfind_body` — used in one code path inside `propfind()`
+- `_parse_propfind_response` — used in the same code path (late import, conditional)
+
+`response.py` imports three utility functions from `xml_parsers.py`:
+- `_normalize_href` — used when parsing `<href>` elements
+- `_validate_status` — used when parsing status strings
+- `_strip_to_multistatus` — delegated to from `BaseDAVResponse._strip_to_multistatus()`
+
+### The duplication problem
+
+`protocol/xml_builders.py` builds XML bodies from property name strings
+(e.g. `"DAV:displayname"`). `davobject.py` builds them from element classes
+(`dav.DisplayName()`). These are parallel implementations at different abstraction
+levels. `davclient.py` uses the element-class approach; `async_davclient.py` uses the
+string approach. They diverge and are not interchangeable.
+
+`_normalize_href` exists in both `protocol/xml_parsers.py` and
+`operations/base.py` with near-identical logic.
+
+`response.py` is the stateful response object, but it delegates three internal
+operations upward to `protocol/xml_parsers.py`, creating an inverted dependency:
+the higher-level response object depends on the lower-level protocol module for
+what are essentially private helpers.
+ +### What is dead in `xml_builders.py` + +- `_build_proppatch_body` — not called from `davclient.py` or `async_davclient.py` +- `_build_mkcalendar_body` — not called from anywhere +- `_prop_name_to_element` — not called from anywhere (only used internally by + `_build_proppatch_body`) + +### Recommendation + +The right move depends on the architectural direction: + +**Option A — Dissolve back into consumers (aligns with stated preference for +stateful response objects):** + +1. Move `_normalize_href`, `_validate_status`, `_strip_to_multistatus` into + `response.py` directly — that is where they are used. +2. Move `xml_builders.py` and `xml_parsers.py` into `async_davclient.py` as its + private implementation (or a sibling `_async_davclient_xml.py`). They are + already exclusively its implementation detail. +3. Inline the `types.py` dataclasses into `async_davclient.py`. +4. Remove `davclient.py`'s single use of `_build_propfind_body` / + `_parse_propfind_response` — it should use the same XML-building path as + everything else (via `davobject.py`'s element classes). +5. Delete `caldav/protocol/`. + +**Option B — Keep as async client internals, make the boundary honest:** + +Rename the module to `caldav/_async_xml.py` or similar, drop the "protocol" framing, +and make it explicit that this is the async client's XML implementation, not a +shared abstraction. Remove the dead builder functions. This is a smaller change that +preserves the separation without the false promise of a sans-IO layer. + +Either way: delete the three dead builder functions (`_build_proppatch_body`, +`_build_mkcalendar_body`, `_prop_name_to_element`) immediately. 
+ +--- + +## Summary + +| Directory | Total lines | Live in production | Recommendation | +|---|---|---|---| +| `operations/` | 2,205 | ~340 (7 functions) | Delete; inline live functions into classes | +| `protocol/` | 1,079 | ~800 | Keep as async client internals, or dissolve into consumers | +| `tests/test_operations_*.py` | 2,176 | 0 (tests dead code only) | Delete with `operations/` | +| `tests/test_protocol.py` | 319 | active | Keep (tests `async_davclient.py` behavior) | diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index 1dc515fb..6acf7e61 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -2230,9 +2230,7 @@ def test_add_organizer_no_arg_returns_coroutine_for_async_client(self): async def async_principal(): p = mock.MagicMock() - p._async_get_vcal_address = mock.AsyncMock( - return_value=vCalAddress("mailto:me@example.com") - ) + p.get_vcal_address = mock.AsyncMock(return_value=vCalAddress("mailto:me@example.com")) return p client.principal = async_principal @@ -2254,9 +2252,7 @@ def test_add_organizer_no_arg_async_awaited_sets_organizer(self): async def async_principal(): p = mock.MagicMock() - p._async_get_vcal_address = mock.AsyncMock( - return_value=vCalAddress("mailto:me@example.com") - ) + p.get_vcal_address = mock.AsyncMock(return_value=vCalAddress("mailto:me@example.com")) return p client.principal = async_principal @@ -2275,9 +2271,7 @@ def test_save_with_invites_returns_coroutine_for_async_client(self): async def async_principal(): p = mock.MagicMock() - p._async_get_vcal_address = mock.AsyncMock( - return_value=vCalAddress("mailto:me@example.com") - ) + p.get_vcal_address = mock.AsyncMock(return_value=vCalAddress("mailto:me@example.com")) return p client.principal = async_principal @@ -2298,9 +2292,7 @@ def test_save_with_invites_async_awaited_sets_organizer_and_saves(self): async def async_principal(): p = mock.MagicMock() - p._async_get_vcal_address = mock.AsyncMock( - 
return_value=vCalAddress("mailto:me@example.com") - ) + p.get_vcal_address = mock.AsyncMock(return_value=vCalAddress("mailto:me@example.com")) return p client.principal = async_principal diff --git a/tests/test_operations_base.py b/tests/test_operations_base.py deleted file mode 100644 index 1c5c1bc5..00000000 --- a/tests/test_operations_base.py +++ /dev/null @@ -1,192 +0,0 @@ -""" -Tests for the operations layer base module. - -These tests verify the Sans-I/O utility functions work correctly -without any network I/O. -""" - -import pytest - -from caldav.operations.base import PropertyData, QuerySpec -from caldav.operations.base import _extract_resource_type as extract_resource_type -from caldav.operations.base import _get_property_value as get_property_value -from caldav.operations.base import _is_calendar_resource as is_calendar_resource -from caldav.operations.base import _is_collection_resource as is_collection_resource -from caldav.operations.base import _normalize_href as normalize_href - - -class TestQuerySpec: - """Tests for QuerySpec dataclass.""" - - def test_query_spec_defaults(self): - """QuerySpec has sensible defaults.""" - spec = QuerySpec(url="/calendars/") - assert spec.url == "/calendars/" - assert spec.method == "PROPFIND" - assert spec.depth == 0 - assert spec.props == () - assert spec.body is None - - def test_query_spec_immutable(self): - """QuerySpec is immutable (frozen).""" - spec = QuerySpec(url="/test") - with pytest.raises(AttributeError): - spec.url = "/other" - - def test_query_spec_with_url(self): - """with_url() returns a new QuerySpec with different URL.""" - spec = QuerySpec(url="/old", method="REPORT", depth=1, props=("displayname",)) - new_spec = spec.with_url("/new") - - assert new_spec.url == "/new" - assert new_spec.method == "REPORT" - assert new_spec.depth == 1 - assert new_spec.props == ("displayname",) - # Original unchanged - assert spec.url == "/old" - - -class TestPropertyData: - """Tests for PropertyData dataclass.""" 
- - def test_property_data_defaults(self): - """PropertyData has sensible defaults.""" - data = PropertyData(href="/item") - assert data.href == "/item" - assert data.properties == {} - assert data.status == 200 - - def test_property_data_with_properties(self): - """PropertyData can store arbitrary properties.""" - data = PropertyData( - href="/cal/", - properties={ - "{DAV:}displayname": "My Calendar", - "{DAV:}resourcetype": ["collection"], - }, - status=200, - ) - assert data.properties["{DAV:}displayname"] == "My Calendar" - - -class TestNormalizeHref: - """Tests for normalize_href function.""" - - def test_normalize_empty(self): - """Empty href returns empty.""" - assert normalize_href("") == "" - - def test_normalize_double_slashes(self): - """Double slashes are normalized.""" - assert normalize_href("/path//to//resource") == "/path/to/resource" - - def test_normalize_preserves_http(self): - """HTTP URLs preserve double slashes in protocol.""" - result = normalize_href("https://example.com/path") - assert result == "https://example.com/path" - - def test_normalize_with_base_url(self): - """Relative URLs resolved against base.""" - result = normalize_href("/calendars/test/", "https://example.com/dav/") - # Should resolve to full URL - assert "calendars/test" in result - - -class TestExtractResourceType: - """Tests for extract_resource_type function.""" - - def test_extract_list(self): - """Extract list of resource types.""" - props = { - "{DAV:}resourcetype": [ - "{DAV:}collection", - "{urn:ietf:params:xml:ns:caldav}calendar", - ] - } - result = extract_resource_type(props) - assert "{DAV:}collection" in result - assert "{urn:ietf:params:xml:ns:caldav}calendar" in result - - def test_extract_single_value(self): - """Extract single resource type.""" - props = {"{DAV:}resourcetype": "{DAV:}collection"} - result = extract_resource_type(props) - assert result == ["{DAV:}collection"] - - def test_extract_none(self): - """Missing resourcetype returns empty list.""" 
- props = {"{DAV:}displayname": "Test"} - result = extract_resource_type(props) - assert result == [] - - def test_extract_explicit_none(self): - """Explicit None resourcetype returns empty list.""" - props = {"{DAV:}resourcetype": None} - result = extract_resource_type(props) - assert result == [] - - -class TestIsCalendarResource: - """Tests for is_calendar_resource function.""" - - def test_is_calendar(self): - """Detect calendar resource.""" - props = { - "{DAV:}resourcetype": [ - "{DAV:}collection", - "{urn:ietf:params:xml:ns:caldav}calendar", - ] - } - assert is_calendar_resource(props) is True - - def test_is_not_calendar(self): - """Non-calendar collection.""" - props = {"{DAV:}resourcetype": ["{DAV:}collection"]} - assert is_calendar_resource(props) is False - - def test_empty_props(self): - """Empty properties.""" - assert is_calendar_resource({}) is False - - -class TestIsCollectionResource: - """Tests for is_collection_resource function.""" - - def test_is_collection(self): - """Detect collection resource.""" - props = {"{DAV:}resourcetype": ["{DAV:}collection"]} - assert is_collection_resource(props) is True - - def test_is_not_collection(self): - """Non-collection resource.""" - props = {"{DAV:}resourcetype": []} - assert is_collection_resource(props) is False - - -class TestGetPropertyValue: - """Tests for get_property_value function.""" - - def test_get_exact_key(self): - """Get property with exact key.""" - props = {"{DAV:}displayname": "Test Calendar"} - assert get_property_value(props, "{DAV:}displayname") == "Test Calendar" - - def test_get_simple_key_dav_namespace(self): - """Get property with simple key, DAV namespace.""" - props = {"{DAV:}displayname": "Test Calendar"} - assert get_property_value(props, "displayname") == "Test Calendar" - - def test_get_simple_key_caldav_namespace(self): - """Get property with simple key, CalDAV namespace.""" - props = {"{urn:ietf:params:xml:ns:caldav}calendar-data": "BEGIN:VCALENDAR..."} - assert 
get_property_value(props, "calendar-data") == "BEGIN:VCALENDAR..." - - def test_get_missing_with_default(self): - """Missing property returns default.""" - props = {"{DAV:}displayname": "Test"} - assert get_property_value(props, "nonexistent", "default") == "default" - - def test_get_missing_no_default(self): - """Missing property returns None by default.""" - props = {} - assert get_property_value(props, "nonexistent") is None diff --git a/tests/test_operations_calendar.py b/tests/test_operations_calendar.py deleted file mode 100644 index 4de71029..00000000 --- a/tests/test_operations_calendar.py +++ /dev/null @@ -1,329 +0,0 @@ -""" -Tests for the Calendar operations module. - -These tests verify the Sans-I/O business logic for Calendar operations -like component detection, sync tokens, and result processing. -""" - -from caldav.operations.calendar_ops import CalendarObjectInfo -from caldav.operations.calendar_ops import ( - _build_calendar_object_url as build_calendar_object_url, -) -from caldav.operations.calendar_ops import ( - _detect_component_type as detect_component_type, -) -from caldav.operations.calendar_ops import ( - _detect_component_type_from_icalendar as detect_component_type_from_icalendar, -) -from caldav.operations.calendar_ops import ( - _detect_component_type_from_string as detect_component_type_from_string, -) -from caldav.operations.calendar_ops import ( - _generate_fake_sync_token as generate_fake_sync_token, -) -from caldav.operations.calendar_ops import _is_fake_sync_token as is_fake_sync_token -from caldav.operations.calendar_ops import _normalize_result_url as normalize_result_url -from caldav.operations.calendar_ops import ( - _process_report_results as process_report_results, -) -from caldav.operations.calendar_ops import ( - _should_skip_calendar_self_reference as should_skip_calendar_self_reference, -) - - -class TestDetectComponentTypeFromString: - """Tests for detect_component_type_from_string function.""" - - def 
test_detects_vevent(self): - """Detects VEVENT component.""" - data = "BEGIN:VCALENDAR\nBEGIN:VEVENT\nSUMMARY:Test\nEND:VEVENT\nEND:VCALENDAR" - assert detect_component_type_from_string(data) == "Event" - - def test_detects_vtodo(self): - """Detects VTODO component.""" - data = "BEGIN:VCALENDAR\nBEGIN:VTODO\nSUMMARY:Task\nEND:VTODO\nEND:VCALENDAR" - assert detect_component_type_from_string(data) == "Todo" - - def test_detects_vjournal(self): - """Detects VJOURNAL component.""" - data = "BEGIN:VCALENDAR\nBEGIN:VJOURNAL\nSUMMARY:Note\nEND:VJOURNAL\nEND:VCALENDAR" - assert detect_component_type_from_string(data) == "Journal" - - def test_detects_vfreebusy(self): - """Detects VFREEBUSY component.""" - data = "BEGIN:VCALENDAR\nBEGIN:VFREEBUSY\nEND:VFREEBUSY\nEND:VCALENDAR" - assert detect_component_type_from_string(data) == "FreeBusy" - - def test_returns_none_for_unknown(self): - """Returns None for unknown component types.""" - data = "BEGIN:VCALENDAR\nBEGIN:VTIMEZONE\nEND:VTIMEZONE\nEND:VCALENDAR" - assert detect_component_type_from_string(data) is None - - def test_handles_whitespace(self): - """Handles lines with extra whitespace.""" - data = "BEGIN:VCALENDAR\n BEGIN:VEVENT \nSUMMARY:Test\nEND:VEVENT\nEND:VCALENDAR" - assert detect_component_type_from_string(data) == "Event" - - -class TestDetectComponentTypeFromIcalendar: - """Tests for detect_component_type_from_icalendar function.""" - - def test_detects_event(self): - """Detects Event from icalendar object.""" - import icalendar - - cal = icalendar.Calendar() - event = icalendar.Event() - event.add("summary", "Test") - cal.add_component(event) - - assert detect_component_type_from_icalendar(cal) == "Event" - - def test_detects_todo(self): - """Detects Todo from icalendar object.""" - import icalendar - - cal = icalendar.Calendar() - todo = icalendar.Todo() - todo.add("summary", "Task") - cal.add_component(todo) - - assert detect_component_type_from_icalendar(cal) == "Todo" - - def 
test_returns_none_for_empty(self): - """Returns None for empty calendar.""" - import icalendar - - cal = icalendar.Calendar() - assert detect_component_type_from_icalendar(cal) is None - - def test_returns_none_for_no_subcomponents(self): - """Returns None when no subcomponents attribute.""" - obj = {"test": "value"} - assert detect_component_type_from_icalendar(obj) is None - - -class TestDetectComponentType: - """Tests for detect_component_type function.""" - - def test_detects_from_string(self): - """Detects from string data.""" - data = "BEGIN:VCALENDAR\nBEGIN:VTODO\nSUMMARY:Task\nEND:VTODO\nEND:VCALENDAR" - assert detect_component_type(data) == "Todo" - - def test_detects_from_icalendar(self): - """Detects from icalendar object.""" - import icalendar - - cal = icalendar.Calendar() - cal.add_component(icalendar.Journal()) - - assert detect_component_type(cal) == "Journal" - - def test_returns_none_for_none(self): - """Returns None for None input.""" - assert detect_component_type(None) is None - - -class TestGenerateFakeSyncToken: - """Tests for generate_fake_sync_token function.""" - - def test_generates_deterministic_token(self): - """Same input produces same token.""" - etags_urls = [("etag1", "/url1"), ("etag2", "/url2")] - - token1 = generate_fake_sync_token(etags_urls) - token2 = generate_fake_sync_token(etags_urls) - - assert token1 == token2 - - def test_prefix(self): - """Token starts with 'fake-' prefix.""" - token = generate_fake_sync_token([("etag", "/url")]) - assert token.startswith("fake-") - - def test_different_input_different_token(self): - """Different input produces different token.""" - token1 = generate_fake_sync_token([("etag1", "/url1")]) - token2 = generate_fake_sync_token([("etag2", "/url2")]) - - assert token1 != token2 - - def test_order_independent(self): - """Order of inputs doesn't affect token.""" - etags1 = [("a", "/a"), ("b", "/b")] - etags2 = [("b", "/b"), ("a", "/a")] - - assert generate_fake_sync_token(etags1) == 
generate_fake_sync_token(etags2) - - def test_uses_url_when_no_etag(self): - """Uses URL as fallback when etag is None.""" - token = generate_fake_sync_token([(None, "/url1"), (None, "/url2")]) - assert token.startswith("fake-") - - def test_empty_list(self): - """Handles empty list.""" - token = generate_fake_sync_token([]) - assert token.startswith("fake-") - - -class TestIsFakeSyncToken: - """Tests for is_fake_sync_token function.""" - - def test_detects_fake_token(self): - """Detects fake sync tokens.""" - assert is_fake_sync_token("fake-abc123") is True - - def test_rejects_real_token(self): - """Rejects tokens without fake- prefix.""" - assert is_fake_sync_token("http://example.com/sync/token123") is False - - def test_handles_none(self): - """Handles None input.""" - assert is_fake_sync_token(None) is False - - def test_handles_non_string(self): - """Handles non-string input.""" - assert is_fake_sync_token(12345) is False - - -class TestNormalizeResultUrl: - """Tests for normalize_result_url function.""" - - def test_quotes_relative_path(self): - """Quotes special characters in relative paths.""" - result = normalize_result_url("/calendars/event with spaces.ics", "/calendars/") - assert "%20" in result - - def test_preserves_full_url(self): - """Preserves full URLs as-is.""" - url = "https://example.com/calendars/event.ics" - result = normalize_result_url(url, "/calendars/") - assert result == url - - -class TestShouldSkipCalendarSelfReference: - """Tests for should_skip_calendar_self_reference function.""" - - def test_skips_exact_match(self): - """Skips when URLs match exactly.""" - assert should_skip_calendar_self_reference("/calendars/work/", "/calendars/work/") is True - - def test_skips_trailing_slash_difference(self): - """Skips when URLs differ only by trailing slash.""" - assert should_skip_calendar_self_reference("/calendars/work", "/calendars/work/") is True - assert should_skip_calendar_self_reference("/calendars/work/", "/calendars/work") is 
True - - def test_does_not_skip_different_urls(self): - """Does not skip different URLs.""" - assert ( - should_skip_calendar_self_reference("/calendars/work/event.ics", "/calendars/work/") - is False - ) - - -class TestProcessReportResults: - """Tests for process_report_results function.""" - - def test_processes_results(self): - """Processes results into CalendarObjectInfo objects.""" - results = { - "/cal/event1.ics": { - "{urn:ietf:params:xml:ns:caldav}calendar-data": "BEGIN:VCALENDAR\nBEGIN:VEVENT\nEND:VEVENT\nEND:VCALENDAR", - "{DAV:}getetag": '"etag1"', - }, - "/cal/todo1.ics": { - "{urn:ietf:params:xml:ns:caldav}calendar-data": "BEGIN:VCALENDAR\nBEGIN:VTODO\nEND:VTODO\nEND:VCALENDAR", - }, - } - - objects = process_report_results(results, "/cal/") - - assert len(objects) == 2 - - # Find event and todo - event = next(o for o in objects if o.component_type == "Event") - todo = next(o for o in objects if o.component_type == "Todo") - - assert event.etag == '"etag1"' - assert todo.etag is None - - def test_skips_calendar_self_reference(self): - """Filters out calendar self-reference.""" - results = { - "/cal/": { # Calendar itself - should be skipped - "{DAV:}resourcetype": "{DAV:}collection", - }, - "/cal/event.ics": { - "{urn:ietf:params:xml:ns:caldav}calendar-data": "BEGIN:VCALENDAR\nBEGIN:VEVENT\nEND:VEVENT\nEND:VCALENDAR", - }, - } - - objects = process_report_results(results, "/cal/") - - # Only the event should be returned - assert len(objects) == 1 - assert "event" in objects[0].url - - def test_handles_empty_results(self): - """Returns empty list for empty results.""" - assert process_report_results({}, "/cal/") == [] - - -class TestBuildCalendarObjectUrl: - """Tests for build_calendar_object_url function.""" - - def test_builds_url(self): - """Builds calendar object URL from calendar URL and ID.""" - result = build_calendar_object_url("https://example.com/calendars/work/", "event123") - assert result == 
"https://example.com/calendars/work/event123.ics" - - def test_handles_trailing_slash(self): - """Handles calendar URL with or without trailing slash.""" - result = build_calendar_object_url("https://example.com/calendars/work", "event123") - assert result == "https://example.com/calendars/work/event123.ics" - - def test_doesnt_double_ics(self): - """Doesn't add .ics if already present.""" - result = build_calendar_object_url("https://example.com/calendars/work/", "event123.ics") - assert result == "https://example.com/calendars/work/event123.ics" - assert ".ics.ics" not in result - - def test_quotes_special_chars(self): - """Quotes special characters in object ID.""" - result = build_calendar_object_url("https://example.com/calendars/", "event with spaces") - assert "%20" in result - - -class TestCalendarObjectInfo: - """Tests for CalendarObjectInfo dataclass.""" - - def test_creates_info(self): - """Creates CalendarObjectInfo with all fields.""" - info = CalendarObjectInfo( - url="/calendars/work/event.ics", - data="BEGIN:VCALENDAR...", - etag='"abc123"', - component_type="Event", - extra_props={"custom": "value"}, - ) - - assert info.url == "/calendars/work/event.ics" - assert info.data == "BEGIN:VCALENDAR..." - assert info.etag == '"abc123"' - assert info.component_type == "Event" - assert info.extra_props == {"custom": "value"} - - def test_allows_none_values(self): - """Allows None values for optional fields.""" - info = CalendarObjectInfo( - url="/calendars/work/event.ics", - data=None, - etag=None, - component_type=None, - extra_props={}, - ) - - assert info.data is None - assert info.etag is None - assert info.component_type is None diff --git a/tests/test_operations_calendarobject.py b/tests/test_operations_calendarobject.py deleted file mode 100644 index 34a6efe2..00000000 --- a/tests/test_operations_calendarobject.py +++ /dev/null @@ -1,529 +0,0 @@ -""" -Tests for CalendarObjectResource operations module. 
- -These tests verify the Sans-I/O business logic for calendar objects -without any network I/O. -""" - -from datetime import datetime, timedelta, timezone - -import icalendar - -from caldav.operations.calendarobject_ops import ( - _copy_component_with_new_uid as copy_component_with_new_uid, -) -from caldav.operations.calendarobject_ops import _extract_relations as extract_relations -from caldav.operations.calendarobject_ops import ( - _extract_uid_from_path as extract_uid_from_path, -) -from caldav.operations.calendarobject_ops import _find_id_and_path as find_id_and_path -from caldav.operations.calendarobject_ops import _generate_uid as generate_uid -from caldav.operations.calendarobject_ops import _generate_url as generate_url -from caldav.operations.calendarobject_ops import _get_due as get_due -from caldav.operations.calendarobject_ops import _get_duration as get_duration -from caldav.operations.calendarobject_ops import ( - _get_non_timezone_subcomponents as get_non_timezone_subcomponents, -) -from caldav.operations.calendarobject_ops import ( - _get_primary_component as get_primary_component, -) -from caldav.operations.calendarobject_ops import ( - _get_reverse_reltype as get_reverse_reltype, -) -from caldav.operations.calendarobject_ops import ( - _has_calendar_component as has_calendar_component, -) -from caldav.operations.calendarobject_ops import ( - _is_calendar_data_loaded as is_calendar_data_loaded, -) -from caldav.operations.calendarobject_ops import _is_task_pending as is_task_pending -from caldav.operations.calendarobject_ops import ( - _mark_task_completed as mark_task_completed, -) -from caldav.operations.calendarobject_ops import ( - _mark_task_uncompleted as mark_task_uncompleted, -) -from caldav.operations.calendarobject_ops import ( - _reduce_rrule_count as reduce_rrule_count, -) -from caldav.operations.calendarobject_ops import _set_duration as set_duration - - -class TestGenerateUid: - """Tests for generate_uid function.""" - - def 
test_generates_unique_uids(self): - """Each call generates a unique UID.""" - uids = {generate_uid() for _ in range(100)} - assert len(uids) == 100 - - def test_uid_is_string(self): - """UID is a string.""" - assert isinstance(generate_uid(), str) - - -class TestGenerateUrl: - """Tests for generate_url function.""" - - def test_basic_url(self): - """Generates correct URL from parent and UID.""" - url = generate_url("/calendars/user/cal/", "event-123") - assert url == "/calendars/user/cal/event-123.ics" - - def test_adds_trailing_slash(self): - """Adds trailing slash to parent if missing.""" - url = generate_url("/calendars/user/cal", "event-123") - assert url == "/calendars/user/cal/event-123.ics" - - def test_quotes_special_chars(self): - """Special characters in UID are quoted.""" - url = generate_url("/cal/", "event with spaces") - assert "event%20with%20spaces.ics" in url - - def test_double_quotes_slashes(self): - """Slashes in UID are double-quoted.""" - url = generate_url("/cal/", "event/with/slashes") - assert "%252F" in url # %2F is quoted again - - -class TestExtractUidFromPath: - """Tests for extract_uid_from_path function.""" - - def test_extracts_uid(self): - """Extracts UID from .ics path.""" - uid = extract_uid_from_path("/calendars/user/cal/event-123.ics") - assert uid == "event-123" - - def test_returns_none_for_non_ics(self): - """Returns None for non-.ics paths.""" - assert extract_uid_from_path("/calendars/user/cal/") is None - - def test_handles_simple_path(self): - """Handles simple filename.""" - uid = extract_uid_from_path("event.ics") - assert uid == "event" - - -class TestFindIdAndPath: - """Tests for find_id_and_path function.""" - - def test_uses_given_id(self): - """Given ID takes precedence.""" - comp = icalendar.Event() - comp.add("UID", "old-uid") - uid, path = find_id_and_path(comp, given_id="new-uid") - assert uid == "new-uid" - assert comp["UID"] == "new-uid" - - def test_uses_existing_id(self): - """Uses existing_id if no 
given_id.""" - comp = icalendar.Event() - uid, path = find_id_and_path(comp, existing_id="existing") - assert uid == "existing" - - def test_extracts_from_component(self): - """Extracts UID from component.""" - comp = icalendar.Event() - comp.add("UID", "comp-uid") - uid, path = find_id_and_path(comp) - assert uid == "comp-uid" - - def test_extracts_from_path(self): - """Extracts UID from path.""" - comp = icalendar.Event() - uid, path = find_id_and_path(comp, given_path="event-from-path.ics") - assert uid == "event-from-path" - - def test_generates_new_uid(self): - """Generates new UID if none available.""" - comp = icalendar.Event() - uid, path = find_id_and_path(comp) - assert uid is not None - assert len(uid) > 0 - - def test_generates_path(self): - """Generates path from UID.""" - comp = icalendar.Event() - uid, path = find_id_and_path(comp, given_id="test-uid") - assert path == "test-uid.ics" - - -class TestGetDuration: - """Tests for get_duration function.""" - - def test_from_duration_property(self): - """Gets duration from DURATION property.""" - comp = icalendar.Event() - comp.add("DURATION", timedelta(hours=2)) - assert get_duration(comp) == timedelta(hours=2) - - def test_from_dtstart_dtend(self): - """Calculates duration from DTSTART and DTEND.""" - comp = icalendar.Event() - comp.add("DTSTART", datetime(2024, 1, 1, 10, 0)) - comp.add("DTEND", datetime(2024, 1, 1, 12, 0)) - assert get_duration(comp, "DTEND") == timedelta(hours=2) - - def test_from_dtstart_due(self): - """Calculates duration from DTSTART and DUE (for todos).""" - comp = icalendar.Todo() - comp.add("DTSTART", datetime(2024, 1, 1, 10, 0)) - comp.add("DUE", datetime(2024, 1, 1, 11, 0)) - assert get_duration(comp, "DUE") == timedelta(hours=1) - - def test_date_only_default_one_day(self): - """Date-only DTSTART defaults to 1 day duration.""" - from datetime import date - - comp = icalendar.Event() - comp.add("DTSTART", date(2024, 1, 1)) - assert get_duration(comp) == timedelta(days=1) - - 
def test_no_duration_returns_zero(self): - """Returns zero if no duration info available.""" - comp = icalendar.Event() - assert get_duration(comp) == timedelta(0) - - -class TestGetDue: - """Tests for get_due function.""" - - def test_from_due_property(self): - """Gets due from DUE property.""" - comp = icalendar.Todo() - due = datetime(2024, 1, 15, 17, 0) - comp.add("DUE", due) - assert get_due(comp) == due - - def test_from_dtend(self): - """Falls back to DTEND.""" - comp = icalendar.Todo() - dtend = datetime(2024, 1, 15, 17, 0) - comp.add("DTEND", dtend) - assert get_due(comp) == dtend - - def test_calculated_from_duration(self): - """Calculates from DTSTART + DURATION.""" - comp = icalendar.Todo() - comp.add("DTSTART", datetime(2024, 1, 15, 10, 0)) - comp.add("DURATION", timedelta(hours=7)) - assert get_due(comp) == datetime(2024, 1, 15, 17, 0) - - def test_returns_none(self): - """Returns None if no due info.""" - comp = icalendar.Todo() - assert get_due(comp) is None - - -class TestSetDuration: - """Tests for set_duration function.""" - - def test_with_dtstart_and_due(self): - """Moves DUE when both set.""" - comp = icalendar.Todo() - comp.add("DTSTART", datetime(2024, 1, 1, 10, 0)) - comp.add("DUE", datetime(2024, 1, 1, 11, 0)) - - set_duration(comp, timedelta(hours=3), movable_attr="DUE") - - assert comp["DUE"].dt == datetime(2024, 1, 1, 13, 0) - - def test_move_dtstart(self): - """Moves DTSTART when specified.""" - comp = icalendar.Todo() - comp.add("DTSTART", datetime(2024, 1, 1, 10, 0)) - comp.add("DUE", datetime(2024, 1, 1, 12, 0)) - - set_duration(comp, timedelta(hours=1), movable_attr="DTSTART") - - assert comp["DTSTART"].dt == datetime(2024, 1, 1, 11, 0) - - def test_adds_duration_if_no_dates(self): - """Adds DURATION property if no dates set.""" - comp = icalendar.Todo() - set_duration(comp, timedelta(hours=2)) - assert comp["DURATION"].dt == timedelta(hours=2) - - -class TestIsTaskPending: - """Tests for is_task_pending function.""" - - def 
test_needs_action_is_pending(self): - """NEEDS-ACTION status is pending.""" - comp = icalendar.Todo() - comp.add("STATUS", "NEEDS-ACTION") - assert is_task_pending(comp) is True - - def test_in_process_is_pending(self): - """IN-PROCESS status is pending.""" - comp = icalendar.Todo() - comp.add("STATUS", "IN-PROCESS") - assert is_task_pending(comp) is True - - def test_completed_is_not_pending(self): - """COMPLETED status is not pending.""" - comp = icalendar.Todo() - comp.add("STATUS", "COMPLETED") - assert is_task_pending(comp) is False - - def test_cancelled_is_not_pending(self): - """CANCELLED status is not pending.""" - comp = icalendar.Todo() - comp.add("STATUS", "CANCELLED") - assert is_task_pending(comp) is False - - def test_completed_property_is_not_pending(self): - """COMPLETED property means not pending.""" - comp = icalendar.Todo() - comp.add("COMPLETED", datetime.now(timezone.utc)) - assert is_task_pending(comp) is False - - def test_no_status_is_pending(self): - """No status defaults to pending.""" - comp = icalendar.Todo() - assert is_task_pending(comp) is True - - -class TestMarkTaskCompleted: - """Tests for mark_task_completed function.""" - - def test_marks_completed(self): - """Sets STATUS to COMPLETED.""" - comp = icalendar.Todo() - comp.add("STATUS", "NEEDS-ACTION") - ts = datetime(2024, 1, 15, 12, 0, tzinfo=timezone.utc) - - mark_task_completed(comp, ts) - - assert comp["STATUS"] == "COMPLETED" - assert comp["COMPLETED"].dt == ts - - def test_uses_current_time(self): - """Uses current time if not specified.""" - comp = icalendar.Todo() - mark_task_completed(comp) - assert "COMPLETED" in comp - - -class TestMarkTaskUncompleted: - """Tests for mark_task_uncompleted function.""" - - def test_marks_uncompleted(self): - """Removes completion and sets NEEDS-ACTION.""" - comp = icalendar.Todo() - comp.add("STATUS", "COMPLETED") - comp.add("COMPLETED", datetime.now(timezone.utc)) - - mark_task_uncompleted(comp) - - assert comp["STATUS"] == 
"NEEDS-ACTION" - assert "COMPLETED" not in comp - - -class TestReduceRruleCount: - """Tests for reduce_rrule_count function.""" - - def test_reduces_count(self): - """Reduces COUNT by 1.""" - comp = icalendar.Todo() - comp.add("RRULE", {"FREQ": "WEEKLY", "COUNT": 5}) - - result = reduce_rrule_count(comp) - - assert result is True - # icalendar stores COUNT as list via .get() or int via [] - count = comp["RRULE"].get("COUNT") - count_val = count[0] if isinstance(count, list) else count - assert count_val == 4 - - def test_returns_false_at_one(self): - """Returns False when COUNT reaches 1.""" - comp = icalendar.Todo() - comp.add("RRULE", {"FREQ": "WEEKLY", "COUNT": 1}) - - result = reduce_rrule_count(comp) - - assert result is False - - def test_no_count_returns_true(self): - """Returns True if no COUNT in RRULE.""" - comp = icalendar.Todo() - comp.add("RRULE", {"FREQ": "WEEKLY"}) - - result = reduce_rrule_count(comp) - - assert result is True - - -class TestIsCalendarDataLoaded: - """Tests for is_calendar_data_loaded function.""" - - def test_loaded_with_data(self): - """Returns True with valid data.""" - data = "BEGIN:VCALENDAR\nBEGIN:VEVENT\nEND:VEVENT\nEND:VCALENDAR" - assert is_calendar_data_loaded(data, None, None) is True - - def test_loaded_with_icalendar(self): - """Returns True with icalendar instance.""" - assert is_calendar_data_loaded(None, None, icalendar.Calendar()) is True - - def test_not_loaded_empty(self): - """Returns False with no data.""" - assert is_calendar_data_loaded(None, None, None) is False - - -class TestHasCalendarComponent: - """Tests for has_calendar_component function.""" - - def test_has_vevent(self): - """Returns True for VEVENT.""" - data = "BEGIN:VCALENDAR\nBEGIN:VEVENT\nEND:VEVENT\nEND:VCALENDAR" - assert has_calendar_component(data) is True - - def test_has_vtodo(self): - """Returns True for VTODO.""" - data = "BEGIN:VCALENDAR\nBEGIN:VTODO\nEND:VTODO\nEND:VCALENDAR" - assert has_calendar_component(data) is True - - def 
test_has_vjournal(self): - """Returns True for VJOURNAL.""" - data = "BEGIN:VCALENDAR\nBEGIN:VJOURNAL\nEND:VJOURNAL\nEND:VCALENDAR" - assert has_calendar_component(data) is True - - def test_no_component(self): - """Returns False for no component.""" - data = "BEGIN:VCALENDAR\nEND:VCALENDAR" - assert has_calendar_component(data) is False - - def test_empty_data(self): - """Returns False for empty data.""" - assert has_calendar_component(None) is False - - -class TestGetNonTimezoneSubcomponents: - """Tests for get_non_timezone_subcomponents function.""" - - def test_filters_timezone(self): - """Filters out VTIMEZONE components.""" - cal = icalendar.Calendar() - cal.add_component(icalendar.Event()) - cal.add_component(icalendar.Timezone()) - cal.add_component(icalendar.Todo()) - - comps = get_non_timezone_subcomponents(cal) - - assert len(comps) == 2 - assert all(not isinstance(c, icalendar.Timezone) for c in comps) - - -class TestGetPrimaryComponent: - """Tests for get_primary_component function.""" - - def test_gets_event(self): - """Gets VEVENT component.""" - cal = icalendar.Calendar() - event = icalendar.Event() - cal.add_component(event) - - assert get_primary_component(cal) is event - - def test_gets_todo(self): - """Gets VTODO component.""" - cal = icalendar.Calendar() - todo = icalendar.Todo() - cal.add_component(todo) - - assert get_primary_component(cal) is todo - - def test_skips_timezone(self): - """Skips VTIMEZONE.""" - cal = icalendar.Calendar() - cal.add_component(icalendar.Timezone()) - event = icalendar.Event() - cal.add_component(event) - - assert get_primary_component(cal) is event - - -class TestCopyComponentWithNewUid: - """Tests for copy_component_with_new_uid function.""" - - def test_copies_with_new_uid(self): - """Creates copy with new UID.""" - comp = icalendar.Event() - comp.add("UID", "old-uid") - comp.add("SUMMARY", "Test Event") - - new_comp = copy_component_with_new_uid(comp, "new-uid") - - assert new_comp["UID"] == "new-uid" - assert 
new_comp["SUMMARY"] == "Test Event" - assert comp["UID"] == "old-uid" # Original unchanged - - def test_generates_uid(self): - """Generates UID if not provided.""" - comp = icalendar.Event() - comp.add("UID", "old-uid") - - new_comp = copy_component_with_new_uid(comp) - - assert new_comp["UID"] != "old-uid" - assert new_comp["UID"] is not None - - -class TestGetReverseReltype: - """Tests for get_reverse_reltype function.""" - - def test_parent_child(self): - """PARENT reverses to CHILD.""" - assert get_reverse_reltype("PARENT") == "CHILD" - - def test_child_parent(self): - """CHILD reverses to PARENT.""" - assert get_reverse_reltype("CHILD") == "PARENT" - - def test_sibling(self): - """SIBLING reverses to SIBLING.""" - assert get_reverse_reltype("SIBLING") == "SIBLING" - - def test_unknown(self): - """Unknown type returns None.""" - assert get_reverse_reltype("UNKNOWN") is None - - def test_case_insensitive(self): - """Case insensitive matching.""" - assert get_reverse_reltype("parent") == "CHILD" - - -class TestExtractRelations: - """Tests for extract_relations function.""" - - def test_extracts_relations(self): - """Extracts RELATED-TO properties.""" - comp = icalendar.Todo() - comp.add("RELATED-TO", "parent-uid", parameters={"RELTYPE": "PARENT"}) - - relations = extract_relations(comp) - - assert "PARENT" in relations - assert "parent-uid" in relations["PARENT"] - - def test_filters_by_reltype(self): - """Filters by relation type.""" - comp = icalendar.Todo() - comp.add("RELATED-TO", "parent-uid", parameters={"RELTYPE": "PARENT"}) - comp.add("RELATED-TO", "child-uid", parameters={"RELTYPE": "CHILD"}) - - relations = extract_relations(comp, reltypes={"PARENT"}) - - assert "PARENT" in relations - assert "CHILD" not in relations - - def test_default_parent(self): - """Defaults to PARENT if no RELTYPE.""" - comp = icalendar.Todo() - comp.add("RELATED-TO", "some-uid") - - relations = extract_relations(comp) - - assert "PARENT" in relations diff --git 
a/tests/test_operations_calendarset.py b/tests/test_operations_calendarset.py deleted file mode 100644 index b567123f..00000000 --- a/tests/test_operations_calendarset.py +++ /dev/null @@ -1,277 +0,0 @@ -""" -Tests for the CalendarSet operations module. - -These tests verify the Sans-I/O business logic for CalendarSet operations -like extracting calendar IDs and resolving calendar URLs. -""" - -from caldav.operations.calendarset_ops import CalendarInfo -from caldav.operations.calendarset_ops import ( - _extract_calendar_id_from_url as extract_calendar_id_from_url, -) -from caldav.operations.calendarset_ops import ( - _find_calendar_by_id as find_calendar_by_id, -) -from caldav.operations.calendarset_ops import ( - _find_calendar_by_name as find_calendar_by_name, -) -from caldav.operations.calendarset_ops import ( - _process_calendar_list as process_calendar_list, -) -from caldav.operations.calendarset_ops import ( - _resolve_calendar_url as resolve_calendar_url, -) - - -class TestExtractCalendarIdFromUrl: - """Tests for extract_calendar_id_from_url function.""" - - def test_extracts_id_from_path(self): - """Extracts calendar ID from standard path.""" - url = "/calendars/user/my-calendar/" - assert extract_calendar_id_from_url(url) == "my-calendar" - - def test_extracts_id_without_trailing_slash(self): - """Extracts calendar ID from path without trailing slash.""" - url = "/calendars/user/my-calendar" - assert extract_calendar_id_from_url(url) == "my-calendar" - - def test_extracts_id_from_full_url(self): - """Extracts calendar ID from full URL.""" - url = "https://example.com/calendars/user/work/" - assert extract_calendar_id_from_url(url) == "work" - - def test_returns_none_for_empty_id(self): - """Returns None when ID would be empty.""" - url = "/calendars/user//" - # After stripping trailing slashes and splitting, last part is empty - result = extract_calendar_id_from_url(url) - # Implementation should handle this gracefully - assert result is not None # 
Actually gets "user" - - def test_handles_root_url(self): - """Handles URLs with minimal path.""" - url = "/calendar/" - assert extract_calendar_id_from_url(url) == "calendar" - - -class TestProcessCalendarList: - """Tests for process_calendar_list function.""" - - def test_processes_children_data(self): - """Processes children data into CalendarInfo objects.""" - children_data = [ - ( - "/calendars/user/work/", - ["{DAV:}collection", "{urn:ietf:params:xml:ns:caldav}calendar"], - "Work", - ), - ( - "/calendars/user/personal/", - ["{DAV:}collection", "{urn:ietf:params:xml:ns:caldav}calendar"], - "Personal", - ), - ] - - result = process_calendar_list(children_data) - - assert len(result) == 2 - assert result[0].url == "/calendars/user/work/" - assert result[0].cal_id == "work" - assert result[0].name == "Work" - assert result[1].cal_id == "personal" - assert result[1].name == "Personal" - - def test_skips_entries_with_no_id(self): - """Skips entries where calendar ID cannot be extracted.""" - children_data = [ - ("/", ["{DAV:}collection"], None), # Root has no meaningful ID - ("/calendars/user/work/", ["{DAV:}collection"], "Work"), - ] - - result = process_calendar_list(children_data) - - # Only the work calendar should be included - assert len(result) == 1 - assert result[0].cal_id == "work" - - def test_handles_empty_list(self): - """Returns empty list for empty input.""" - assert process_calendar_list([]) == [] - - -class TestResolveCalendarUrl: - """Tests for resolve_calendar_url function.""" - - def test_resolves_relative_id(self): - """Resolves a simple calendar ID to full URL.""" - result = resolve_calendar_url( - cal_id="my-calendar", - parent_url="https://example.com/calendars/user/", - client_base_url="https://example.com", - ) - - assert result == "https://example.com/calendars/user/my-calendar/" - - def test_resolves_full_url_under_client(self): - """Handles full URLs that are under client base.""" - result = resolve_calendar_url( - 
cal_id="https://example.com/calendars/user/work/", - parent_url="https://example.com/calendars/user/", - client_base_url="https://example.com", - ) - - # Should join with client URL - assert "work" in result - - def test_resolves_full_url_different_host(self): - """Handles full URLs with different host.""" - result = resolve_calendar_url( - cal_id="https://other.example.com/calendars/work/", - parent_url="https://example.com/calendars/user/", - client_base_url="https://example.com", - ) - - # Should join with parent URL - assert "work" in result - - def test_quotes_special_characters(self): - """Quotes special characters in calendar ID.""" - result = resolve_calendar_url( - cal_id="calendar with spaces", - parent_url="https://example.com/calendars/", - client_base_url="https://example.com", - ) - - assert "calendar%20with%20spaces" in result - - def test_adds_trailing_slash(self): - """Adds trailing slash to calendar URL.""" - result = resolve_calendar_url( - cal_id="work", - parent_url="https://example.com/calendars/", - client_base_url="https://example.com", - ) - - assert result.endswith("/") - - -class TestFindCalendarByName: - """Tests for find_calendar_by_name function.""" - - def test_finds_calendar_by_name(self): - """Finds a calendar by its display name.""" - calendars = [ - CalendarInfo(url="/cal/work/", cal_id="work", name="Work", resource_types=[]), - CalendarInfo( - url="/cal/personal/", - cal_id="personal", - name="Personal", - resource_types=[], - ), - ] - - result = find_calendar_by_name(calendars, "Personal") - - assert result is not None - assert result.cal_id == "personal" - - def test_returns_none_if_not_found(self): - """Returns None if no calendar matches.""" - calendars = [ - CalendarInfo(url="/cal/work/", cal_id="work", name="Work", resource_types=[]), - ] - - result = find_calendar_by_name(calendars, "NonExistent") - - assert result is None - - def test_handles_empty_list(self): - """Returns None for empty list.""" - assert 
find_calendar_by_name([], "Any") is None - - def test_handles_none_name(self): - """Handles calendars with None name.""" - calendars = [ - CalendarInfo(url="/cal/work/", cal_id="work", name=None, resource_types=[]), - CalendarInfo( - url="/cal/personal/", - cal_id="personal", - name="Personal", - resource_types=[], - ), - ] - - result = find_calendar_by_name(calendars, "Personal") - - assert result is not None - assert result.cal_id == "personal" - - -class TestFindCalendarById: - """Tests for find_calendar_by_id function.""" - - def test_finds_calendar_by_id(self): - """Finds a calendar by its ID.""" - calendars = [ - CalendarInfo(url="/cal/work/", cal_id="work", name="Work", resource_types=[]), - CalendarInfo( - url="/cal/personal/", - cal_id="personal", - name="Personal", - resource_types=[], - ), - ] - - result = find_calendar_by_id(calendars, "work") - - assert result is not None - assert result.name == "Work" - - def test_returns_none_if_not_found(self): - """Returns None if no calendar matches.""" - calendars = [ - CalendarInfo(url="/cal/work/", cal_id="work", name="Work", resource_types=[]), - ] - - result = find_calendar_by_id(calendars, "nonexistent") - - assert result is None - - def test_handles_empty_list(self): - """Returns None for empty list.""" - assert find_calendar_by_id([], "any") is None - - -class TestCalendarInfo: - """Tests for CalendarInfo dataclass.""" - - def test_creates_calendar_info(self): - """Creates CalendarInfo with all fields.""" - info = CalendarInfo( - url="/calendars/user/work/", - cal_id="work", - name="Work Calendar", - resource_types=[ - "{DAV:}collection", - "{urn:ietf:params:xml:ns:caldav}calendar", - ], - ) - - assert info.url == "/calendars/user/work/" - assert info.cal_id == "work" - assert info.name == "Work Calendar" - assert "{urn:ietf:params:xml:ns:caldav}calendar" in info.resource_types - - def test_allows_none_values(self): - """Allows None values for optional fields.""" - info = CalendarInfo( - 
url="/calendars/user/work/", - cal_id=None, - name=None, - resource_types=[], - ) - - assert info.cal_id is None - assert info.name is None - assert info.resource_types == [] diff --git a/tests/test_operations_davobject.py b/tests/test_operations_davobject.py deleted file mode 100644 index ae58195f..00000000 --- a/tests/test_operations_davobject.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -Tests for the DAVObject operations module. - -These tests verify the Sans-I/O business logic for DAVObject operations -like getting properties, listing children, and delete validation. -""" - -import pytest - -from caldav.operations.davobject_ops import ( - CALDAV_CALENDAR, - DAV_DISPLAYNAME, - DAV_RESOURCETYPE, -) -from caldav.operations.davobject_ops import ( - _build_children_query as build_children_query, -) -from caldav.operations.davobject_ops import ( - _convert_protocol_results_to_properties as convert_protocol_results_to_properties, -) -from caldav.operations.davobject_ops import ( - _find_object_properties as find_object_properties, -) -from caldav.operations.davobject_ops import ( - _process_children_response as process_children_response, -) -from caldav.operations.davobject_ops import ( - _validate_delete_response as validate_delete_response, -) -from caldav.operations.davobject_ops import ( - _validate_proppatch_response as validate_proppatch_response, -) - - -class TestBuildChildrenQuery: - """Tests for build_children_query function.""" - - def test_builds_query(self): - """Builds a ChildrenQuery with correct defaults.""" - query = build_children_query("/calendars/user/") - assert query.url == "/calendars/user/" - assert query.depth == 1 - assert DAV_DISPLAYNAME in query.props - assert DAV_RESOURCETYPE in query.props - - -class TestProcessChildrenResponse: - """Tests for process_children_response function.""" - - def test_excludes_parent(self): - """Parent URL is excluded from results.""" - props = { - "/calendars/": { - DAV_RESOURCETYPE: ["{DAV:}collection"], - 
DAV_DISPLAYNAME: "Calendars", - }, - "/calendars/work/": { - DAV_RESOURCETYPE: ["{DAV:}collection", CALDAV_CALENDAR], - DAV_DISPLAYNAME: "Work", - }, - } - children = process_children_response(props, "/calendars/") - assert len(children) == 1 - assert children[0].display_name == "Work" - - def test_filters_by_type(self): - """Filter by resource type works.""" - props = { - "/calendars/": { - DAV_RESOURCETYPE: ["{DAV:}collection"], - DAV_DISPLAYNAME: "Calendars", - }, - "/calendars/work/": { - DAV_RESOURCETYPE: ["{DAV:}collection", CALDAV_CALENDAR], - DAV_DISPLAYNAME: "Work Calendar", - }, - "/calendars/other/": { - DAV_RESOURCETYPE: ["{DAV:}collection"], - DAV_DISPLAYNAME: "Other Collection", - }, - } - children = process_children_response(props, "/calendars/", filter_type=CALDAV_CALENDAR) - assert len(children) == 1 - assert children[0].display_name == "Work Calendar" - - def test_handles_trailing_slash_difference(self): - """Parent with/without trailing slash is handled.""" - props = { - "/calendars": { - DAV_RESOURCETYPE: ["{DAV:}collection"], - DAV_DISPLAYNAME: "Calendars", - }, - "/calendars/work/": { - DAV_RESOURCETYPE: ["{DAV:}collection", CALDAV_CALENDAR], - DAV_DISPLAYNAME: "Work", - }, - } - # Parent has trailing slash, response doesn't - children = process_children_response(props, "/calendars/") - assert len(children) == 1 - assert children[0].display_name == "Work" - - def test_handles_string_resource_type(self): - """Single string resource type is handled.""" - props = { - "/calendars/": { - DAV_RESOURCETYPE: "{DAV:}collection", - DAV_DISPLAYNAME: "Calendars", - }, - "/calendars/work/": { - DAV_RESOURCETYPE: CALDAV_CALENDAR, - DAV_DISPLAYNAME: "Work", - }, - } - children = process_children_response(props, "/calendars/") - assert len(children) == 1 - - def test_handles_none_resource_type(self): - """None resource type is handled.""" - props = { - "/calendars/": { - DAV_RESOURCETYPE: None, - DAV_DISPLAYNAME: "Calendars", - }, - "/calendars/work/": { - 
DAV_RESOURCETYPE: [CALDAV_CALENDAR], - DAV_DISPLAYNAME: "Work", - }, - } - children = process_children_response(props, "/calendars/") - # Parent excluded, work included - assert len(children) == 1 - - -class TestFindObjectProperties: - """Tests for find_object_properties function.""" - - def test_exact_match(self): - """Exact path match works.""" - props = { - "/calendars/user/": {"prop": "value"}, - } - result = find_object_properties(props, "/calendars/user/") - assert result.properties == {"prop": "value"} - assert result.matched_path == "/calendars/user/" - - def test_trailing_slash_mismatch(self): - """Trailing slash mismatch is handled.""" - props = { - "/calendars/user": {"prop": "value"}, - } - result = find_object_properties(props, "/calendars/user/") - assert result.properties == {"prop": "value"} - assert result.matched_path == "/calendars/user" - - def test_full_url_as_key(self): - """Full URL as properties key works.""" - props = { - "https://example.com/calendars/": {"prop": "value"}, - } - result = find_object_properties(props, "https://example.com/calendars/") - assert result.properties == {"prop": "value"} - - def test_double_slash_workaround(self): - """Double slash in path is normalized.""" - props = { - "/calendars/user/": {"prop": "value"}, - } - result = find_object_properties(props, "/calendars//user/") - assert result.properties == {"prop": "value"} - - def test_single_result_fallback(self): - """Single result is used as fallback.""" - props = { - "/some/other/path/": {"prop": "value"}, - } - result = find_object_properties(props, "/expected/path/") - assert result.properties == {"prop": "value"} - - def test_icloud_principal_workaround(self): - """iCloud /principal/ workaround works.""" - props = { - "/principal/": {"prop": "value"}, - } - result = find_object_properties(props, "/12345/principal/") - assert result.properties == {"prop": "value"} - - def test_no_match_raises(self): - """ValueError raised when no match found.""" - props = { - 
"/path/a/": {"prop": "a"}, - "/path/b/": {"prop": "b"}, - } - with pytest.raises(ValueError, match="Could not find properties"): - find_object_properties(props, "/path/c/") - - def test_principal_no_warning(self): - """Principal objects don't warn on trailing slash mismatch.""" - props = { - "/principal": {"prop": "value"}, - } - # Should not log warning for principals - result = find_object_properties(props, "/principal/", is_principal=True) - assert result.properties == {"prop": "value"} - - -class TestConvertProtocolResults: - """Tests for convert_protocol_results_to_properties function.""" - - def test_converts_results(self): - """Converts PropfindResult-like objects to dict.""" - - class FakeResult: - def __init__(self, href, properties): - self.href = href - self.properties = properties - - results = [ - FakeResult("/cal/", {DAV_DISPLAYNAME: "Calendar"}), - FakeResult("/cal/event.ics", {DAV_DISPLAYNAME: "Event"}), - ] - converted = convert_protocol_results_to_properties(results) - assert "/cal/" in converted - assert converted["/cal/"][DAV_DISPLAYNAME] == "Calendar" - assert "/cal/event.ics" in converted - - def test_initializes_requested_props(self): - """Requested props initialized to None.""" - - class FakeResult: - def __init__(self, href, properties): - self.href = href - self.properties = properties - - results = [FakeResult("/cal/", {DAV_DISPLAYNAME: "Calendar"})] - converted = convert_protocol_results_to_properties( - results, requested_props=[DAV_DISPLAYNAME, "{DAV:}getetag"] - ) - assert converted["/cal/"][DAV_DISPLAYNAME] == "Calendar" - assert converted["/cal/"]["{DAV:}getetag"] is None - - -class TestValidateDeleteResponse: - """Tests for validate_delete_response function.""" - - def test_accepts_200(self): - """200 OK is accepted.""" - validate_delete_response(200) # No exception - - def test_accepts_204(self): - """204 No Content is accepted.""" - validate_delete_response(204) # No exception - - def test_accepts_404(self): - """404 Not Found is 
accepted (already deleted).""" - validate_delete_response(404) # No exception - - def test_rejects_500(self): - """500 raises ValueError.""" - with pytest.raises(ValueError, match="Delete failed"): - validate_delete_response(500) - - def test_rejects_403(self): - """403 Forbidden raises ValueError.""" - with pytest.raises(ValueError, match="Delete failed"): - validate_delete_response(403) - - -class TestValidatePropatchResponse: - """Tests for validate_proppatch_response function.""" - - def test_accepts_200(self): - """200 OK is accepted.""" - validate_proppatch_response(200) # No exception - - def test_accepts_207(self): - """207 Multi-Status is accepted.""" - validate_proppatch_response(207) # No exception - - def test_rejects_400(self): - """400 raises ValueError.""" - with pytest.raises(ValueError, match="PROPPATCH failed"): - validate_proppatch_response(400) - - def test_rejects_403(self): - """403 Forbidden raises ValueError.""" - with pytest.raises(ValueError, match="PROPPATCH failed"): - validate_proppatch_response(403) diff --git a/tests/test_operations_principal.py b/tests/test_operations_principal.py deleted file mode 100644 index 03df5a2d..00000000 --- a/tests/test_operations_principal.py +++ /dev/null @@ -1,242 +0,0 @@ -""" -Tests for the Principal operations module. - -These tests verify the Sans-I/O business logic for Principal operations -like URL sanitization and vCalAddress creation. 
-""" - -from caldav.operations.principal_ops import PrincipalData -from caldav.operations.principal_ops import _create_vcal_address as create_vcal_address -from caldav.operations.principal_ops import ( - _extract_calendar_user_addresses as extract_calendar_user_addresses, -) -from caldav.operations.principal_ops import ( - _sanitize_calendar_home_set_url as sanitize_calendar_home_set_url, -) -from caldav.operations.principal_ops import ( - _should_update_client_base_url as should_update_client_base_url, -) -from caldav.operations.principal_ops import ( - _sort_calendar_user_addresses as sort_calendar_user_addresses, -) - - -class TestSanitizeCalendarHomeSetUrl: - """Tests for sanitize_calendar_home_set_url function.""" - - def test_returns_none_for_none(self): - """Returns None if input is None.""" - assert sanitize_calendar_home_set_url(None) is None - - def test_quotes_at_in_path(self): - """Quotes @ character in path URLs (owncloud quirk).""" - url = "/remote.php/dav/calendars/user@example.com/" - result = sanitize_calendar_home_set_url(url) - assert "%40" in result - assert "@" not in result - - def test_preserves_full_urls(self): - """Does not quote @ in full URLs.""" - url = "https://example.com/dav/calendars/user@example.com/" - result = sanitize_calendar_home_set_url(url) - # Full URLs should be returned as-is - assert result == url - - def test_preserves_already_quoted(self): - """Does not double-quote already quoted URLs.""" - url = "/remote.php/dav/calendars/user%40example.com/" - result = sanitize_calendar_home_set_url(url) - assert result == url - # Should not have double-encoding like %2540 - assert "%2540" not in result - - def test_preserves_normal_path(self): - """Preserves paths without special characters.""" - url = "/calendars/default/" - result = sanitize_calendar_home_set_url(url) - assert result == url - - -class TestSortCalendarUserAddresses: - """Tests for sort_calendar_user_addresses function.""" - - def test_sorts_by_preference(self): - 
"""Sorts addresses by preferred attribute (highest first).""" - - class FakeElement: - def __init__(self, text, preferred=0): - self.text = text - self._preferred = preferred - - def get(self, key, default=0): - if key == "preferred": - return self._preferred - return default - - addresses = [ - FakeElement("mailto:secondary@example.com", preferred=0), - FakeElement("mailto:primary@example.com", preferred=1), - FakeElement("mailto:tertiary@example.com", preferred=0), - ] - - result = sort_calendar_user_addresses(addresses) - - assert result[0].text == "mailto:primary@example.com" - # Other two maintain relative order (stable sort) - - def test_handles_missing_preferred(self): - """Handles elements without preferred attribute.""" - - class FakeElement: - def __init__(self, text): - self.text = text - - def get(self, key, default=0): - return default - - addresses = [ - FakeElement("mailto:a@example.com"), - FakeElement("mailto:b@example.com"), - ] - - # Should not raise, treats missing as 0 - result = sort_calendar_user_addresses(addresses) - assert len(result) == 2 - - -class TestExtractCalendarUserAddresses: - """Tests for extract_calendar_user_addresses function.""" - - def test_extracts_text(self): - """Extracts text from address elements.""" - - class FakeElement: - def __init__(self, text, preferred=0): - self.text = text - self._preferred = preferred - - def get(self, key, default=0): - if key == "preferred": - return self._preferred - return default - - addresses = [ - FakeElement("mailto:primary@example.com", preferred=1), - FakeElement("mailto:secondary@example.com", preferred=0), - ] - - result = extract_calendar_user_addresses(addresses) - - assert result == ["mailto:primary@example.com", "mailto:secondary@example.com"] - - def test_returns_empty_for_empty_list(self): - """Returns empty list for empty input.""" - assert extract_calendar_user_addresses([]) == [] - - -class TestCreateVcalAddress: - """Tests for create_vcal_address function.""" - - def 
test_creates_vcal_address(self): - """Creates vCalAddress with all parameters.""" - result = create_vcal_address( - display_name="John Doe", - address="mailto:john@example.com", - calendar_user_type="INDIVIDUAL", - ) - - assert str(result) == "mailto:john@example.com" - assert result.params["cn"] == "John Doe" - assert result.params["cutype"] == "INDIVIDUAL" - - def test_creates_without_display_name(self): - """Creates vCalAddress without display name.""" - result = create_vcal_address( - display_name=None, - address="mailto:john@example.com", - ) - - assert str(result) == "mailto:john@example.com" - assert "cn" not in result.params - - def test_creates_without_cutype(self): - """Creates vCalAddress without calendar user type.""" - result = create_vcal_address( - display_name="John", - address="mailto:john@example.com", - calendar_user_type=None, - ) - - assert str(result) == "mailto:john@example.com" - assert result.params["cn"] == "John" - assert "cutype" not in result.params - - -class TestShouldUpdateClientBaseUrl: - """Tests for should_update_client_base_url function.""" - - def test_returns_false_for_none(self): - """Returns False for None URL.""" - assert should_update_client_base_url(None, "example.com") is False - - def test_returns_false_for_same_host(self): - """Returns False when hostname matches.""" - assert ( - should_update_client_base_url( - "https://example.com/calendars/", - "example.com", - ) - is False - ) - - def test_returns_true_for_different_host(self): - """Returns True when hostname differs (iCloud load balancing).""" - assert ( - should_update_client_base_url( - "https://p123-caldav.icloud.com/calendars/", - "caldav.icloud.com", - ) - is True - ) - - def test_returns_false_for_relative_path(self): - """Returns False for relative paths (no host to compare).""" - assert ( - should_update_client_base_url( - "/calendars/user/", - "example.com", - ) - is False - ) - - -class TestPrincipalData: - """Tests for PrincipalData dataclass.""" - - def 
test_creates_principal_data(self): - """Creates PrincipalData with all fields.""" - data = PrincipalData( - url="/principals/user/", - display_name="John Doe", - calendar_home_set_url="/calendars/user/", - calendar_user_addresses=["mailto:john@example.com"], - ) - - assert data.url == "/principals/user/" - assert data.display_name == "John Doe" - assert data.calendar_home_set_url == "/calendars/user/" - assert data.calendar_user_addresses == ["mailto:john@example.com"] - - def test_allows_none_values(self): - """Allows None values for optional fields.""" - data = PrincipalData( - url=None, - display_name=None, - calendar_home_set_url=None, - calendar_user_addresses=[], - ) - - assert data.url is None - assert data.display_name is None - assert data.calendar_home_set_url is None - assert data.calendar_user_addresses == [] diff --git a/tests/test_protocol.py b/tests/test_protocol.py index b7e1fbd8..c3a55375 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,7 +1,7 @@ """ -Unit tests for Sans-I/O protocol layer. +Unit tests for protocol XML builders and response parsers. -These tests verify protocol logic without any HTTP mocking required. +These tests verify XML building and parsing logic without any HTTP mocking. All tests are pure - they test data transformations only. 
""" @@ -9,76 +9,14 @@ import pytest -from caldav.protocol import ( - DAVMethod, - DAVRequest, - DAVResponse, - MultistatusResponse, - SyncCollectionResult, -) -from caldav.protocol.xml_builders import ( - _build_calendar_multiget_body as build_calendar_multiget_body, -) -from caldav.protocol.xml_builders import ( - _build_calendar_query_body as build_calendar_query_body, -) -from caldav.protocol.xml_builders import _build_mkcalendar_body as build_mkcalendar_body -from caldav.protocol.xml_builders import _build_propfind_body as build_propfind_body -from caldav.protocol.xml_builders import ( - _build_sync_collection_body as build_sync_collection_body, -) -from caldav.protocol.xml_parsers import ( - _parse_calendar_query_response as parse_calendar_query_response, -) -from caldav.protocol.xml_parsers import _parse_multistatus as parse_multistatus -from caldav.protocol.xml_parsers import ( - _parse_propfind_response as parse_propfind_response, -) -from caldav.protocol.xml_parsers import ( - _parse_sync_collection_response as parse_sync_collection_response, -) - - -class TestDAVTypes: - """Test core DAV types.""" - - def test_dav_request_immutable(self): - """DAVRequest should be immutable (frozen dataclass).""" - request = DAVRequest( - method=DAVMethod.GET, - url="https://example.com/", - headers={}, - ) - with pytest.raises(AttributeError): - request.url = "https://other.com/" - - def test_dav_request_with_header(self): - """with_header should return new request with added header.""" - request = DAVRequest( - method=DAVMethod.GET, - url="https://example.com/", - headers={"Accept": "text/html"}, - ) - new_request = request.with_header("Authorization", "Bearer token") - - # Original unchanged - assert "Authorization" not in request.headers - # New has both headers - assert new_request.headers["Accept"] == "text/html" - assert new_request.headers["Authorization"] == "Bearer token" +from caldav.base_client import BaseDAVClient - def test_dav_response_ok(self): - """ok 
property should return True for 2xx status codes.""" - assert DAVResponse(status=200, headers={}, body=b"").ok - assert DAVResponse(status=201, headers={}, body=b"").ok - assert DAVResponse(status=207, headers={}, body=b"").ok - assert not DAVResponse(status=404, headers={}, body=b"").ok - assert not DAVResponse(status=500, headers={}, body=b"").ok - - def test_dav_response_is_multistatus(self): - """is_multistatus should return True only for 207.""" - assert DAVResponse(status=207, headers={}, body=b"").is_multistatus - assert not DAVResponse(status=200, headers={}, body=b"").is_multistatus +build_calendar_multiget_body = BaseDAVClient._build_calendar_multiget_body +build_calendar_query_body = BaseDAVClient._build_calendar_query_body +build_mkcalendar_body = BaseDAVClient._build_mkcalendar_body +build_propfind_body = BaseDAVClient._build_propfind_body +build_sync_collection_body = BaseDAVClient._build_sync_collection_body +from caldav.response import DAVResponse, SyncCollectionResult class TestXMLBuilders: @@ -149,8 +87,8 @@ def test_build_mkcalendar_body(self): class TestXMLParsers: """Test XML parsing functions.""" - def test_parse_multistatus_simple(self): - """Parse simple multistatus response.""" + def test_parse_propfind_simple(self): + """Parse simple multistatus response via DAVResponse.""" xml = b""" @@ -164,15 +102,14 @@ def test_parse_multistatus_simple(self): """ - result = parse_multistatus(xml) + results = DAVResponse.from_bytes(xml).parse_propfind() - assert isinstance(result, MultistatusResponse) - assert len(result.responses) == 1 - assert result.responses[0].href == "/calendars/user/" - assert "{DAV:}displayname" in result.responses[0].properties + assert len(results) == 1 + assert results[0].href == "/calendars/user/" + assert "{DAV:}displayname" in results[0].properties - def test_parse_multistatus_with_sync_token(self): - """Parse multistatus with sync-token.""" + def test_parse_propfind_with_sync_token(self): + """parse_propfind populates 
DAVResponse.sync_token when present.""" xml = b""" @@ -185,8 +122,9 @@ def test_parse_multistatus_with_sync_token(self): token-456 """ - result = parse_multistatus(xml) - assert result.sync_token == "token-456" + response = DAVResponse.from_bytes(xml) + response.parse_propfind() + assert response.sync_token == "token-456" def test_parse_propfind_response(self): """Parse PROPFIND response.""" @@ -203,14 +141,14 @@ def test_parse_propfind_response(self): """ - results = parse_propfind_response(xml, status_code=207) + results = DAVResponse.from_bytes(xml).parse_propfind() assert len(results) == 1 assert results[0].href == "/calendars/" def test_parse_propfind_404_returns_empty(self): """PROPFIND 404 should return empty list.""" - results = parse_propfind_response(b"", status_code=404) + results = DAVResponse.from_bytes(b"", status_code=404).parse_propfind() assert results == [] def test_parse_calendar_query_response(self): @@ -234,7 +172,7 @@ def test_parse_calendar_query_response(self): """ - results = parse_calendar_query_response(xml, status_code=207) + results = DAVResponse.from_bytes(xml).parse_calendar_query() assert len(results) == 1 assert results[0].href == "/cal/event.ics" @@ -261,7 +199,7 @@ def test_parse_sync_collection_response(self): new-token """ - result = parse_sync_collection_response(xml, status_code=207) + result = DAVResponse.from_bytes(xml).parse_sync_collection() assert isinstance(result, SyncCollectionResult) assert len(result.changed) == 1 @@ -297,20 +235,17 @@ def test_parse_complex_properties(self): """ - results = parse_propfind_response(xml, status_code=207) + results = DAVResponse.from_bytes(xml).parse_propfind() assert len(results) == 1 props = results[0].properties - # Simple property assert props["{DAV:}displayname"] == "My Calendar" - # resourcetype - list of child tags resourcetype = props["{DAV:}resourcetype"] assert "{DAV:}collection" in resourcetype assert "{urn:ietf:params:xml:ns:caldav}calendar" in resourcetype - # 
supported-calendar-component-set - list of component names components = props["{urn:ietf:params:xml:ns:caldav}supported-calendar-component-set"] assert components == ["VEVENT", "VTODO", "VJOURNAL"] From 7f6fa5676f236357058848412d30172f5c130731 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 24 Apr 2026 08:18:09 +0200 Subject: [PATCH 15/17] chore: deal with code review concerns --- caldav/async_davclient.py | 1 - caldav/davobject.py | 5 +++-- caldav/response.py | 16 ++++++---------- tests/test_caldav_unit.py | 13 +++---------- 4 files changed, 12 insertions(+), 23 deletions(-) diff --git a/caldav/async_davclient.py b/caldav/async_davclient.py index 7cef0cc5..285cf590 100644 --- a/caldav/async_davclient.py +++ b/caldav/async_davclient.py @@ -72,7 +72,6 @@ def auth_flow(self, request): from caldav.lib import error from caldav.lib.python_utilities import to_wire from caldav.lib.url import URL - from caldav.requests import HTTPBearerAuth from caldav.response import CalendarQueryResult, DAVResponse, PropfindResult diff --git a/caldav/davobject.py b/caldav/davobject.py index 98a64102..f2a2383b 100644 --- a/caldav/davobject.py +++ b/caldav/davobject.py @@ -107,8 +107,9 @@ def is_async_client(self) -> bool: """ if self.client is None: return False - # Use string check to avoid circular imports - return type(self.client).__name__ == "AsyncDAVClient" + from caldav.async_davclient import AsyncDAVClient + + return isinstance(self.client, AsyncDAVClient) def children( self, type: str | None = None diff --git a/caldav/response.py b/caldav/response.py index c7279dff..0b27d5b9 100644 --- a/caldav/response.py +++ b/caldav/response.py @@ -470,7 +470,7 @@ def _parse_response(self, response: _Element) -> tuple[str, list[_Element], Any error.assert_(status) self.validate_status(status) elif elem.tag == dav.Href.tag: - assert not href + error.assert_(not href) href = _normalize_href(elem.text or "") elif elem.tag == dav.PropStat.tag: propstats.append(elem) @@ -507,8 +507,6 @@ def 
_parse_scheduling_response_objects(self, parent) -> dict: attendee - potentially with error status for all or some of the wanted attendees. - TODO: some asserts here - should make better error handling - Returns: Dict with: * email addresses -> FreeBusy status (raw data) @@ -517,9 +515,9 @@ def _parse_scheduling_response_objects(self, parent) -> dict: """ self.objects = {} self.objects["errors"] = {} - assert self.tree.tag == cdav.ScheduleResponse.tag + error.assert_(self.tree.tag == cdav.ScheduleResponse.tag) for response in self.tree: - assert response.tag == cdav.Response.tag + error.assert_(response.tag == cdav.Response.tag) parsed_response = self._parse_scheduling_response(response) for x in parsed_response: if x.endswith(":err"): @@ -531,8 +529,6 @@ def _parse_scheduling_response_objects(self, parent) -> dict: def _parse_scheduling_response(self, response) -> dict[str, str]: """ - TODO: lots of asserts here - should make better error handling - Parses one attendee response from a RFC6638 freebusy scheduling request Returns: @@ -547,7 +543,7 @@ def _parse_scheduling_response(self, response) -> dict[str, str]: for x in response: if x.tag == cdav.Recipient.tag: if len(x) == 1: - assert x[0].tag == dav.Href.tag + error.assert_(x[0].tag == dav.Href.tag) recipient = x[0].text else: recipient = x.text @@ -557,8 +553,8 @@ def _parse_scheduling_response(self, response) -> dict[str, str]: calendar_data = x.text else: raise error.DAVError(f"unexpected attribute {x.tag}") - assert recipient - assert status + error.assert_(recipient) + error.assert_(status) if not status.startswith("2.0"): ret[f"{recipient}:err"] = status if calendar_data: diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index 6acf7e61..77971381 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -1995,10 +1995,7 @@ def test_get_object_by_uid_returns_coroutine_for_async_client(self): xml_response = self._make_multistatus(ev1) client = MockedDAVClient(xml_response) - 
# Pretend the client is async by patching the type name - client.__class__ = type( - "AsyncDAVClient", (MockedDAVClient,), {"__module__": AsyncDAVClient.__module__} - ) + client.__class__ = type("MockedAsyncDAVClient", (MockedDAVClient, AsyncDAVClient), {}) calendar = Calendar(client, url="/calendar/") assert calendar.is_async_client uid = "20010712T182145Z-123401@example.com" @@ -2016,9 +2013,7 @@ def test_get_object_by_uid_async_returns_correct_object(self): from caldav.async_davclient import AsyncDAVClient client = MockedDAVClient("") - client.__class__ = type( - "AsyncDAVClient", (MockedDAVClient,), {"__module__": AsyncDAVClient.__module__} - ) + client.__class__ = type("MockedAsyncDAVClient", (MockedDAVClient, AsyncDAVClient), {}) calendar = Calendar(client, url="/calendar/") uid = "20010712T182145Z-123401@example.com" @@ -2072,9 +2067,7 @@ def _make_async_client_and_calendar(self): from caldav.async_davclient import AsyncDAVClient client = MockedDAVClient("") - client.__class__ = type( - "AsyncDAVClient", (MockedDAVClient,), {"__module__": AsyncDAVClient.__module__} - ) + client.__class__ = type("MockedAsyncDAVClient", (MockedDAVClient, AsyncDAVClient), {}) calendar = Calendar(client, url="/calendar/") return client, calendar From 2ac895aeac6760cb6eea8e2ae05badcce498dec7 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 24 Apr 2026 10:53:30 +0200 Subject: [PATCH 16/17] docs: contrib, async, ai-policy, changelog, misc Funding.json is some kind of industry standard on how to beg for funding. Added it. Fixes https://github.com/python-caldav/caldav/issues/608 aka 95b9f5e. The funding file was partially generated with Claude Code - I asked the AI to help reading the specs and setting up the json structure accordingly. The async support has proven a lot more fragile than what I had hoped for, so it's appropriate to add some warnings in the async documentation. Git commit messages should now follow the industry standard. 
CHANGELOG prepared for v3.2.0 release The commit is predominantly human-written, with the following exceptions: * The code review document was AI-generated, human-updated * Changelog was AI-maintained, but most of it has been rewritten by hand prompt: Make a code review of all changes since v3.0.0 followup-prompt: write the review to a file under docs/design/ followup-prompt: The code review was not committed. Commit, then work on the code duplication in response.py prompt: the ChangeLog should be maintained Assisted-By: Claude Sonnet 4.6 --- AI-POLICY.md | 36 +- CHANGELOG.md | 399 +++------------------ CONTRIBUTING.md | 2 +- README.md | 2 + docs/design/ASYNC_DUAL_MODE.md | 20 ++ docs/design/README.md | 2 +- docs/design/V3_CODE_REVIEW_v3.0_to_v3.2.md | 316 ++++++++++++++++ docs/source/async.rst | 9 +- docs/source/http-libraries.rst | 3 +- funding.json | 47 +++ 10 files changed, 452 insertions(+), 384 deletions(-) create mode 100644 docs/design/ASYNC_DUAL_MODE.md create mode 100644 docs/design/V3_CODE_REVIEW_v3.0_to_v3.2.md create mode 100644 funding.json diff --git a/AI-POLICY.md b/AI-POLICY.md index c8490251..f8d4c720 100644 --- a/AI-POLICY.md +++ b/AI-POLICY.md @@ -4,9 +4,6 @@ The most important rule: be honest and inform about it! -Keep a log of the prompts used - prompts should preferably be included in the -git commits. - Tools should generally be used for improving the quality of the project, not for rapidly adding new features. @@ -20,7 +17,7 @@ large for being included in the commit message, etc. Keep it clear what is human-written vs what is AI-written. In a feature-branch, separate AI-commits and human-commits is preferable. Those should most often be squashed together before including it in -the main branch, with a notice in the commit message on what parts o +the main branch, with a notice in the commit message on what parts of the commit is AI-generated. ## Transparency matters @@ -85,6 +82,11 @@ rewritten. adding value to the project. 
You should at least do a quick QA on the AI-answer and acknowledge that it was generated by the AI. +* Most AI policies warns about potential copyright infringements. I + can hardly think it's any risk wrg of contributions to the Python + CalDAV library. In particular, if your changeset consists of lots + of minor changes to existing code, then it's nothing to worry about. + * The Contributors Guidelines aren't strongly enforced on this project as of 2026-02, and I can hardly see cases where the AI would break the Code of Conduct, but at the end of the day, it's **YOUR** @@ -94,7 +96,7 @@ rewritten. The maintainer started playing with Claude Code in the end of 2025 - and [blogged about it](https://www.redpill-linpro.com/techblog/2026/03/20/from-luddite-to-vibe-coder.html) -Releases 2.2.6 - 3.2.0 has been heavily assisted by Claude - which is pretty obvious when looking into the commit messages. My experiences has been mixed - sometimes it seems to be doing a better and faster job than me, other times it seems to be making a mess a lot faster than what I can do it. Despite (or because of?) using Claude extensively, I spent much more time on it than estimated. +Releases 2.2.6 - 3.2.0 has been heavily assisted by Claude - which is pretty obvious when looking into the commit messages. My experiences has been mixed - sometimes it seems to be doing a better and faster job than me, other times it seems to be making a mess a lot faster than what I can do it. Despiteof (or because of?) using Claude extensively, I spent much more time on the 3.0.0-release than estimated. Lots of time and efforts have been spent on doing QA on the changes, fixing up things and/or asking Claude to do a better job. The surge of issues reported after the 3.0-release is probably unrelated to the AI usage - it's a result of trying to shoehorn both async and API changes into it without breaking backward compatbility and without duplicating too much code. 
The CHANGELOG.md entry for 3.0 explicitly declared a caveat: "there are massive code changes in version 3.0, so if you're using the Python CalDAV client library in some sharp production environment, I would recommend to wait for two months before upgrading". @@ -104,19 +106,17 @@ Generated changes and human-made changes are often mixed up. I prefer "logical" ## Future plans of GenAI-usage -Post-3.2.0 and until further notice I will try to go more back to the old ways for doing the "core development tasks" - new features and complex refactoring. If nothing else, it's important for maintaining my brain cells, coding skills and making sure all the changes sticks to my memory. The new policy is that GenAI-tools should be used mainly for improving quality, not speeding up the development. +Post-3.2.0 and until further notice I will try to go back to the old ways for doing the "core development tasks" - new features and complex refactoring. If nothing else, it's important for maintaining my brain cells, coding skills and making sure all the changes sticks to my memory. The new policy is that GenAI-tools should be used mainly for improving quality, not speeding up the development. -I still intend to use GenAI heavily for certain tasks, like: +I still intend to use GenAI heavily for certain tasks, particularly anything that is either "mundane and tedious" or unrelated to the "the working end" of the library. Examples: -* Minor bugfixes - with test code. The bugfix itself may often be a simple one-line change, but debugging and writing up the tests is tedious work. -* Maintaining the integration test framework. It's hard work, even when using Claude. Thanks to Claude I've now been able to put up an extensive "battery" of test servers that I'm checking regularly towards. This is something I've started on several times since 2013 but except for the two integrated python servers I never managed to get any lasting solutions. 
It's very useful to be able to easily test the library towards a wide range of servers - the majority of the bug reports are compatibility issues. The more servers I have for testing every release, the less troubles will be discovered downstream. -* Other CI-related frameworks and "boiler plate" for things like automated testing of code embedded in the documentation, QA on the commit messages before I push my git commits out from my laptop, etc. It increases quality, although being quite outside the "core business" of the CalDAV library. Doing it manually (and reading through all the documentation out there) would have stolen lots of valuable time that could have been used for coding. -* Writing up test code. I've always thought that "test driven development" is a good idea (write test code first, then the logic), but it's quite often both tedious and difficult. Claude can make them really fast. It still needs some QA, care should be taken to ensure it's testing the right thing. -* Code reviews. The more "eyes" looking into the software, the better - it seems Claude is equally good at spotting the problems and mistakes in my code as I'm on spotting the problems and mistakes in the code Claude generates. -* Debugging. It's easy to get stuck and spend tons of time on debugging - sometimes (but not always) Claude can find them easily. -* Various mundane and tedious work (i.e. "I left a TODO-note in the code over there, could you have look into it and eliminate it?"). -* Development of the companion caldav-server-checker tool - writing up checks to discover various server issues may be really tedious and time-consuming, and (most of the time) easy for Claude to get right. The alternative to using GenAI would probably be to have half as many checks. I find those checks very useful. +* Code reviews. I think there should be a policy that all changesets and releases should go through AI-driven code review. 
By itself it sounds like a good idea, though one should be aware of the risk that this comes *instead* of human reviews rather than as an addition.
+* Reading RFCs and quickly give a pointer to the relevant sections, or verifying that the code is according to the standards or not (but care should be taken - I've seen Claude hallucinating completely wrong RFC references). +* Various other mundane and tedious work (i.e. "I left a TODO-note in the code over there, could you have look into it and eliminate it?"). diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f97124a..e5eba4b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,30 +8,57 @@ As of v3.x, **niquests** is used for HTTP communication. It's a backward-compati This file should adhere to [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), but I do have some extra sections in it. Notably an executive summary at the top, "Breaking Changes" or "Potentially Breaking Changes", list of GitHub issues/pull requests closed/merged, information on changes in the test framework, list of tests run, my work effort, credits to people assisting, an overview of how much time I've spent on each release, and an overview of calendar servers the release has been tested towards. -Changelogs prior to v2.0 is pruned, but was available in the v2.x releases +Changelogs prior to v3.0 is pruned, but was available in the v3.1 release This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), though for pre-releases PEP 440 takes precedence. -## [Unreleased] +## [3.2.0] - 2026-04-24 -### Removed +The two most significant news in v3.2 are **relatively well-tested support for scheduling** (RFC6638) and **better-tested support for async**. Care should still be taken, those features are backed by many tests, but lacks testing for how well they support real-world use-case scenarios. While async support was added in version 3.0, it was not well-enough tested. Still only a fraction of all the integration tests for sync usage has been duplicated in the async integration test, I expect to release 3.2.1 with symmetric async integration tests before 2025-07. 
-* Compatibility feature `search.text.by-uid` has been removed. `get_object_by_uid()` already has a client-side fallback (via `_hacks="insist"`) that works on any server, so the guard was no longer needed. Closes https://github.com/python-caldav/caldav/issues/586 +### Added + +* `add_organizer()` now accepts an optional explicit *organizer* argument (a `Principal`, `vCalAddress`, or email string) +* Complete support for **Schedule-Tag** (RFC 6638 §3.2–3.3) and **Etag**. Headers from upstream will be catched and stored in the properties. If those properties exists, `If-Schedule-Tag-Match` or `If-Match` headers will be sent. A `ScheduleTagMismatchError` or `EtagMismatchError` will be raised on 412. + +### Changed + +* **httpx deprecation** - earlier, in async mode, if httpx was installed it would be used (while niquests is listed in the requirements). This have been reversed - now httpx will be used if it's installed while niquest isn't installed. httpx seems like a dead end, destroyed by drama and intrigues, and now even flagged as a supply chain risk on Reddit. See https://github.com/python-caldav/caldav/issues/611#issuecomment-4278875543 +* **SEQUENCE property assumed to default to 0** when absent (RFC 5546 §2.1.4). `save()` then inserts `SEQUENCE:1` unless the `increase_seqno` attribute is set to False. ### Fixed -* Reusing a `CalDAVSearcher` across multiple `search()` calls could yield inconsistent results: the first call would return only pending tasks (correct), but subsequent calls would change behaviour because `icalendar_searcher.Searcher.check_component()` mutated the `include_completed` field from `None` to `False` as a side-effect. Fixed by passing a copy with `include_completed` already resolved to `filter_search_results()`, leaving the original searcher object unchanged. 
Fixes https://github.com/python-caldav/caldav/issues/650 -* `_resolve_properties()` would crash with `UnboundLocalError` in production mode when a server returned an empty or unrecognisable PROPFIND response (the response paths did not match the request URI and there was more than one or zero paths returned). Fixed by returning `{}` instead of falling through to an unbound variable. Related: https://github.com/pycalendar/calendar-cli/issues/114 -* `Calendar.get_supported_components()` - * raised `KeyError` when the server did not include the `supported-calendar-component-set` property in its response. RFC 4791 section 5.2.3 states this property is optional and that its absence means all component types are accepted; the method now returns the RFC default `["VEVENT", "VTODO", "VJOURNAL"]` in that case, trimmed by any known server limitations from the compatibility hints (e.g. if `save-load.todo` is `unsupported`, `VTODO` is excluded). Fixes https://github.com/python-caldav/caldav/issues/653 - * async path returned an unawaited coroutine instead of the actual result. -* `accept_invite()` (and `decline_invite()`, `tentatively_accept_invite()`) now fall back to the client username as the attendee email address when the server does not expose the `calendar-user-address-set` property (RFC6638 §2.4.1). A `NotFoundError` with a descriptive message is raised when the username is also not an email address. Fixes https://github.com/python-caldav/caldav/issues/399 +* Bug with inconsistent `search()`-results - https://github.com/python-caldav/caldav/issues/650 +* Compatibility fixing: + * `_resolve_properties()` would crash for some disbehaving servers. https://github.com/pycalendar/calendar-cli/issues/114 + * `Calendar.get_supported_components()` would crash for some servers. 
https://github.com/python-caldav/caldav/issues/653 + * Fallback code for `accept_invite()`, `decline_invite()` and `tentatively_accept_invite()` when the server does not expose the `calendar-user-address-set` property. https://github.com/python-caldav/caldav/issues/399 +* Quite some code-paths with IO was async-unaware - found and fixed quite many of those. Some places duplicating code seems to be most trivial - but it's something I really want to avoid. There were already places in the code where the async and sync behaviour differed. I've done quite some refactoring to reduce the amount of duplicated code. +* Done some work on `get_object_by_uid()`, aligning it with the rest of the search API. Closes https://github.com/python-caldav/caldav/issues/586 + +### AI transparency + +I've been experimenting with Claude Code over the last few months, concerns have been raised that it may have negatively affected code quality - and indeed, this is probably a major reason why the async support in v3.0 was simply not good enough. I've been working a bit more on the [[AI-POLICY.md]], some of the directions for the future looks like this: + +* All work involving *new features* should primarily be done by hand (AI-assistance allowed for discussing different design decisions, reviewing and fixing trivial bugs in the new code, dealing with trivial TODO-nodes in the handwritten code, etc). +* All prompts should be logged. +* Prompts should be included in the commit message. +* Model and other relevant information on the AI-usage should be included. +* Commit messages should include information on what and how much is AI-generated (with default being "all" or "none" dependent on the commit message trailer) +* Commit messages should include information on why AI was used. +* The AI should be used for Code Review for every release. + +The 3.2-release may not be fully up to those standards, as they were made while working on 3.2. 
+ +The branch v3.2-development contains "raw" commits, most of the commits are either AI-written (including commit message) or human-written. I've done quite some work trying to squash the commits into fewer commits, in the main branch all the recent commit messages are handwritten, and most of the commits have some notes on how much is AI-generated and why AI-generation was chosen. The manual walk-through of all the commits has been tedious, but useful for QA-purposes. I'm considering this to be the way forward. + +I have all relatively fresh communication with Claude in JSON-files, and I was considering to embed them into the repository for increased transparency. Everything considered, I think it would involve too much noise, so I've skipped it as for now. If you want it, I will publish it. ### Housekeeping +* **GitHub exit strategy**: Issues are now mirrored in the git repository itself using the [git-bug package](https://github.com/git-bug/git-bug). I'm not intending to leave GitHub for the foreseeable future, but I don't want to be locked-in or dependent on GitHub - this is a first step towards an "exit strategy". +* **Code quality**: reduced ruff ignore list (https://github.com/python-caldav/caldav/issues/634) — removed unused imports (`copy`, `lxml.etree`, `CalendarSet`, `cdav/dav` re-exports, `Optional`, `timezone`, `Event`/`Todo` type stubs), replaced bare `except:` clauses with specific exception types (`KeyError`, `AttributeError`, `Exception` where broad catching is intentional), and removed unused local variables. * Added `funding.json` (https://fundingjson.org/) at the repository root. 
Closes https://github.com/python-caldav/caldav/issues/608 -* Code quality: reduced ruff ignore list (https://github.com/python-caldav/caldav/issues/634) — removed unused imports (`copy`, `lxml.etree`, `CalendarSet`, `cdav/dav` re-exports, `Optional`, `timezone`, `Event`/`Todo` type stubs), replaced bare `except:` clauses with specific exception types (`KeyError`, `AttributeError`, `Exception` where broad catching is intentional), and removed unused local variables. -* Sync `_put()` now updates `self.url` from the `Location` header on a 302 redirect, mirroring the existing async behaviour. ### Test framework, compatibility hints, documentation, examples @@ -48,6 +75,9 @@ This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0 * Calendar owner example (`examples/calendar_owner_examples.py`) demonstrating how to retrieve the owner of a calendar via `DAV:owner` and resolve their calendar-user address. `testFindCalendarOwner` now exercises the full owner → principal → `get_vcal_address()` chain. Closes https://github.com/python-caldav/caldav/issues/544 * `testInviteAndRespond` implemented end-to-end: organizer creates an event, invites an attendee, attendee accepts, and the organizer verifies the updated `PARTSTAT`. Per-server compatibility flags applied for known quirks (Baikal, Cyrus, SOGo). * Multi-user RFC 6638 scheduling tests wired into the Docker server setup for Cyrus and Baikal (pre-populated `user1`–`user3`/`user1`–`user5`). +* Internal refactoring: `caldav/operations/` and `caldav/protocol/` packages deleted; functionality consolidated into `response.py`, `collection.py`, `search.py`, and `BaseDAVClient` static methods. No user-visible API changes. +* Compatibility feature `search.text.by-uid` has been removed. `get_object_by_uid()` already has a client-side fallback (via `_hacks="insist"`) that works on any server, so the guard was no longer needed. 
Closes https://github.com/python-caldav/caldav/issues/586 +* **`scheduling.auto-schedule` compatibility flag**: True when the server auto-processes incoming iTIP REQUEST messages and places the event directly into the attendee's calendar (RFC 6638 SCHEDULE-AGENT=SERVER). Used by `_reply_to_invite_request()` to choose the right update strategy. ## [3.1.0] - 2026-03-19 @@ -397,348 +427,3 @@ In addition, lots of time spent on things that aren't covered by the roadmap: * Responding fast to inbound issues and pull requests * Communication and collaboration * The release itself (running tests towards lots of servers with quirks - like having to wait for several minutes from an event is edited until it can be found through a search operation - looking through and making sure the CHANGELOG is complete, etc) is quite tedious and easily takes several days - weeks if it's needed to tweak on workarounds and compatbility hints to get the tests passing. - -## [2.2.6] - 2026-02-01 - -### Fixed - -* Fixed potential IndexError in URL path joining when path is empty -* Fixed NameError in search.py caused by missing import of `logging` module, which was masking actual errors when handling malformed iCalendar data. https://github.com/python-caldav/caldav/issues/614 - -### Changed - -* Updated example files to use the recommended `get_davclient()` factory function instead of `DAVClient()` directly - -### Test Framework - -* Added deptry for dependency verification in CI - -### GitHub Pull Requests Merged - -* #607 - Add deptry for dependency verification -* #605 - Update examples to use get_davclient() instead of DAVClient() - -### GitHub Issues Closed - -* #612 - Export get_davclient from caldav package -* #614 - Missing import logging in search.py causes NameError masking actual errors - -(2.2.4 is without niquests in the dependencies. 2.2.5 is with niquests. 
2.2.6 is with niquests and a tiny CHANGELOG-fix) - -### Added - -* `get_davclient` is now exported from the `caldav` package, allowing `from caldav import get_davclient`. https://github.com/python-caldav/caldav/issues/612 - -## [2.2.3] - 2025-12-06] -### Fixed - -* Some servers did not support the combination of HTTP/2-multiplexing and authentication. Two workarounds fixed; baikal will specifically not use multiplexing, and an attempt to authenticate without multiplexing will be made upon authentication problems. Fixes https://github.com/python-caldav/caldav/issues/564 -* The DTSTAMP is mandatory in icalendar data. The `vcal.fix`-scrubber has been updated to make up a DTSTAMP if it's missing. Fixes https://github.com/python-caldav/caldav/issues/504 - -## [2.2.2] - 2025-12-04] -2.2.1 is released with requests support (mispelled riquests in 2.2.0), 2.2.2 with niquests support - -## [2.2.1] - [2025-12-04] - -Highlights: - -* New ways to set up client connections: - - For cloud-based services, it should suffice to pass username, password and the name of the service, no URL needed (though, just some few providers supported so far) - - If the username is in email format, then it's generally not needed to pass a URL. -* v2.2 comes with lots of workarounds around lack of feature support in the servers - notably the sync-token API will work also towards servers not supporting sync-tokens. In some cases lack of server functionality is detected, but as for now it may be needed to specify what server one is user through the `features` configuration flag. -* v2.2 supports more complex searches. Client-side filtering will be utilized for the things that aren't supported on the server side. - -### Potentially Breaking Changes - -(More information on the changes in the Changed section) - -* **Search results may differ** due to workarounds for various server compatibility problems. For some use cases this may be a breaking change. https://xkcd.com/1172/ -* **New dependencies**. 
As far as I understand the SemVer standard, new dependencies can be added without increasing the major version number - but for some scenarios where it's hard to add new dependencies, this may be a breaking change. - - The python-dns package is used for RFC6764 discovery. This is a well-known package, so the security impact should be low. This library is only used when doing such a recovery. If anyone minds this dependency, I can change the project so this becomes an optional dependency. - - Some code has been split out into a new package - `icalendar-searcher`. so this may also break if you manage the dependencies manually. As this package was made by the maintainer of the CalDAV package, the security impact of adding this dependency should be low. -* Potentially major **performance problems**: rather than throwing errors, the sync-token-API may now fetch the full calendar. This change is intended to be un-breaking, but for people having very big calendars and syncing them to a mobile device with limited memory, bandwidth, CPU and battery, this change may be painful. (If a servers is marked to have "fragile" support for sync-tokens, the fallback will apply to those servers too). -* **Very slow test suite** due to lots of docker-containers spun up with verious server implementations. See the "Test Suite" section below. - -### Changed - -* Transparent handling of calendar servers not supporting sync-tokens. The API will yield the same result, albeit with more bandwidth and memory consumption. -* I'm still working on "compatibility hints". Unfortunately, documentation is still missing. -* **Major refactoring!** Some of the logic has been pushed out of the CalDAV package and into a new package, icalendar-searcher. New logic for doing client-side filtering of search results have also been added to that package. This refactoring enables possibilities for more advanced search queries as well as client-side filtering. 
- * For advanced search queries, it's needed to create a `caldav.CalDAVSearcher` object, add filters and do a `searcher.search(cal)` instead of doing `cal.search(...)`. -* **Server compatibility improvements**: Significant work-arounds added for inconsistent CalDAV server behavior, aiming for consistent search results regardless of the server in use. Many of these work-arounds require proper server compatibility configuration via the `features` / `compatibility_hints` system. This may be a **breaking change** for some use cases, as backward-bug-compatibility is not preserved - searches may return different results if the previous behavior was relying on server quirks. - -### Fixed - -* As noted above, quite some changes have been done to searches. One may argue if this is breaking changes, changes or bugfixes. At least github issues #434, #461, #566 and #509 has been closed in the process. -* A minor bug in the FeatureSet constructor was fixed, sometimes information could be lost. -* Downgraded a CRITICAL error message to INFO, for some conditions that clearly wasn't CRITICAL (HTML error responses from server or wrong content-type given, when XML was expected) -* Probably some other minor bug fixes (though, most of the bugs fixed in this release was introduced after 2.1.2) -* A user managed to trigger a crash bug in the search in https://github.com/python-caldav/caldav/issues/587 - this has indirectly been fixed through the refactorings. - -### Added - -* **New ways to configure the client connection, new parameters** - - **RFC 6764 DNS-based service discovery**: Automatic CalDAV/CardDAV service discovery using DNS SRV/TXT records and well-known URIs. Users can now provide just a domain name or email address (e.g., `DAVClient(username='user@example.com')`) and the library will automatically discover the CalDAV service endpoint. The discovery process follows RFC 6764 specification. This involves a new required dependency: `dnspython` for DNS queries. 
DNS-based discovery can be disabled in the davclient connection settings, but I've opted against implementing a fallback if the dns library is not installed. - - Use `features: posteo` instead of `url: https://posteo.de:8443/` in the connection configuration. - - Use `features: nextcloud` and `url: my.nextcloud.provider.eu` instead of `url: https://my.nextcloud.provider.eu/remote.php/dav` - - Or even easier, use `features: nextcloud` and `username: tobixen@example.com` - - New `require_tls` parameter (default: `True`) prevents DNS-based downgrade attacks - - The client connection parameter `features` may now simply be a string label referencing a well-known server or cloud solution - like `features: posteo`. https://github.com/python-caldav/caldav/pull/561 - - The client connection parameter `url` is no longer needed when referencing a well-known cloud solution. https://github.com/python-caldav/caldav/pull/561 - * The client connection parameter `url` may contain just the domain name (without any slashes). It may then either look up the URL path in the known caldav server database, or through RFC6764 -* **New interface for searches** `mysearcher = caldav.CalDAVSearcher(...) ; mysearcher.add_property_filter(...) ; mysearcher.search(calendar)`. It's a bit harder to use, but opens up the possibility to do more complicated searches. -* **Collation support for CalDAV text-match queries (RFC 4791 § 9.7.5)**: CalDAV searches may now pass different collation attributes to the server, enabling case-insensitive searches. (but more work on this may be useful, see https://github.com/python-caldav/caldav/issues/567). The `CalDAVSearcher.add_property_filter()` method now accepts `case_sensitive` and `collation` parameters. 
Supported collations include: - - `i;octet` (case-sensitive, binary comparison) - default - - `i;ascii-casemap` (case-insensitive for ASCII characters, RFC 4790) - - `i;unicode-casemap` (Unicode case-insensitive, RFC 5051 - server support may vary) -* Client-side filtering method: `CalDAVSearcher.filter()` provides comprehensive client-side filtering, expansion, and sorting of calendar objects with full timezone preservation support. -* Example code: New `examples/collation_usage.py` demonstrates case-sensitive and case-insensitive calendar searches. - -### Security - -There is a major security flaw with the RFC6764 discovery. If the DNS is not trusted (public hotspot, for instance), someone can highjack the connection by spoofing the service records. The protocol also allows to downgrade from https to http. Utilizing this it may be possible to steal the credentials. Mitigations: - * DNSSEC is the ultimate soluion, but DNSSEC is not widely used. I tried implementing robust DNSSEC validation, but it was too complicated. - * Require TLS. By default, connections through the autodiscovery is required to use TLS. - * Decline domain change. If acme.com forwards to caldav.acme.com, it will be accepted, if it forward to evil.hackers.are.us the connection is declined. - -Also, the RFC6764 discovery may not always be robust, causing fallbacks and hence a non-deterministic behaviour. - -### Deprecated - -* `Event.expand_rrule` will be removed in some future release, unless someone protests. -* `Event.split_expanded` too. Both of them were used internally, now it's not. It's dead code, most likely nobody and nothing is using them. 
- -### GitHub Issues Closed - -- #574 - SECURITY: check domain name on auto-discovery (2025-11-29) - https://github.com/python-caldav/caldav/issues/574 - fixes issues introduced after previous release -- #532 - Replace compatibility flags list with compatibility matrix dict (2025-11-10) https://github.com/python-caldav/caldav/issues/532 - this process is not completely done, a new issue has been raised for mopping up the rest -- #402 - Server compatibility hints (2025-12-03) https://github.com/python-caldav/caldav/issues/402 - sort of duplicate of #532 -- #463 - Try out paths to find caldav base URL (2025-11-10) https://github.com/python-caldav/caldav/issues/463 - sort of solved through the compatbility hints file. -- #461 - Path handling error with non-standard URL formats (2025-12-02) https://github.com/python-caldav/caldav/issues/461 - the issue ended up identifying the need to work around missing server-side support for sync-token, this has been fixed -- #434 - Search event with summary (2025-11-27) https://github.com/python-caldav/caldav/issues/434 - the new search interface contains work-arounds for server-side incompatibilities as well as advanced client-side filtering -- #401 - Some server needs explicit event or task when doing search (2025-07-19) https://github.com/python-caldav/caldav/issues/401 - code now contains clean workarounds for fetching everything regardless of server side support -- #102 - Support for RFC6764 - find CalDAV URL through DNS lookup (created 2020, closed 2025-11-27) - https://github.com/python-caldav/caldav/issues/102 -- #311 - Google calendar - make authentication simpler and document it (created 2023, closed 2025-06-16) - https://github.com/python-caldav/caldav/issues/311 - no work on Google has been done, but user-contributed examples and documentation has been refactored, polished and published. 
-- #372 - Server says "Forbidden" when creating event with timezone (created 2024, closed 2025-12-03) - https://github.com/python-caldav/caldav/issues/372 - it's outside the scope supporting the old dateutil.tz objects in the CalDAV library. Checks have been added to the caldav-server-checker script to verify that the new-style Timezone objects work. -- #351 - `calendar.search`-method with timestamp filters yielding too much (created 2023, closed 2025-12-02) - https://github.com/python-caldav/caldav/issues/351 the new search interface may do client-side filtering -- #340 - 507 error during collection sync (created 2023, closed 2025-12-03) - https://github.com/python-caldav/caldav/issues/340 - this should be fixed by the new sync-tokens workaround -- #587 - Calendar.search broken with TypeError: Calendar.search() got multiple values for argument 'sort_keys' (created 2025-12-04, closed 2025-12-04) - https://github.com/python-caldav/caldav/issues/587 - this bug has indirectly been fixed through the refactorings. 
- -### GitHub Pull Requests Merged - -- #584 - Bedework server support (2025-12-04) - https://github.com/python-caldav/caldav/pull/584 -- #583 - Transparent fallback for servers not supporting sync tokens (2025-12-02) - https://github.com/python-caldav/caldav/pull/583 -- #582 - Fix docstrings in Principal and Calendar classes (2025-12-02) - https://github.com/python-caldav/caldav/pull/582 -- #581 - SOGo server support (2025-12-02) - https://github.com/python-caldav/caldav/pull/581 -- #579 - Sync-tokens compatibility feature flags (2025-11-29) - https://github.com/python-caldav/caldav/pull/579 -- #578 - Docker server testing cyrus (2025-12-02) - https://github.com/python-caldav/caldav/pull/578 -- #576 - Add RFC 6764 domain validation to prevent DNS hijacking attacks (2025-11-29) - https://github.com/python-caldav/caldav/pull/576 -- #575 - Add automated Nextcloud CalDAV/CardDAV testing framework (2025-11-29) - https://github.com/python-caldav/caldav/pull/575 -- #573 - Add Baikal Docker test server framework for CI/CD (2025-11-28) - https://github.com/python-caldav/caldav/pull/573 -- #570 - Add RFC 6764 DNS-based service discovery (2025-11-27) - https://github.com/python-caldav/caldav/pull/570 -- #569 - Improved substring search (2025-11-27) - https://github.com/python-caldav/caldav/pull/569 -- #566 - More compatibility work (2025-11-27) - https://github.com/python-caldav/caldav/pull/566 -- #563 - Refactoring search and filters (2025-11-19) - https://github.com/python-caldav/caldav/pull/563 -- #561 - Connection details in the server hints (2025-11-10) - https://github.com/python-caldav/caldav/pull/561 -- #560 - Python 3.14 support (2025-11-09) - https://github.com/python-caldav/caldav/pull/560 - -### Test Framework - -* **Automated Docker testing framework** using Docker containers, if docker is available. - * Cyrus, NextCloud and Baikal added so far. - * For all of those, automated setups with a well-known username/password combo was a challenge. 
I had planned to add more servers, but this proved to be too much work. - * The good thing is that test coverage is increased a lot for every pull request, I hope this will relieving me of a lot of pain learning that the tests fails towards real-world servers when trying to do a release. - * The bad thing is that the test runs takes a lot more time. Use `pytest -k Radicale` or `pytest -k Xandikos` - or run the tests in an environment not having access to docker if you want a quicker test run - or set up a local `conf_private.py` where you specify what servers to test. It may also be a good idea to run `start.sh` and `stop.sh` in `tests/docker-test-servers/*` manually so the container can stay up for the whole duration of the testing rather than being taken up and down for every test. - * **Docker volume cleanup**: All teardown functions should automatically prune ephemeral Docker volumes to prevent `/var/lib/docker/volumes` from filling up with leftover test data. This applies to Cyrus, Nextcloud, and Baikal test servers. -* Since the new search code now can work around different server quirks, quite some of the test code has been simplified. Many cases of "make a search, if server supports this, then assert correct number of events returned" could be collapsed to "make a search, then assert correct number of events returned" - meaning that **the library is tested rather than the server**. -* Some of the old "compatibility_flags" that is used by the test code has been moved into the new "features"-structure in `caldav/compatibility_hints.py`. Use the package caldav-server-checker to check the feature-set of your CalDAV server (though, as for now the last work done is on a separate branch. A relase will be made soon). -* Note, the `testCheckCompatibility` will be run if and only if the caldav-server-checker package is installed and available. 
If the package is installed, the version of it has to correspond exactly to the caldav version - and even then, it may break for various reasons (the caldav server tester is still under development, no stable release exists yet). The corresponding version of the package has not been released yet (it's even not merged to the main branch). I hope to improve on this somehow before the next release. It can be a very useful test - if the compatibility configuration is wrong, tests may break or be skipped for the wrong reasons. - -### Time Spent - -(The "Time Spent"-section was missing from the 2.1-release, so this includes everything since 2.0) - -The maintainer has spent around 230 hours since version 2.0.0, plus paid some money for AI-assistance from Claude. This time includes work on the two sub-projects icalendar-searcher and caldav-server-tester (not released yet). - -The estimation given at the road map was 28h for "Server checker and server compatibility hints project", 8h for "Maintain and expand the test server list", and 12h for "Outstanding issues slated for v3.0". Including the Claude efforts, consider this to be 5x as much time as estimated. - -Some few reasons of the overrun: - -* Quite much of this time has been put down into the caldav-server-tester project, and the icalendar-search project also took me a few days to complete. -* "Let's make sure to support both case-sensitive and case-insensitive search" may sound like a simple task, but collations is a major tarpit! Now I know that the correct uppercase version of "istanbul" depends on the locale used ... -* The test framework with docker contained servers was also a major tarpit. 
"Why not just spin up server X in a docker container" - it sounded trivial, but then come the hard realites: - - Most of the servers needs some extra configuration to get a test user with well-known username and password in place - - Some servers are optimized for manual "installation and configuration", rather than automated setup with an epheremal disk volume. - - Some servers have external requirements, like a stand-alone database server, requiring significant amounts of configuration for connecting the database and the calendar server (database username, password, connection details, +++) - - Docker services in the "GitHub Actions" that I use for automated external testing has to be set up completely different and independently from the local tests. This is also a tarpit as I cannot inspect and debug problems so easily, every test run takes very long time and generates several megabytes of logs. - - Luckily, with the new caldav-server-tester script it's easy to get the compatibility configuration readily set up. In theory. In practice, I need to do quite some work on the caldav-server-tester to correctly verify all the unique quirks of the new server. - - In practice, the test suite will still be breaking, requiring lots of debugging figuring out of the problems. -* Quite many other rabbit holes and tarpits have been found on the way, but I digress. This is quite a bit outside the scope of a CHANGELOG. 
- -### Credits - -The following contributors (by GitHub username) have assisted by reporting issues, submitting pull requests and provided feedback: - -@ArtemIsmagilov, @cbcoutinho, @cdce8p, @dieterbahr, @dozed, @Ducking2180, @edel-macias-cubix, @erahhal, @greve, @jannistpl, @julien4215, @Kreijstal, @lbt, @lothar-mar, @mauritium, @moi90, @niccokunzmann, @oxivanisher, @paramazo, @pessimo, @Savvasg35, @seanmills1020, @siderai, @slyon, @smurfix, @soundstorm, @thogitnet, @thomasloven, @thyssentishman, @ugniusslev, @whoamiafterall, @yuwash, @zealseeker, @Zhx-Chenailuoding, @Zocker1999NET, @SashankBhamidi, @Claude and @tobixen - -### Test runs before release - -Local docker containers and python server instances: - -* Radicale -* Xandikos -* Nextcloud -* Baikal -* Cyrus -* SOGo -* Bedework - -External servers tested: - -* eCloud (NextCloud) -* Zimbra -* Synology -* Posteo -* Baikal -* Robur - -Servers and platforms not tested this time: - -* PurelyMail (partly tested - but test runs takes EXTREMELY long time due to the search-cache server peculiarity, and the test runs still frequently fails in non-deterministic ways). -* GMX (It throws authorization errors, didn't figure out of it yet) -* DAViCal (my test server is offline) - -I should probably look more into the breakages with PurelyMail and GMX. - -Those servers ought to be tested, but I'm missing accounts/capacity to do it at the moment: - -* Google -* iCloud -* FastMail -* calendar.mail.ru -* Lark -* all-inkl.com -* OX - -## [2.1.2] - 2025-11-08 - -Version 2.1.0 comes without niquests in the dependency file. Version 2.1.2 come with niquests in the dependency file. Also fixed up some minor mistakes in the CHANGELOG. - -## [2.1.1] - 2025-11-08 [YANKED] - -Version 2.1.0 comes without niquests in the dependency file. Version 2.1.1 should come with niquests in the dependency file, but I made a mistake. 
- -## [2.1.0] - 2025-11-08 - -I'm working on a [caldav compatibility checker](https://github.com/tobixen/caldav-server-tester) side project. While doing so, I'm working on redefining the "compatibility matrix". This should only affect the test code. **If you maintain a file `tests/conf_private.py`, chances are that the latest changesets will break** Since "running tests towards private CalDAV servers" is not considered to be part of the public API, I deem this to be allowed without bumping the major version number. If you are affected and can't figure out of it, reach out by email, GitHub issue or GitHub discussions. (Frankly, I'm interessted if anyone except me uses this, so feel free to reach out also if you can figure out of it). - -As always, the new release comes with quite some bugfixes, compatibility fixes and workarounds improving the support for various calendar servers observed in the wild. - -### Potentially Breaking Changes - -* As mentioned above, if you maintain a file `tests/conf_private.py`, chances are that your test runs will break. Does anyone except me maintain a `tests/conf_private.py`-file? Please reach out by email, GitHub issues or GitHub discussions. - -### Changed - -* The search for pending tasks will not do send any complicated search requests to the server if it's flagged that the server does not support such requests. (automatically setting such flags will come in a later version) -* If the server is flagged not to support MKCALENDAR but supporting MKCOL instead (baikal), then it will use MKCOL when creating a calendar. (automatically setting such flags will come in a later version) -* In 1.5.0, I moved the compability matrix from the tests directory and into the project itself - now I'm doing a major overhaul of it. 
This change is much relevant for end users yet - but already now it's possible to configure "compatibility hints" when setting up the davclient, and the idea is that different kind of workarounds may be applied depending on the compatibility-matrix. Search without comp-type is wonky on many servers, now the `search`-method will automatically deliver a union of a search of the three different comp-types if a comp-type is not set in the parameters *and* it's declared that the compatibility matrix does not work. In parallel I'm developing a stand-alone tool caldav-server-tester to check the compatibility of a caldav server. https://github.com/python-caldav/caldav/issues/532 / https://github.com/python-caldav/caldav/pull/537 -* Littered the code with `try: import niquests as requests except: import requests`, making it easier to flap between requests and niquests. -* Use the "caldav" logger consistently instead of global logging. https://github.com/python-caldav/caldav/pull/543 - fixed by Thomas Lovden - -### Fixes - -* A search without filtering on comp-type on a calendar containing a mix of events, journals and tasks should return a mix of such. (All the examples in the RFC includes the comp-type filter, so many servers does not support this). There were a bug in the auto-detection of comp-type, so tasks would typically be wrapped as events or vice-versa. https://github.com/python-caldav/caldav/pull/540 -* Tweaks to support upcoming version 7 of the icalendar library. -* Compatibility-tweaks for baikal, but as for now manual intervention is needed - see https://github.com/python-caldav/caldav/pull/556 and https://github.com/python-caldav/caldav/issues/553 -* @thyssentishman found a missing import after the old huge `objects.py` was broken up in smaller files. Which again highlights that I probably have some dead, moot code in the project. 
https://github.com/python-caldav/caldav/pull/554 -* Bugfix on authentication - things broke on Baikal if authentication method (i.e. digest) was set in the config. I found a quite obvious bug, I did not investigate why the test code has been passing on all the other servers. Weird thing. -* Bugfix in the `davclient.principals`-method, allowing it to work on more servers - https://github.com/python-caldav/caldav/pull/559 -* Quite some compatibility-fixing of the test code - -### Added - -* Support for creating a `CalendarObjectResource` from an icalendar `Event`, `Todo` etc, and not only `Calendar`. Arguably a bugfix as it would be silently accepted and throw some arbitrary error, very confusing for end users. https://github.com/python-caldav/caldav/issues/546 - -### Other - -* Example code: Basic usage examples have been brushed up, thanks to David Greaves - https://github.com/python-caldav/caldav/pull/534 -* PEP 639 conforming license expression in the pyproject.toml, thanks to Marc Mueller - https://github.com/python-caldav/caldav/pull/538 - -## [2.0.1] - 2025-06-24 -Due to feedback we've fallen back from niquests to requests again. - -### Changes - -* I was told in https://github.com/python-caldav/caldav/issues/530 that the niquests dependency makes it impossible to package the library, so I've reverted the requests -> niquests changeset. - -## [2.0.0] - 2025-06-23 - -Here are the most important changes in 2.0: - -* Version 2.0 drops support for old python versions and replaces requests 2.x with niquests 3.x, a fork of requests. -* Major overhaul of the documentation -* Support for reading configuration from a config file or environmental variables - I didn't consider that to be within the scope of the caldav library, but why not - why should every application reinvent some configuration file format, and if an end-user have several applications based on python-caldav, why should he need to configure the caldav credentials explicitly for each of them? 
-* New method `davclient.principals()` to search for other principals on the server - and from there it's possible to do calendar searches and probe what calendars one have access to. If the server will allow it. - -### Deprecated - -* `calendar.date_search` - use `calendar.search` instead. (this one has been deprecated for a while, but only with info-logging). This is almost a drop-in replacement, except for two caveats: - * `date_search` does by default to recurrence-expansion when doing searches on closed time ranges. The default value is `False` in search (this gives better consistency - no surprise differences when changing between open-ended and closed-ended searches, but it's recommended to use `expand=True` when possible). - * In `calendar.search`, `split_expanded` is set to `True`. This may matter if you have any special code for handling recurrences in your code. If not, probably the recurrences that used to be hidden will now be visible in your search results. -* I introduced the possibility to set `expand='server'` and `expand='client'` in 1.x to force through expansion either at the server side or client side (and the default was to try server side with fallback to client side). The four possible values "`True`/`False`/`client`/`server`" does not look that good in my opinion so the two latter is now deprecated, a new parameter `server_expand=True` will force server-side expansion now (see also the Changes section) -* The `event.instance` property currently yields a vobject. For quite many years people have asked for the python vobject library to be replaced with the python icalendar objects, but I haven't been able to do that due to backward compatibility. In version 2.0 deprecation warnings will be given whenever someone uses the `event.instance` property. In 3.0, perhaps `event.instance` will yield a `icalendar` instance. Old test code has been updated to use `.vobject_instance` instead of `.instance`. 
-* `davclient.auto_conn` that was introduced just some days ago has already been renamed to `davclient.get_davclient`. - -### Added - -* `event.component` is now an alias for `event.icalendar_component`. -* `get_davclient` (earlier called `auto_conn`) is more complete now - https://github.com/python-caldav/caldav/pull/502 - https://github.com/python-caldav/caldav/issues/485 - https://github.com/python-caldav/caldav/pull/507 - * It can read from environment (including environment variable for reading from test config and for locating the config file). - * It can read from a config file. New parameter `check_config_file`, defaults to true - * It will probe default locations for the config file (`~/.config/caldav/calendar.conf`, `~/.config/caldav/calendar.yaml`, `~/.config/caldav/calendar.json`, `~/.config/calendar.conf`, `/etc/calendar.conf`, `/etc/caldav/calendar.conf` as for now) - * Improved tests (but no test for configuration section inheritance yet). - * Documentation, linked up from the reference section of the doc. - * It's allowable with a yaml config file, but the yaml module is not included in the dependencies yet ... so late imports as for now, and the import is wrapped in a try/except-block -* New method `davclient.principals()` will return all principals on server - if server permits. It can also do server-side search for a principal with a given user name - if server permits - https://github.com/python-caldav/caldav/pull/514 / https://github.com/python-caldav/caldav/issues/131 -* `todo.is_pending` returns a bool. This was an internal method, but is now promoted to a public method. Arguably, it belongs to icalendar library and not here. Piggybacked in through https://github.com/python-caldav/caldav/pull/526 -* Support for shipping `auth_type` in the connection parameters. With this it's possible to avoid an extra 401-request just to probe the authentication types. 
https://github.com/python-caldav/caldav/pull/529 / https://github.com/python-caldav/caldav/issues/523 -* If a server returns a HTML page together with the 401, there will now be another warning encouraging the user to use the new `auth_type` parameter. https://github.com/python-caldav/caldav/pull/522 / https://github.com/python-caldav/caldav/issues/517, by edel-macias-cubix. - -### Documentation and examples - -* Documentation has been through a major overhaul. -* Added some information on how to connect to Google in the doc and examples. -* Looked through and brushed up the examples, two of them are now executed by the unit tests. Added a doc section on the examples. -* Documentation issues https://github.com/python-caldav/caldav/issues/253 https://github.com/python-caldav/caldav/issues/311 https://github.com/python-caldav/caldav/issues/119 has been closed - -### Fixed - -* Support for Lark/Feishu got broken in the 1.6-release. Issue found and fixed by Hongbin Yang (github user @zealseeker) in https://github.com/python-caldav/caldav/issues/505 and https://github.com/python-caldav/caldav/pull/506 - -### Changed - -* https://github.com/python-caldav/caldav/issues/477 / https://github.com/python-caldav/caldav/pull/527 - vobject has been removed from the dependency list. If you are using `event.vobject_instance` then you need to include the vobject dependency explicitly in your project. -* The request library has been in a feature freeze for ages and may seem like a dead end. There exists a fork of the project niquests, we're migrating to that one. This means nothing except for one additional dependency. (httpx was also considered, but it's not a drop-in replacement for the requests library, and it's a risk that such a change will break compatibility with various other servers - see https://github.com/python-caldav/caldav/issues/457 for details). Work by @ArtemIsmagilov, https://github.com/python-caldav/caldav/pull/455. 
-* Expanded date searches (using either `event.search(..., expand=True)` or the deprecated `event.date_search`) will now by default do a client-side expand. This gives better consistency and probably improved performance, but makes 2.0 bug-incompatible with 1.x. -* To force server-side expansion, a new parameter server_expand can be used - -### Removed - -If you disagree with any of this, please raise an issue and I'll consider if it's possible to revert the change. - -* Support for python 3.7 and 3.8 -* Dependency on the requests library. -* The `calendar.build_date_search_query` was ripped out. (it was deprecated for a while, but only with info-logging - however, this was an obscure internal method, probably not used by anyone?) - -### Changes in test framework - -* Proxy test has been rewritten. https://github.com/python-caldav/caldav/issues/462 / https://github.com/python-caldav/caldav/pull/514 -* Some more work done on improving test coverage -* Fixed a test issue that would break arbitrarily doe to clock changes during the test run - https://github.com/python-caldav/caldav/issues/380 / https://github.com/python-caldav/caldav/pull/520 -* Added test code for some observed problem that I couldn't reproduce - https://github.com/python-caldav/caldav/issues/397 - https://github.com/python-caldav/caldav/pull/521 -* Wrote up some test code to improve code coverage - https://github.com/python-caldav/caldav/issues/93 - https://github.com/python-caldav/caldav/pull/526 - -### Time Spent - -The maintainer has spent around 49 hours totally since 1.6. That is a bit above estimate. For one thing, the configuration file change was not in the original road map for 2.0. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f4bbae22..6110246c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,7 +15,7 @@ The types used should (as for now) be one of: * "refactor" - a code change in the codebase that is neither a bugfix or a feature, but makes the code more readable, shorter, better or more maintainable. * "test" - fixes, additions or improvements that only affects the test code or the test framework. The commit may include documentation. * "docs" - changes that *only* is done to the documentation, documentation framework - this includes minor typo fixes as well as new documentation, and it includes both the user documentation under `docs/source`, other documentation files (including CHANGELOG) as well as inline comments and docstrings in the code itself. -* "other" - if nothing of the above fits +* "chore" - if nothing of the above fits The `compatibility_hints.py` has been moved from the test directory to the codebase not so very long ago. Some special rules here: diff --git a/README.md b/README.md index 906fcdba..123486c6 100644 --- a/README.md +++ b/README.md @@ -56,6 +56,8 @@ Other documentation: * [Apache License](COPYING.APACHE) * [GPL license](COPYING.GPL) +There is also a directory [docs/design](docs/design) containing lots of documents, mostly AI-generated, containing things like design decisions and other things that neither is deemed important enough to have a document on the root of the project nor deemed to be "user documentation". + The package is published at [Pypi](https://pypi.org/project/caldav) ## HTTP Libraries diff --git a/docs/design/ASYNC_DUAL_MODE.md b/docs/design/ASYNC_DUAL_MODE.md new file mode 100644 index 00000000..a6021468 --- /dev/null +++ b/docs/design/ASYNC_DUAL_MODE.md @@ -0,0 +1,20 @@ +(This document was human-written) + +As of 3.x, quite some methods in classes like Calendar, Event, etc are dual-mode sync/async. 
They will return an awaitable coroutine when run in async mode, and a value when run in sync mode. (I haven't decided how to do this in 4.x. Claude suggests to write async first and then auto-generate the sync code. I still think it may be worth doing more research into the "Sans-IO" code pattern). + +Claude seems happy with copying code and making async-versions of the sync code. (and then later, when adding new features or fixing bugs, it will be done only on the sync version - or only on the async version, whatever is in focus that day). I really hate code duplication - but for the methods that are mixing many I/O-calls with a bit of data processing, this seems to be the only trivial option. **Such methods should be marked up with inline comments, warning that the code is duplicated and that any changes should be mirrored in the other code path**. + +As for 3.x, for a method `foo` doing some preparations, some IO and then some processing of the data, those rules should be applied: + +* `foo` should *always* do the + `if self.is_async_client: return self._async_foo(...)`-logic +* `foo` should have type hints telling it may return an awaitable coroutine +* `self._async_foo` should never be called from other places +* For methods containing significant amount of preparation logic (like, two or + more code lines) before doing any IO, the + `if self.is_async_client: return self._async_foo(...)`-logic + should be moved to the last possible point in the method. +* For methods containing significant amount of processing logic after doing the IO, + split the logic out in a `_post_foo`-method. + +Now, some of the methods may return cached data if it exists, avoiding IO-operations. A wrapper class method `BaseDAVClient._value_or_coroutine` has been made to deal with this so that async users can always expect to get a coroutine.
diff --git a/docs/design/README.md b/docs/design/README.md index e673e98a..cc65c090 100644 --- a/docs/design/README.md +++ b/docs/design/README.md @@ -1,6 +1,6 @@ # CalDAV Design Documents -End-user documentation belongs under `../srv` and more or less important stuff belongs under the project root. "Everything else" may be thrown into this directory. +End-user documentation belongs under `../source` and more or less important stuff belongs under the project root. "Everything else" may be thrown into this directory. Admittedly it's quite much junk in this folder. diff --git a/docs/design/V3_CODE_REVIEW_v3.0_to_v3.2.md b/docs/design/V3_CODE_REVIEW_v3.0_to_v3.2.md new file mode 100644 index 00000000..2b28bbb9 --- /dev/null +++ b/docs/design/V3_CODE_REVIEW_v3.0_to_v3.2.md @@ -0,0 +1,316 @@ +# Code Review: v3.0.0 → v3.2 (current) + +**Date:** April 2026 +**Reviewer:** Claude Sonnet 4.6 (AI-assisted review) +**Branch:** v3.2-preparations (71 commits since tag v3.0.0) +**Scope:** `git diff v3.0.0..HEAD` — 120 files, ~10 600 insertions, ~8 100 deletions + +--- + +## Overview + +The major themes of this release cycle are: + +1. **Async (dual-mode) expansion** — many more methods made async-aware via the + `is_async_client` branch + `_async_*` companion pattern +2. **Architecture consolidation** — the `operations/` and `protocol/` sub-packages were + deleted; their code was absorbed into `base_client.py`, `response.py`, `search.py`, + and `collection.py` +3. **RFC 6638 scheduling support** — freebusy, invite accept/decline/tentative, organizer + handling, Schedule-Tag / ETag conditional PUT +4. **New user-facing APIs** — `get_icalendar_component()`, `edit_icalendar_component()`, + `etag` / `schedule_tag` properties, `get_calendars()` multi-server support +5. 
**Bug fixes and compatibility** — OX App Suite, Nextcloud 33, numerous async fixes, + `get_supported_components()`, UUID v1→v4 + +--- + +## Architecture + +### Positive: `operations/` and `protocol/` removal + +Merging ~2 000 lines of thin wrappers back into the main classes was the right call. +The indirection added complexity without benefit. + +### Concern: `response.py` is now a God-module + +**Status:** Partially fixed; the code duplication has been dealt with and squashed together with earlier work on this. + +`response.py` grew from ~200 to ~900 lines and now contains result dataclasses, six XML +parse functions, *and* the `DAVResponse` class with its own parse path. The file itself +notes: + +```python +## TODO: _parse_response_element is a simplified version of DAVResponse._parse_response +## ... both of these could be unified into a single method. +``` + +There are now **two separate XML parse pipelines** that can diverge: + +- Module-level `_parse_propfind_response()` used by the protocol-style methods + (`parse_propfind()`, `parse_calendar_query()`) +- `DAVResponse._parse_response()` used by the legacy `_find_objects_and_props()` / + `expand_simple_props()` + +This is a technical-debt trap. The TODO comment acknowledges it but there is no target +version. + +### Concern: `base_client.py` XML builders are `@staticmethod`s on a class + +**Status:** Ignored as for now. + +The `_build_propfind_body`, `_build_calendar_query_body`, etc. methods are `@staticmethod` +on `BaseDAVClient`. They do not use `self` or `cls`; they could be module-level functions. +Placing them on the class forces any caller to either hold a client reference or write +`BaseDAVClient._build_*()`. + +--- + +## The Dual-Mode Async Pattern + +### Positive + +The "branch early, return coroutine, companion `_async_*` method" pattern is consistent +across the codebase. The `_value_or_coroutine` hook for cache hits is a clever trick.
+ +### Major concern: `is_async_client` uses a string class-name comparison + +**Status:** Fixed + +```python +# davobject.py:110 +return type(self.client).__name__ == "AsyncDAVClient" +``` + +This is a string comparison against a class name. If `AsyncDAVClient` is subclassed or +renamed, this silently falls back to sync mode with no error. The existing TODO in +`docs/source/async.rst` (and `ASYNC_DESIGN_CRITIQUE.md`) calls this out. At minimum this +should use `isinstance()` or a class-level flag attribute. + +### Concern: dual-mode return types are misleading to type checkers + +**Status:** Ignored as for now. + +Methods like `get_calendars()` are typed as `list[Calendar] | Coroutine[...]`. In +practice sync callers get a list and async callers get a coroutine — but nothing in the +type system enforces that the caller actually awaits it. A sync caller who accidentally +uses an async client gets a coroutine silently dropped on the floor. The pattern is +pragmatically reasonable for v3.x, but the design docs are correct that this needs a +proper solution in v4.0. + +### Potential bug: `freebusy_request()` passes an unawaited coroutine + +**Status:** **Needs attention in 3.2.1** + +```python +# collection.py +outbox = self.schedule_outbox() # returns a coroutine in async mode, not an outbox +caldavobj = FreeBusy(...) +... +if self.is_async_client: + return self._async_freebusy_request(outbox, caldavobj) +``` + +`outbox` is an unawaited coroutine here; `_async_freebusy_request` does +`outbox = await outbox` to materialise it. That works, but it is non-obvious — a +variable named `outbox` holds a coroutine. The inline comment acknowledges this is messy. 
+ +### Bug: `_async_complete` with RRULE silently drops `save()` + +**Status:** **Needs attention in 3.2.1** + +```python +# calendarobjectresource.py — comment in _async_complete: +# _complete_recurring_* methods are sync-only for now; they internally +# call self.save() which would return an unawaited coroutine in async mode. +``` + +If a user calls `complete()` on a recurring VTODO with an async client the completion is +computed but never written to the server. This is a **silent data-loss bug**. It should +be a `raise NotImplementedError(...)` with a clear message rather than a silent no-op. + +--- + +## RFC 6638 Scheduling — New Feature + +### Positive + +- `add_organizer()` now accepts an explicit `organizer` argument and properly replaces + existing ORGANIZER fields (`_set_organizer` refactor is clean). +- Schedule-Tag / ETag conditional PUT in `_put()` follows RFC 6638 correctly. +- `accept_invite()` / `decline_invite()` / `tentatively_accept_invite()` handle both + auto-scheduling and non-auto-scheduling servers. + +### Concern: bare `assert` in `_parse_scheduling_response_objects()` + +**Status:** Fixed + +```python +assert self.tree.tag == cdav.ScheduleResponse.tag +assert response.tag == cdav.Response.tag +``` + +These are in a production code path. Python's `-O` flag strips `assert` statements. +Use `error.assert_()` or explicit `if … raise` like the rest of the codebase does. + +### Concern: repeated ETag / Schedule-Tag update block + +**Status:** Should be fixed, perhaps in 3.2.1. This code was hand-written by the guy who hates duplicated code. + +```python +## consider refactoring - this is repeated many places now +if "Etag" in r.headers: + self.props[dav.GetEtag.tag] = r.headers["Etag"] +if "Schedule-Tag" in r.headers: + self.props[cdav.ScheduleTag.tag] = r.headers["Schedule-Tag"] +``` + +This block appears at least four times in `calendarobjectresource.py`. Extract to a +`_update_tags_from_response(r)` helper method.
+ +--- + +## `response.py` — New Parse Functions + +### Positive + +`_normalize_href`, `_validate_status`, `_status_to_code` are cleaner than what they +replaced. The dataclasses (`PropfindResult`, `CalendarQueryResult`, etc.) are a good +model for structured parse results. + +### Concern: `_element_to_value` fallback returns a raw lxml element + +**Status:** Ignored as for now. Should consider this. + +```python +# end of _element_to_value() +return elem # returns an lxml _Element as a "value" +``` + +Returning a raw `_Element` as a property value is surprising and will confuse callers +expecting strings or lists of strings. This path should at minimum log a warning. + +### Concern: `_strip_to_multistatus` return type is opaque + +**Status:** Ignored as for now. Should consider this. + +```python +def _strip_to_multistatus(tree: _Element) -> "_Element | list[_Element]": +``` + +Returning either a single element or a list works because iterating an `_Element` iterates +its children — but the two cases iterate differently, and the type hint is misleading. A +brief comment explaining why both types are iterable in the same `for` loop would help +future maintainers. + +--- + +## `davobject.py` — Refactoring + +### Positive + +- `_async_get_properties` now delegates to `_post_get_properties()`, removing ~30 lines of + duplicated logic. +- `get_property()` cache hit goes through `_value_or_coroutine`, correctly returning a + coroutine in async mode. + +### Concern: `_resolve_properties` dead-code path + +**Status:** Ignored as for now. Should consider this. + +```python +error.assert_(False) +return {} # newly added +``` + +`return {}` after `error.assert_(False)` is unreachable in debug mode (the assert raises) +but reached in production mode (assert is a no-op). The intent is apparently "return a +safe fallback dict". The `return {}` should be the actual fallback; `error.assert_(False)` +should be `log.warning(...)` instead. 
As written, production mode silently returns an +empty dict while debug mode raises — the opposite of what you want. + +--- + +## `config.py` — Test Server Registry + +### Positive + +Priority-based server ordering with `_collect_test_servers` is cleaner than the previous +if-chain. + +### Minor: `_ConfiguredServer.start()` is a no-op, `is_accessible()` ignores reachability + +**Status:** Ignored as for now. Should consider this. + +```python +def start(self) -> None: + self._started = True # external — assumed already running +``` + +`is_accessible()` always returns `True` if `url` is non-empty, even if the server is +actually unreachable. Acceptable for external servers (the user configured them), but +worth documenting as a known limitation. + +--- + +## `collection.py` — Helper Functions + +### Positive + +`_extract_calendars_from_propfind_results`, `_is_calendar_resource`, `_quote_url_path`, +etc. brought in from `operations/calendarset_ops.py` are now module-level helpers in +`collection.py`, which is logical. + +### Concern: `_extract_calendar_id_from_url` swallows all exceptions + +**Status:** Ignored as for now. Should consider this. + +```python +except Exception: + log.error(f"Calendar has unexpected url {url}") +return None +``` + +Returns `None` on any URL parsing error, causing the calendar to be silently skipped +(`if not cal_id: continue`). A user with a server that returns pathological URLs gets no +calendar list and no actionable error message. Log the exception itself, or re-raise with +context. 
+ +--- + +## Minor / Style + +| Item | Assessment | +|---|---| +| `except:` → `except KeyError:` in `error.py` | Good | +| `super(ClassName, self).__init__()` → `super().__init__()` throughout | Good cleanup | +| `Optional[X]` → `X \| None` throughout | Correct for Python ≥ 3.10 | +| `isinstance(obj, (str, bytes))` → `isinstance(obj, str \| bytes)` | Correct | +| `uuid.uuid1()` → `uuid.uuid4()` in `freebusy_request` | Correct; v1 leaks MAC address | +| `## double-hash` vs `# single-hash` comment style used inconsistently | Cosmetic; worth picking one | +| Unused import `niquests` removed from `async_davclient.py` | Good | + +--- + +## Test Coverage Notes + +- RFC 6638 scheduling is well-covered by the new integration test framework. +- `_async_complete` with recurring events (the silent-save bug) has **no async test + coverage** for the RRULE path. +- `_element_to_value` fallback branch (returning a raw `_Element`) is likely untested. +- `_ConfiguredServer` in `config.py` has no unit tests. 
+ +--- + +## Issues by Severity + +| Severity | Location | Issue | +|---|---|---| +| **Bug** | `calendarobjectresource.py` `_async_complete` | RRULE path silently drops `save()` in async mode — should raise `NotImplementedError` | +| **Bug** | `response.py` `_parse_scheduling_response_objects` | Bare `assert` stripped by `-O`; use `error.assert_()` | +| **Design** | `davobject.py` `is_async_client` | String class-name comparison is fragile; use `isinstance()` or a class flag | +| **Design** | `response.py` | Two parallel XML parse pipelines can diverge; the TODO needs a target version | +| **Design** | `calendarobjectresource.py` | Repeated ETag/Schedule-Tag update block (≥4 copies); extract to helper | +| **Minor** | `response.py` `_element_to_value` | Returning raw `_Element` as fallback is surprising | +| **Minor** | `davobject.py` `_resolve_properties` | `return {}` after `error.assert_(False)` has opposite behaviour in debug vs production | +| **Minor** | `collection.py` `_extract_calendar_id_from_url` | Swallows exceptions silently; log the exception or re-raise | diff --git a/docs/source/async.rst b/docs/source/async.rst index cf95c624..a0593574 100644 --- a/docs/source/async.rst +++ b/docs/source/async.rst @@ -2,22 +2,21 @@ Async API ==================== -The caldav library provides an async-first API for use with Python's +The caldav library provides an async API for use with Python's ``asyncio``. This is useful when you need to: * Make concurrent requests to the server * Integrate with async web frameworks (FastAPI, aiohttp, etc.) * Build responsive applications that don't block on I/O -======= Caveats ======= -Async IO was introduced in version 3.0, 2026-03-03, without being tested in any production environments, and it was done by a developer not having much experience with async usage. Rough edges are to be expected. Test it very well in a staging environment before using it in production environments. 
It's probably a good idea to wait some few releases before using it in sharp production settings. +Async IO was introduced in version 3.0, 2026-03-03, without being tested in any production environments, and it was done by a developer not having much experience with async usage, and probably with a bit too much trust in AI-assistance. The quality of the async code has been lifted significantly in 3.2, but rough edges are still to be expected. Test it very well in a staging environment before using it in production environments. It's probably a good idea to wait until version 4.0 before using it in very sharp production settings. + -A "Sans-IO" design pattern was followed, in a hope that it would make it possible to have one library serve both the async and sync use case through relatively similar APIs without duplicating too much code. In retro-perspective I'm not sure this was the best idea for the CalDAV library. Be aware that there are still exists code paths that works well with the sync code but will blow up if you try using it with the async code. +We've ended up with some hybrid design pattern inspired by "Sans-IO". There is a dual `DAVClient` vs `AsyncDAVClient` with a common baseclass. On the other classes, all methods that involve or may involve IO will deliver an awaitable coroutine in async mode. I'm not sure that the current design is the best, and the design may be revisited and shaken up in 4.0. (Claude suggests that an async-first-generate-sync is the best option for CalDAV). + -Async works very well when it's crispy clear what operations causes API calls. I've been a bit careless with the old sync library, there are many places where an API call is not expected, but anyway there are things like ``self.load(only_if_unloaded=True)`` buried in the code. Works well with sync, not so much with the async code. In a 4.0-version (perhaps 2027?) there may be some major changes to the API.
+There may still be sharp edges - as of v3.2, there is 40kb of async integration test code compared to 164kb of sync integration test code, so most likely some of the less travelled code paths will blow up when using it in async mode. File an issue, and I'll prioritize fixing it! I will work more on this in an upcoming version 3.2.1. Quick Start =========== diff --git a/docs/source/http-libraries.rst b/docs/source/http-libraries.rst index 1f274ce2..9b9296a8 100644 --- a/docs/source/http-libraries.rst +++ b/docs/source/http-libraries.rst @@ -25,8 +25,7 @@ decisions, it may have been due to personal conflicts - or, perhaps the quality of the code was found not to be good enough. It works for me. I've had Claude to do a code review of niquests and urllib3 - it gave a thumbs-up for Niquests, while urllib3.future could benefit from -some refactoring (claude also recommends shedding backward -compatibility). +some refactoring. I see some possible reasons why one would like to avoid niquests: * Many projects are already dependent on requests and/or httpx, and one diff --git a/funding.json b/funding.json new file mode 100644 index 00000000..6e962bc2 --- /dev/null +++ b/funding.json @@ -0,0 +1,47 @@ +{ + "$schema": "https://fundingjson.org/schema/v1.1.0.json", + "version": "v1.1.0", + "entity": { + "type": "individual", + "role": "owner", + "name": "Tobias Brox", + "email": "funding@plann.no", + "description": "Independent open source developer maintaining python-caldav (a CalDAV client library used by Home Assistant and many other projects) and plann (a CalDAV-based CLI for task and calendar management). 
Funding helps prioritise features and fixes requested by donors.", + "webpageUrl": { + "url": "https://plann.no" + } + }, + "funding": { + "channels": [ + { + "guid": "github-sponsors", + "type": "payment-provider", + "address": "https://github.com/sponsors/tobixen", + "description": "GitHub Sponsors" + }, + { + "guid": "bank-iban", + "type": "bank", + "address": "DE90100110012753397999" + }, + { + "guid": "btc", + "type": "other", + "address": "bc1qd63vwz0v77r5us4c8kn4l9mx9cw9pgjd47mlf9", + "description": "Bitcoin" + } + ], + "plans": [ + { + "guid": "general", + "status": "active", + "name": "General support", + "description": "Any contribution is welcome. Quite a lot of the work here is done on a hobby basis - I do have to prioritize my day job. Additional funding allows me to take out unpaid vacation days and work more with this library. Funding from YOU would motivate me to work on YOUR pet issues or feature requests - but please don't have high expectations after throwing money at me - better to negotiate in advance and pay me after the feature/bugfix is implemented", + "amount": 0, + "currency": "EUR", + "frequency": "monthly", + "channels": ["github-sponsors", "bank-iban", "btc"] + } + ] + } +} From b1609565888c21ba2ff3039dabf8b4ecd1cdf815 Mon Sep 17 00:00:00 2001 From: Tobias Brox Date: Fri, 24 Apr 2026 17:44:12 +0200 Subject: [PATCH 17/17] chore: posteo compatibility observation --- caldav/compatibility_hints.py | 1 + 1 file changed, 1 insertion(+) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 3f730e63..b6afc612 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -1313,6 +1313,7 @@ def dotted_feature_set_list(self, compact=False): ## foo ... "full" observed for the next two, 70938dc1cbb6a839978eee4315699746d38ee5f0/3cae24cf99da1702b851b5a74a9b88c8e5317dad, 2026-02-17 ## bar ...
3cae24cf99da1702b851b5a74a9b88c8e5317dad was probably the rotten commit, ungraceful again in be26d42b1ca3ff3b4fd183761b4a9b024ce12b84 / 537a23b145487006bb987dee5ab9e00cdebb0492 'search.comp-type.optional': {'support': 'ungraceful'}, + 'search.recurrences.includes-implicit.infinite-scope': False, #'search.text.case-sensitive': {'support': 'unsupported'}, ## Comment from claude: ## Text search precondition check returns unexpected results on posteo