Skip to content

Commit

Permalink
Rewrite DAV storages' encoding behavior
Browse files Browse the repository at this point in the history
This is more explicit than the old behavior. See
Kozea/Radicale#128 for the discussion that led
to this.
  • Loading branch information
untitaker committed Dec 7, 2014
1 parent 07de8a0 commit 5546560
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 38 deletions.
26 changes: 1 addition & 25 deletions tests/storage/dav/test_main.py
Expand Up @@ -20,8 +20,7 @@

import vdirsyncer.exceptions as exceptions
from vdirsyncer.storage.base import Item
from vdirsyncer.storage.dav import CaldavStorage, CarddavStorage, \
_normalize_href
from vdirsyncer.storage.dav import CaldavStorage, CarddavStorage

from .. import StorageTests, format_item

Expand Down Expand Up @@ -183,26 +182,3 @@ class TestCarddavStorage(DavStorageTests):
@pytest.fixture
def item_template(self, request):
return VCARD_TEMPLATE


@pytest.mark.parametrize('base,path', [
    ('http://example.com/', ''),
    ('http://example.com/L%C3%98/', '/L%C3%98'),
    ('http://example.com/LØ/', '/L%C3%98'),
])
def test_normalize_href(base, path):
    '''For each base URL, relative hrefs must resolve to the base's path
    plus a consistently percent-encoded name: ``@`` is left literal
    (whether given raw or as ``%40``) while non-ASCII characters are
    percent-encoded (whether given raw or already encoded).'''
    cases = [
        ('asdf', '/asdf'),
        ('hahah', '/hahah'),
        ('whoops@vdirsyncer.vcf', '/whoops@vdirsyncer.vcf'),
        ('whoops%40vdirsyncer.vcf', '/whoops@vdirsyncer.vcf'),
        ('wh%C3%98ops@vdirsyncer.vcf', '/wh%C3%98ops@vdirsyncer.vcf'),
        ('whØops@vdirsyncer.vcf', '/wh%C3%98ops@vdirsyncer.vcf'),
    ]
    for href, expected_tail in cases:
        assert _normalize_href(base, href) == path + expected_tail
26 changes: 13 additions & 13 deletions vdirsyncer/storage/dav.py
Expand Up @@ -24,19 +24,18 @@
CALDAV_DT_FORMAT = '%Y%m%dT%H%M%SZ'


def _normalize_href(base, href):
    '''Normalize the href to be a path only relative to hostname and
    schema.

    The href is resolved against *base* (so relative hrefs work) and then
    reduced to its path component, discarding scheme, host, query and
    fragment.  No percent-(de|en)coding is performed here; callers use
    ``_encode_href``/``_decode_href`` explicitly instead.

    :param base: Absolute base URL of the collection.
    :param href: Absolute or relative href as returned by the server.
    :returns: The path of the resolved URL.
    :raises ValueError: If *href* is empty/falsy.
    '''
    if not href:
        raise ValueError(href)
    # urljoin handles both absolute hrefs (returned unchanged) and
    # relative ones (joined onto base).
    x = utils.urlparse.urljoin(base, href)
    x = utils.urlparse.urlsplit(x).path
    return x
# Explicit percent-encoding helpers for hrefs sent to / received from the
# server.  Thin aliases for the compat-module quote/unquote functions;
# call sites apply them deliberately instead of _normalize_href doing
# implicit (de|en)coding rounds.
_encode_href = utils.compat.urlquote
_decode_href = utils.compat.urlunquote


class Discover(object):
Expand Down Expand Up @@ -305,7 +304,7 @@ def get_multi(self, hrefs):
for href in uniq(hrefs):
if href != self._normalize_href(href):
raise exceptions.NotFoundError(href)
href_xml.append('<D:href>{}</D:href>'.format(href))
href_xml.append('<D:href>{}</D:href>'.format(_encode_href(href)))
if not href_xml:
return ()

Expand All @@ -320,8 +319,8 @@ def get_multi(self, hrefs):
rv = []
hrefs_left = set(hrefs)
for element in root.iter('{DAV:}response'):
href = self._normalize_href(
element.find('{DAV:}href').text)
href = self._normalize_href(_decode_href(
element.find('{DAV:}href').text))
raw = element \
.find('{DAV:}propstat') \
.find('{DAV:}prop') \
Expand Down Expand Up @@ -358,7 +357,7 @@ def _put(self, href, item, etag):

response = self.session.request(
'PUT',
href,
_encode_href(href),
data=item.raw.encode('utf-8'),
headers=headers
)
Expand Down Expand Up @@ -388,7 +387,7 @@ def delete(self, href, etag):

self.session.request(
'DELETE',
href,
_encode_href(href),
headers=headers
)

Expand Down Expand Up @@ -420,7 +419,8 @@ def _dav_query(self, xml):

contenttype = prop.find('{DAV:}getcontenttype').text

href = self._normalize_href(element.find('{DAV:}href').text)
href = _decode_href(self._normalize_href(
element.find('{DAV:}href').text))
etag = prop.find('{DAV:}getetag').text
if not etag:
raise ValueError('Server did not return an etag for item {}. '
Expand Down Expand Up @@ -639,8 +639,8 @@ def list(self):

# Decode twice because ownCloud encodes twice.
# See https://github.com/owncloud/contacts/issues/581
href = self._normalize_href(element.find('{DAV:}href').text,
decoding_rounds=2)
href = self._normalize_href(
_decode_href(_decode_href(element.find('{DAV:}href').text)))
etag = prop.find('{DAV:}getetag').text

if href in hrefs:
Expand Down

0 comments on commit 5546560

Please sign in to comment.