Commit

Merge 0b11c99 into 4f10c05
rossengeorgiev committed Feb 23, 2019
2 parents 4f10c05 + 0b11c99 commit c3fa309
Showing 2 changed files with 106 additions and 12 deletions.
50 changes: 50 additions & 0 deletions tests/test_vdf.py
@@ -10,6 +10,21 @@
import vdf


class testcase_helpers_escapes(unittest.TestCase):
# https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/sp/src/tier1/utlbuffer.cpp#L57-L68
esc_chars_raw = "aa\n\t\v\b\r\f\a\\?\"'bb"
esc_chars_escaped = 'aa\\n\\t\\v\\b\\r\\f\\a\\\\\\?\\"\\\'bb'

def test_escape(self):
self.assertEqual(vdf._escape(self.esc_chars_raw), self.esc_chars_escaped)

def test_unescape(self):
self.assertEqual(vdf._unescape(self.esc_chars_escaped), self.esc_chars_raw)

def test_escape_unescape(self):
self.assertEqual(vdf._unescape(vdf._escape(self.esc_chars_raw)), self.esc_chars_raw)
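
As a readability aid (not part of the commit), the two class constants above can be rebuilt one fragment per escape sequence; this is only a sketch assuming standard Python string-literal semantics, with raw and escaped as throwaway local names:

# control characters, then backslash, '?', '"', "'"
raw = "aa" + "\n\t\v\b\r\f\a" + "\\" + "?" + '"' + "'" + "bb"
escaped = "aa" + r"\n\t\v\b\r\f\a" + r"\\" + r"\?" + r'\"' + r"\'" + "bb"
assert raw == testcase_helpers_escapes.esc_chars_raw
assert escaped == testcase_helpers_escapes.esc_chars_escaped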


class testcase_helpers_load(unittest.TestCase):
def setUp(self):
self.f = StringIO()
@@ -140,6 +155,7 @@ def test_keyvalue_pairs(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_keyvalue_open_quoted(self):
INPUT = (
@@ -157,6 +173,7 @@ def test_keyvalue_open_quoted(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_multi_keyvalue_pairs(self):
INPUT = '''
@@ -188,6 +205,7 @@ def test_multi_keyvalue_pairs(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_deep_nesting(self):
INPUT = '''
@@ -236,6 +254,7 @@ def test_deep_nesting(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_comments_and_blank_lines(self):
INPUT = '''
@@ -274,6 +293,7 @@ def test_comments_and_blank_lines(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_hash_key(self):
INPUT = '#include "asd.vdf"'
@@ -285,6 +305,7 @@ def test_hash_key(self):
EXPECTED = {'#base': 'asd.vdf'}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_wierd_symbols_for_unquoted(self):
INPUT = 'a asd.vdf\nb language_*lol*\nc zxc_-*.sss//'
@@ -295,6 +316,7 @@ def test_wierd_symbols_for_unquoted(self):
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)

def test_merge_multiple_keys_on(self):
INPUT = '''
@@ -313,6 +335,7 @@ def test_merge_multiple_keys_on(self):
EXPECTED = {'a': {'a': '3', 'b': '2', 'c': '4'}}

self.assertEqual(vdf.loads(INPUT, merge_duplicate_keys=True), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False, merge_duplicate_keys=True), EXPECTED)

def test_merge_multiple_keys_off(self):
INPUT = '''
@@ -331,6 +354,33 @@ def test_merge_multiple_keys_off(self):
EXPECTED = {'a': {'a': '3', 'c': '4'}}

self.assertEqual(vdf.loads(INPUT, merge_duplicate_keys=False), EXPECTED)
self.assertEqual(vdf.loads(INPUT, escaped=False, merge_duplicate_keys=False), EXPECTED)

def test_escape_before_last(self):
INPUT = r'''
"aaa\\" "1"
"1" "bbb\\"
'''

EXPECTED = {
"aaa\\": "1",
"1": "bbb\\",
}

self.assertEqual(vdf.loads(INPUT), EXPECTED)

def test_escape_before_last_unescaped(self):
INPUT = r'''
"aaa\\" "1"
"1" "bbb\\"
'''

EXPECTED = {
"aaa\\\\": "1",
"1": "bbb\\\\",
}

self.assertEqual(vdf.loads(INPUT, escaped=False), EXPECTED)


class testcase_VDF_other(unittest.TestCase):
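
Before the second file, a brief sketch of what the last two tests above pin down at the API level; the input and expected outputs are taken straight from those tests, and only vdf.loads with the new escaped flag is used:

import vdf

s = r'"aaa\\" "1"'
# escaped=True (the default) collapses the doubled backslash even though
# it sits immediately before the closing quote of the key
assert vdf.loads(s) == {"aaa\\": "1"}
# escaped=False keeps both backslashes verbatim
assert vdf.loads(s, escaped=False) == {"aaa\\\\": "1"}
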
68 changes: 56 additions & 12 deletions vdf/__init__.py
@@ -29,8 +29,36 @@ def strip_bom(line):
def strip_bom(line):
return line.lstrip(BOMS if isinstance(line, str) else BOMS_UNICODE)


def parse(fp, mapper=dict, merge_duplicate_keys=True):
# string escaping
_unescape_char_map = {
r"\n": "\n",
r"\t": "\t",
r"\v": "\v",
r"\b": "\b",
r"\r": "\r",
r"\f": "\f",
r"\a": "\a",
r"\\": "\\",
r"\?": "?",
r"\"": "\"",
r"\'": "\'",
}
_escape_char_map = {v: k for k, v in _unescape_char_map.items()}

def _re_escape_match(m):
return _escape_char_map[m.group()]

def _re_unescape_match(m):
return _unescape_char_map[m.group()]

def _escape(text):
return re.sub(r"[\n\t\v\b\r\f\a\\\?\"']", _re_escape_match, text)

def _unescape(text):
return re.sub(r"(\\n|\\t|\\v|\\b|\\r|\\f|\\a|\\\\|\\\?|\\\"|\\')", _re_unescape_match, text)

# parsing and dumping for KV1
def parse(fp, mapper=dict, merge_duplicate_keys=True, escaped=True):
"""
Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a VDF)
to a Python object.
@@ -51,10 +79,10 @@ def parse(fp, mapper=dict, merge_duplicate_keys=True):
stack = [mapper()]
expect_bracket = False

re_keyvalue = re.compile(r'^("(?P<qkey>(?:\\.|[^\\"])+)"|(?P<key>#?[a-z0-9\-\_]+))'
re_keyvalue = re.compile(r'^("(?P<qkey>(?:\\.|[^\\"])+)"|(?P<key>#?[a-z0-9\-\_\\\?]+))'
r'([ \t]*('
r'"(?P<qval>(?:\\.|[^\\"])*)(?P<vq_end>")?'
r'|(?P<val>[a-z0-9\-\_\*\.]+)'
r'|(?P<val>[a-z0-9\-\_\\\?\*\.]+)'
r'))?',
flags=re.I)
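# The quoted alternatives accept any backslash escape via '\\.'; the unquoted
# key/value classes now also allow '\' and '?', so a token such as lang_\de?
# parses as one unquoted token instead of the match stopping at the backslash.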

@@ -98,6 +126,9 @@ def parse(fp, mapper=dict, merge_duplicate_keys=True):
key = match.group('key') if match.group('qkey') is None else match.group('qkey')
val = match.group('val') if match.group('qval') is None else match.group('qval')

if escaped:
key = _unescape(key)

# we have a key with value in parenthesis, so we make a new dict obj (level deeper)
if val is None:
if merge_duplicate_keys and key in stack[-1]:
@@ -120,7 +151,7 @@ def parse(fp, mapper=dict, merge_duplicate_keys=True):
except StopIteration:
raise SyntaxError("vdf.parse: unexpected EOF (open value quote?)")

stack[-1][key] = val
stack[-1][key] = _unescape(val) if escaped else val

# exit the loop
break
@@ -155,19 +186,21 @@ def load(fp, **kwargs):
return parse(fp, **kwargs)


def dumps(obj, pretty=False):
def dumps(obj, pretty=False, escaped=True):
"""
Serialize ``obj`` to a VDF formatted ``str``.
"""
if not isinstance(obj, dict):
raise TypeError("Expected data to be an instance of``dict``")
if not isinstance(pretty, bool):
raise TypeError("Expected pretty to be bool")
raise TypeError("Expected pretty to be of type bool")
if not isinstance(escaped, bool):
raise TypeError("Expected escaped to be of type bool")

return ''.join(_dump_gen(obj, pretty))
return ''.join(_dump_gen(obj, pretty, escaped))


def dump(obj, fp, pretty=False):
def dump(obj, fp, pretty=False, escaped=True):
"""
Serialize ``obj`` as a VDF formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
@@ -176,28 +209,39 @@ def dump(obj, fp, pretty=False):
raise TypeError("Expected data to be an instance of``dict``")
if not hasattr(fp, 'write'):
raise TypeError("Expected fp to have write() method")
if not isinstance(pretty, bool):
raise TypeError("Expected pretty to be of type bool")
if not isinstance(escaped, bool):
raise TypeError("Expected escaped to be of type bool")

for chunk in _dump_gen(obj, pretty):
for chunk in _dump_gen(obj, pretty, escaped):
fp.write(chunk)


def _dump_gen(data, pretty=False, level=0):
def _dump_gen(data, pretty=False, escaped=True, level=0):
indent = "\t"
line_indent = ""

if pretty:
line_indent = indent * level

for key, value in data.items():
if escaped and isinstance(key, string_type):
key = _escape(key)

if isinstance(value, dict):
yield '%s"%s"\n%s{\n' % (line_indent, key, line_indent)
for chunk in _dump_gen(value, pretty, level+1):
for chunk in _dump_gen(value, pretty, escaped, level+1):
yield chunk
yield "%s}\n" % line_indent
else:
if escaped and isinstance(value, string_type):
value = _escape(value)

yield '%s"%s" "%s"\n' % (line_indent, key, value)


# binary VDF
class BASE_INT(int_type):
def __repr__(self):
return "%s(%d)" % (self.__class__.__name__, self)
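
To round things off, a small usage sketch of the new keyword as seen from the public API; the sample data is invented for illustration, but only dumps and loads as defined above are used:

import vdf

data = {'msg': 'line one\nline two', 'path': 'C:\\Games\\Steam'}

text = vdf.dumps(data)  # escaped=True is the default
# newlines and backslashes are written as the two-character sequences
# \n and \\, so every key/value pair stays on a single line of output
assert r'\n' in text and r'\\' in text

# loads() with the default escaped=True restores the original characters,
# so the round trip is lossless
assert vdf.loads(text) == data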
