Commit

Merge cdbe451 into e4752c3
jacebrowning committed Sep 3, 2021
2 parents e4752c3 + cdbe451 commit 675fa2b
Showing 4 changed files with 38 additions and 3 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,7 @@
+# 0.15.2 (2021-09-03)
+
+- Fixed `ValueError` when loading enum values from TOML files.
+
 # 0.15.1 (2021-07-01)
 
 - Fixed handling of no file extension to use YAML by default.
4 changes: 2 additions & 2 deletions datafiles/formats.py
@@ -9,7 +9,7 @@
 
 import log
 
-from . import types
+from . import types, utils
 
 
 _REGISTRY: Dict[str, type] = {}
@@ -66,7 +66,7 @@ def extensions(cls):
     def deserialize(cls, file_object):
         import tomlkit
 
-        return tomlkit.loads(file_object.read())
+        return utils.dictify(tomlkit.loads(file_object.read()))
 
     @classmethod
     def serialize(cls, data):
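The one-line change above routes the parsed TOML document through `utils.dictify` before it reaches the converters. The real helper lives in `datafiles/utils.py` and may be implemented differently; a minimal sketch of the idea, assuming it simply unwraps tomlkit's mapping, array, and scalar wrapper types into plain Python objects, looks like this:

```python
# Hypothetical sketch of a dictify-style helper; datafiles.utils.dictify
# may differ in detail.
from collections.abc import Mapping
from typing import Any


def dictify(value: Any) -> Any:
    """Recursively convert parsed TOML containers into plain Python objects."""
    if isinstance(value, Mapping):
        # Covers tomlkit's TOMLDocument and Table types.
        return {key: dictify(item) for key, item in value.items()}
    if isinstance(value, list):
        return [dictify(item) for item in value]
    if isinstance(value, bool):  # check bool before int: bool is an int subclass
        return value
    if isinstance(value, int):
        return int(value)  # unwrap tomlkit's integer wrapper into a plain int
    if isinstance(value, float):
        return float(value)
    if isinstance(value, str):
        return str(value)
    return value
```

Returning plain containers and scalars presumably lets the enum converter see an ordinary `int` or `str` rather than a tomlkit wrapper object, which is the `ValueError` the changelog entry refers to.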
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 
 name = "datafiles"
-version = "0.15.1"
+version = "0.15.2"
 description = "File-based ORM for dataclasses."
 
 license = "MIT"
31 changes: 31 additions & 0 deletions tests/test_extended_converters.py
@@ -3,6 +3,8 @@
 # pylint: disable=unused-variable
 
 
+from enum import Enum
+
 import pytest
 
 from datafiles import datafile
@@ -111,3 +113,32 @@ def with_extra_newlines(sample, expect):
         sample.datafile.save()
 
         expect(sample.text) == "abc\ndef\n"
+
+
+def describe_enum():
+    def as_toml(expect):
+        class FileOutputType(Enum):
+            IN_MESSAGE = 1
+            FILESYSTEM = 2
+
+        @datafile('../tmp/sample.toml')
+        class Sample:
+            path_type: FileOutputType = FileOutputType.IN_MESSAGE
+
+        sample = Sample()
+        sample.path_type = FileOutputType.FILESYSTEM
+
+        expect(read('tmp/sample.toml')) == dedent(
+            """
+            path_type = 2
+            """
+        )
+
+        write(
+            'tmp/sample.toml',
+            """
+            path_type = 1
+            """,
+        )
+
+        expect(sample.path_type) == FileOutputType.IN_MESSAGE

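The new test covers the round trip through the TOML format. As a rough standalone illustration of the behavior it verifies (the file path and the run-from-script-directory assumption are illustrative, not from the commit), the fix means an enum-typed field now loads back from a TOML file without raising `ValueError`:

```python
from enum import Enum
from pathlib import Path

from datafiles import datafile


class FileOutputType(Enum):
    IN_MESSAGE = 1
    FILESYSTEM = 2


@datafile('./sample.toml')  # illustrative path; the test above uses '../tmp/sample.toml'
class Sample:
    path_type: FileOutputType = FileOutputType.IN_MESSAGE


sample = Sample()
sample.path_type = FileOutputType.FILESYSTEM
# datafiles saves on assignment, so sample.toml now contains: path_type = 2

# Simulate an external edit, as the test does with write(); this assumes the
# script is run from its own directory so both paths refer to the same file.
Path('sample.toml').write_text("path_type = 1\n")

# Reading the attribute reloads the file and maps 1 back to the enum member
# instead of raising ValueError (the bug fixed in 0.15.2).
assert sample.path_type == FileOutputType.IN_MESSAGE
```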