
Commit

fixes #12152
Araq committed Sep 10, 2019
1 parent ee36931 commit 2f10f5f
Showing 2 changed files with 28 additions and 0 deletions.
4 changes: 4 additions & 0 deletions changelog.md
@@ -47,6 +47,10 @@ type
- Added `system.getOsFileHandle` which is usually more useful
  than `system.getFileHandle`. This distinction is only meaningful on
  Windows.
- Added a `json.parseJsonFragments` iterator that can be used to speed up
  JSON processing substantially when there are JSON fragments separated
  by whitespace.
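
A minimal usage sketch for the new iterator, assuming the fragments come in
through a `streams.newStringStream`:

import json, streams

# Several whitespace-separated JSON fragments in one input.
let data = """[1,2,3] {"hi":3} 12 [] """

for node in parseJsonFragments(newStringStream(data)):
  echo node   # each fragment arrives as its own JsonNode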


## Library changes

24 changes: 24 additions & 0 deletions lib/pure/json.nim
@@ -841,10 +841,27 @@ proc parseJson(p: var JsonParser): JsonNode =
    raiseParseErr(p, "{")

when not defined(js):
  iterator parseJsonFragments*(s: Stream, filename: string = ""): JsonNode =
    ## Parses from a stream `s` into `JsonNodes`. `filename` is only needed
    ## for nice error messages.
    ## The JSON fragments are separated by whitespace. This can be substantially
    ## faster than the comparable loop
    ## ``for x in splitWhitespace(s): yield parseJson(x)``.
    ## This closes the stream `s` after it's done.
    var p: JsonParser
    p.open(s, filename)
    try:
      discard getTok(p) # read first token
      while p.tok != tkEof:
        yield p.parseJson()
    finally:
      p.close()

  proc parseJson*(s: Stream, filename: string = ""): JsonNode =
    ## Parses from a stream `s` into a `JsonNode`. `filename` is only needed
    ## for nice error messages.
    ## If `s` contains extra data, it will raise `JsonParsingError`.
    ## This closes the stream `s` after it's done.
    var p: JsonParser
    p.open(s, filename)
    try:
@@ -1778,3 +1795,10 @@ when isMainModule:
  )

  doAssert(obj == to(%obj, type(obj)))

  const fragments = """[1,2,3] {"hi":3} 12 [] """
  var res = ""
  for x in parseJsonFragments(newStringStream(fragments)):
    res.add($x)
    res.add " "
  doAssert res == fragments
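
For contrast, the "comparable loop" mentioned in the iterator's doc comment
would look roughly like the sketch below. It first materializes each fragment
as a substring and then runs a full `parseJson` over it (and only works when
the fragments themselves contain no whitespace), which is why the streaming
iterator can be substantially faster. Calling plain `parseJson` on the whole
input would instead raise `JsonParsingError`, since everything after the first
fragment counts as extra data.

import json, strutils

# Naive alternative: split on whitespace first, then parse every
# substring separately. Assumes no whitespace inside a fragment.
let data = """[1,2,3] {"hi":3} 12 [] """
for fragment in splitWhitespace(data):
  echo parseJson(fragment)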
