Skip to content

Commit

Permalink
Merge branch 'release/v1.8.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
maoe committed Jun 19, 2020
2 parents ab228a6 + bda504f commit a436732
Show file tree
Hide file tree
Showing 12 changed files with 275 additions and 118 deletions.
6 changes: 3 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,15 @@
#
# For more information, see https://github.com/haskell-CI/haskell-ci
#
# version: 0.10.1
# version: 0.10.2
#
version: ~> 1.0
language: c
os: linux
dist: xenial
env:
global:
- INFLUXDB_VERSION=1.7.10
- INFLUXDB_VERSION=1.8.0
git:
# whether to recursively clone submodules
submodules: false
Expand Down Expand Up @@ -171,5 +171,5 @@ script:
- rm -f cabal.project.local
- ${CABAL} v2-build $WITHCOMPILER --disable-tests --disable-benchmarks all

# REGENDATA ("0.10.1",["influxdb.cabal","--ghc-head","--travis-patches=.travis/influxdb.patch"])
# REGENDATA ("0.10.2",["influxdb.cabal","--ghc-head","--travis-patches=.travis/influxdb.patch"])
# EOF
2 changes: 1 addition & 1 deletion .travis/influxdb.patch
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ index c9bac24..2fcb7ab 100644
dist: xenial
+env:
+ global:
+ - INFLUXDB_VERSION=1.7.10
+ - INFLUXDB_VERSION=1.8.0
git:
# whether to recursively clone submodules
submodules: false
Expand Down
15 changes: 15 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,20 @@
# Revision history for influxdb

## v1.8.0 - 2020-06-19

This release reworked the `QueryResults` type class. There are some breaking changes:

* `parseResults` has been deprecated. `QueryResults` now has a `parseMeasurement` method.
* `Decoder` has been monomorphized so that it can be used with lens. The original `Decoder` type has been renamed to `SomeDecoder`.
* `QueryParams` now has a `decoder` field.
* `parseResults` and `parseResultsWith` had been using `lenientDecoder` and it caused some unintuitive behavior ([#64](https://github.com/maoe/influxdb-haskell/issues/64), [#66](https://github.com/maoe/influxdb-haskell/issues/66)). Now they use `strictDecoder` instead.
* `parseErrorObject` now doesn't fail. It returns the error message of a response.
* `parseQueryField` which has been deprecated is now deleted.
* `QueryResults` instance for `ShowSeries` was broken. This is fixed.
* The constructor of `Decoder`, `parseResultsWith`, and `parseResultsWithDecoder` have been hidden from the top-level module. They're still available from `Database.InfluxDB.JSON`.

See [#68](https://github.com/maoe/influxdb-haskell/pull/68/files) for how to migrate your code from v1.7.x to v1.8.x.

## v1.7.1.6 - 2020-06-03

* Relax upper version bound for doctest
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
[![Hackage CI](https://matrix.hackage.haskell.org/api/v2/packages/influxdb/badge)](https://matrix.hackage.haskell.org/package/influxdb)
[![Gitter](https://badges.gitter.im/maoe/influxdb-haskell.svg)](https://gitter.im/maoe/influxdb-haskell?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)

Currently this library is tested against InfluxDB 1.7.
Currently this library is tested against InfluxDB 1.8.

## Getting started

Expand Down
3 changes: 1 addition & 2 deletions examples/random-points.hs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ViewPatterns #-}
import Data.Foldable
Expand Down Expand Up @@ -76,7 +75,7 @@ data Row = Row
} deriving Show

instance QueryResults Row where
parseResults prec = parseResultsWith $ \_ _ columns fields -> do
parseMeasurement prec _ _ columns fields = do
rowTime <- getField "time" columns fields >>= parsePOSIXTime prec
String name <- getField "value" columns fields
rowValue <- case name of
Expand Down
16 changes: 16 additions & 0 deletions hie.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
cradle:
cabal:
- path: "./src"
component: "lib:influxdb"

- path: "./tests"
component: "influxdb:test:doctests"

- path: "./tests"
component: "influxdb:test:regressions"

- path: "./examples/random-points.hs"
component: "influxdb:exe:influx-random-points"

- path: "./examples/write-udp.hs"
component: "influxdb:exe:influx-write-udp"
4 changes: 3 additions & 1 deletion influxdb.cabal
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
cabal-version: 1.24
name: influxdb
version: 1.7.1.6
version: 1.8.0
synopsis: Haskell client library for InfluxDB
description:
@influxdb@ is an InfluxDB client library for Haskell.
Expand Down Expand Up @@ -115,10 +115,12 @@ test-suite regressions
base
, containers
, influxdb
, lens
, tasty
, tasty-hunit
, time
, raw-strings-qq >= 1.1 && < 1.2
, vector
ghc-options: -Wall -threaded
hs-source-dirs: tests
default-language: Haskell2010
Expand Down
8 changes: 3 additions & 5 deletions src/Database/InfluxDB.hs
Original file line number Diff line number Diff line change
Expand Up @@ -45,20 +45,18 @@ module Database.InfluxDB
, QueryParams
, queryParams
, authentication
, decoder

-- ** Parsing results
, QueryResults(..)
, parseResultsWith
, parseResultsWithDecoder
, Decoder(..)
, Decoder
, lenientDecoder
, strictDecoder
, getField
, getTag
, parseJSON
, parseUTCTime
, parsePOSIXTime
, parseQueryField

-- *** Re-exports from tagged
, Tagged(..)
Expand Down Expand Up @@ -200,7 +198,7 @@ data CPUUsage = CPUUsage
, cpuIdle, cpuSystem, cpuUser :: Double
} deriving Show
instance QueryResults CPUUsage where
parseResults prec = parseResultsWithDecoder strictDecoder $ \_ _ columns fields -> do
parseMeasurement prec _name _tags columns fields = do
time <- getField "time" columns fields >>= parseUTCTime prec
cpuIdle <- getField "idle" columns fields >>= parseJSON
cpuSystem <- getField "system" columns fields >>= parseJSON
Expand Down
113 changes: 64 additions & 49 deletions src/Database/InfluxDB/JSON.hs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
module Database.InfluxDB.JSON
Expand All @@ -12,6 +13,7 @@ module Database.InfluxDB.JSON

-- ** Decoder settings
, Decoder(..)
, SomeDecoder(..)
, strictDecoder
, lenientDecoder

Expand All @@ -24,7 +26,6 @@ module Database.InfluxDB.JSON
, parseUTCTime
, parsePOSIXTime
, parseRFC3339
, parseQueryField
-- ** Utility functions
, parseResultsObject
, parseSeriesObject
Expand All @@ -34,8 +35,10 @@ module Database.InfluxDB.JSON
import Control.Applicative
import Control.Exception
import Control.Monad
import qualified Control.Monad.Fail as Fail
import Data.Foldable
import Data.Maybe
import Prelude
import qualified Control.Monad.Fail as Fail

import Data.Aeson
import Data.HashMap.Strict (HashMap)
Expand All @@ -52,69 +55,100 @@ import qualified Data.Vector as V

import Database.InfluxDB.Types

-- | Parse a JSON response with the 'lenientDecoder'. This can be useful to
-- implement the 'Database.InfluxDB.Query.parseResults' method.
-- | Parse a JSON response with the 'strictDecoder'.
parseResultsWith
:: (Maybe Text -> HashMap Text Text -> Vector Text -> Array -> A.Parser a)
-- ^ A parser that takes
-- ^ A parser that parses a measurement. A measurement consists of
--
-- 1. an optional name of the series
-- 2. a map of tags
-- 3. an array of field names
-- 4. an array of values
--
-- to construct a value.
-> Value
-- 3. an array of field keys
-- 4. an array of field values
-> Value -- ^ JSON response
-> A.Parser (Vector a)
parseResultsWith = parseResultsWithDecoder lenientDecoder
parseResultsWith = parseResultsWithDecoder strictDecoder

-- | Parse a JSON response with the specified decoder settings.
parseResultsWithDecoder
:: Decoder a
:: Decoder
-> (Maybe Text -> HashMap Text Text -> Vector Text -> Array -> A.Parser a)
-- ^ A parser that takes
-- ^ A parser that parses a measurement. A measurement consists of
--
-- 1. an optional name of the series
-- 2. a map of tags
-- 3. an array of field names
-- 4. an array of values
--
-- to construct a value.
-> Value
-- 3. an array of field keys
-- 4. an array of field values
-> Value -- ^ JSON response
-> A.Parser (Vector a)
parseResultsWithDecoder Decoder {..} row val0 = success
parseResultsWithDecoder (Decoder SomeDecoder {..}) row val0 = do
r <- foldr1 (<|>)
[ Left <$> parseErrorObject val0
, Right <$> success
]
case r of
Left err -> fail err
Right vec -> return vec
where
success = do
results <- parseResultsObject val0

(join -> series) <- V.forM results $ \val ->
parseSeriesObject val <|> parseErrorObject val
(join -> series) <- V.forM results $ \val -> do
r <- foldr1 (<|>)
[ Left <$> parseErrorObject val
, Right <$> parseSeriesObject val
]
case r of
Left err -> fail err
Right vec -> return vec
values <- V.forM series $ \val -> do
(name, tags, columns, values) <- parseSeriesBody val
decodeFold $ V.forM values $ A.withArray "values" $ \fields -> do
assert (V.length columns == V.length fields) $ return ()
decodeEach $ row name tags columns fields
return $! join values

-- | Decoder settings
data Decoder a = forall b. Decoder
-- | A decoder to use when parsing a JSON response.
--
-- Use 'strictDecoder' if you want to fail the entire decoding process if
-- there's any failure. Use 'lenientDecoder' if you want the decoding process
-- to collect only successful results.
newtype Decoder = Decoder (forall a. SomeDecoder a)

-- | @'SomeDecoder' a@ represents how to decode a JSON response given a row
-- parser of type @'A.Parser' a@.
data SomeDecoder a = forall b. SomeDecoder
{ decodeEach :: A.Parser a -> A.Parser b
-- ^ How to decode each row. For example 'optional' can be used to turn parse
-- ^ How to decode each row.
--
-- For example 'optional' can be used to turn parse
-- failures into 'Nothing's.
, decodeFold :: A.Parser (Vector b) -> A.Parser (Vector a)
-- ^ How to aggregate rows into the resulting vector.
--
-- For example when @b ~ 'Maybe' a@, one way to aggregate the values is to
-- return only 'Just's.
}

-- | A decoder that fails immediately if there's any parse failure.
strictDecoder :: Decoder a
strictDecoder = Decoder
--
-- 'strictDecoder' is defined as follows:
--
-- @
-- strictDecoder :: Decoder
-- strictDecoder = Decoder $ SomeDecoder
-- { decodeEach = id
-- , decodeFold = id
-- }
-- @
strictDecoder :: Decoder
strictDecoder = Decoder $ SomeDecoder
{ decodeEach = id
, decodeFold = id
}

-- | A decoder that ignores parse failures and returns only successful results.
lenientDecoder :: Decoder a
lenientDecoder = Decoder
lenientDecoder :: Decoder
lenientDecoder = Decoder $ SomeDecoder
{ decodeEach = optional
, decodeFold = \p -> do
bs <- p
Expand Down Expand Up @@ -166,10 +200,8 @@ parseSeriesBody = A.withObject "series" $ \obj -> do
return (name, tags, columns, values)

-- | Parse the common JSON structure used in failure response.
parseErrorObject :: A.Value -> A.Parser a
parseErrorObject = A.withObject "error" $ \obj -> do
message <- obj .: "error"
fail $ T.unpack message
parseErrorObject :: A.Value -> A.Parser String
parseErrorObject = A.withObject "error" $ \obj -> obj .: "error"

-- | Parse either a POSIX timestamp or RFC3339 formatted timestamp as 'UTCTime'.
parseUTCTime :: Precision ty -> A.Value -> A.Parser UTCTime
Expand Down Expand Up @@ -207,20 +239,3 @@ parseRFC3339 val = A.withText err
fmt, err :: String
fmt = "%FT%X%QZ"
err = "RFC3339-formatted timestamp"

-- | Parse a 'QueryField'.
parseQueryField :: A.Value -> A.Parser QueryField
parseQueryField val = case val of
A.Number sci ->
return $! either FieldFloat FieldInt $ Sci.floatingOrInteger sci
A.String txt ->
return $! FieldString txt
A.Bool b ->
return $! FieldBool b
A.Null ->
return FieldNull
_ -> fail $ "parseQueryField: expected a flat data structure, but got "
++ show val
{-# DEPRECATED parseQueryField
"This function parses numbers in a misleading way. Use 'parseJSON' instead."
#-}
Loading

0 comments on commit a436732

Please sign in to comment.