Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

-n

Move api-tools to subdirectory, add .cabal and related files
  • Loading branch information...
commit 86f71a969375e37f63e7819d1c426d190a387391 0 parents
Adam Gundry adamgundry authored
Showing with 6,101 additions and 0 deletions.
  1. +30 −0 LICENSE
  2. +2 −0  Setup.hs
  3. +173 −0 api-tools.cabal
  4. +95 −0 main/Data/API/MigrationTool.hs
  5. +219 −0 src/Data/API/API.hs
  6. +166 −0 src/Data/API/API/DSL.hs
  7. +14 −0 src/Data/API/API/Gen.hs
  8. +1,191 −0 src/Data/API/Changes.hs
  9. +296 −0 src/Data/API/Doc/Call.hs
  10. +109 −0 src/Data/API/Doc/Dir.hs
  11. +40 −0 src/Data/API/Doc/Subst.hs
  12. +112 −0 src/Data/API/Doc/Types.hs
  13. +425 −0 src/Data/API/JSON.hs
  14. +254 −0 src/Data/API/Markdown.hs
  15. +368 −0 src/Data/API/Parse.y
  16. +190 −0 src/Data/API/Scan.x
  17. +40 −0 src/Data/API/TH.hs
  18. +72 −0 src/Data/API/Tools.hs
  19. +56 −0 src/Data/API/Tools/Combinators.hs
  20. +238 −0 src/Data/API/Tools/Datatypes.hs
  21. +59 −0 src/Data/API/Tools/Enum.hs
  22. +101 −0 src/Data/API/Tools/Example.hs
  23. +223 −0 src/Data/API/Tools/JSON.hs
  24. +37 −0 src/Data/API/Tools/JSONTests.hs
  25. +18 −0 src/Data/API/Tools/Lens.hs
  26. +151 −0 src/Data/API/Tools/QuickCheck.hs
  27. +29 −0 src/Data/API/Tools/SafeCopy.hs
  28. +381 −0 src/Data/API/Types.hs
  29. +95 −0 tests/Data/API/Test/DSL.hs
  30. +84 −0 tests/Data/API/Test/Gen.hs
  31. +71 −0 tests/Data/API/Test/JSON.hs
  32. +16 −0 tests/Data/API/Test/Main.hs
  33. +155 −0 tests/Data/API/Test/Migration.hs
  34. +591 −0 tests/Data/API/Test/MigrationData.hs
30 LICENSE
@@ -0,0 +1,30 @@
+Copyright (c) 2013-2014, Iris Connect
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ * Neither the name of Iris Connect nor the names of other
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2  Setup.hs
@@ -0,0 +1,2 @@
+import Distribution.Simple
+main = defaultMain
173 api-tools.cabal
@@ -0,0 +1,173 @@
+Name: api-tools
+Version: 0.1
+Synopsis: DSL for generating API boilerplate and docs
+Description: api-tools provides a compact DSL for describing an API.
+ It uses Template Haskell to generate the
+ corresponding data types and assorted tools for
+ working with it, including Aeson and QuickCheck
+ instances for converting between JSON and the
+ generated types and writing unit tests.
+Homepage: http://github.com/iconnect/api-tools
+License: BSD3
+License-file: LICENSE
+Author: Chris Dornan
+Maintainer: chrisd@irisconnect.co.uk
+Copyright: (c) Iris Connect 2013-2014
+Category: Network, Web, Cloud, Distributed Computing
+Build-type: Simple
+
+Cabal-version: >=1.10
+
+Source-repository head
+ type: git
+ location: https://github.com/iconnect/api-tools.git
+
+Library
+ Hs-Source-Dirs: src
+
+ Exposed-modules:
+ Data.API.API
+ Data.API.API.DSL
+ Data.API.API.Gen
+ Data.API.Changes
+ Data.API.Doc.Call
+ Data.API.Doc.Dir
+ Data.API.Doc.Subst
+ Data.API.Doc.Types
+ Data.API.JSON
+ Data.API.Markdown
+ Data.API.Parse
+ Data.API.Tools
+ Data.API.Tools.Combinators
+ Data.API.Tools.Datatypes
+ Data.API.Tools.Enum
+ Data.API.Tools.Example
+ Data.API.Tools.JSON
+ Data.API.Tools.JSONTests
+ Data.API.Tools.Lens
+ Data.API.Tools.QuickCheck
+ Data.API.Tools.SafeCopy
+ Data.API.Types
+
+ Other-modules:
+ Data.API.Scan
+ Data.API.TH
+
+ Build-depends:
+ Cabal >= 1.9.2 ,
+ QuickCheck >= 2.5.1,
+ aeson >= 0.6.2,
+ aeson-pretty <= 0.7 ,
+ array >= 0.4,
+ attoparsec >= 0.10.4,
+ base == 4.*,
+ base64-bytestring == 1.0.*,
+ bytestring >= 0.9 && < 0.11,
+ case-insensitive >= 1.0,
+ containers >= 0.4,
+ lens >= 3.8.7,
+ old-locale >= 1.0.0.4,
+ regex-compat >= 0.95 ,
+ safe >= 0.3.3,
+ safecopy >= 0.8.1,
+ time >= 1.4,
+ template-haskell >= 2.7,
+ text >= 0.11,
+ unordered-containers >= 0.2.3.0 ,
+ vector >= 0.10.0.1
+
+ Build-tools:
+ alex,
+ happy
+
+ GHC-Options:
+ -Wall
+ -fwarn-tabs
+
+ Default-Language: Haskell2010
+
+
+Executable migration-tool
+ Hs-Source-Dirs: main
+
+ Main-is: Data/API/MigrationTool.hs
+
+ Build-depends:
+ api-tools >= 0.1,
+ Cabal >= 1.9.2 ,
+ QuickCheck >= 2.5.1,
+ aeson >= 0.6.2,
+ aeson-pretty <= 0.7 ,
+ array >= 0.4,
+ attoparsec >= 0.10.4,
+ base == 4.*,
+ base64-bytestring == 1.0.*,
+ bytestring >= 0.9 && < 0.11,
+ case-insensitive >= 1.0,
+ containers >= 0.4,
+ lens >= 3.8.7,
+ old-locale >= 1.0.0.4,
+ regex-compat >= 0.95 ,
+ safe >= 0.3.3,
+ safecopy >= 0.8.1,
+ time >= 1.4,
+ template-haskell >= 2.7,
+ text >= 0.11,
+ unordered-containers >= 0.2.3.0 ,
+ vector >= 0.10.0.1
+
+ GHC-Options:
+ -main-is Data.API.MigrationTool
+ -Wall
+ -fwarn-tabs
+
+ Default-Language: Haskell2010
+
+
+Test-Suite test-api-tools
+ Hs-Source-Dirs: tests
+
+ Type: exitcode-stdio-1.0
+
+ Main-is: Data/API/Test/Main.hs
+
+ Other-modules:
+ Data.API.Test.DSL
+ Data.API.Test.Gen
+ Data.API.Test.JSON
+ Data.API.Test.Migration
+ Data.API.Test.MigrationData
+
+ Build-depends:
+ api-tools >= 0.1,
+ Cabal >= 1.9.2 ,
+ QuickCheck >= 2.5.1,
+ aeson >= 0.6.2,
+ aeson-pretty <= 0.7 ,
+ array >= 0.4,
+ attoparsec >= 0.10.4,
+ base == 4.*,
+ base64-bytestring == 1.0.*,
+ bytestring >= 0.9 && < 0.11,
+ case-insensitive >= 1.0,
+ containers >= 0.4,
+ lens >= 3.8.7,
+ old-locale >= 1.0.0.4,
+ regex-compat >= 0.95 ,
+ safe >= 0.3.3,
+ safecopy >= 0.8.1,
+ tasty >= 0.3 ,
+ tasty-hunit >= 0.2 ,
+ tasty-quickcheck >= 0.3 ,
+ time >= 1.4,
+ template-haskell >= 2.7,
+ text >= 0.11,
+ unordered-containers >= 0.2.3.0 ,
+ vector >= 0.10.0.1
+
+ GHC-Options:
+ -main-is Data.API.Test.Main
+ -Wall
+ -fwarn-tabs
+
+ Default-Language: Haskell2010
95 main/Data/API/MigrationTool.hs
@@ -0,0 +1,95 @@
+module Data.API.MigrationTool
+ ( main
+ ) where
+
+import Data.API.Changes
+import Data.API.JSON
+import Data.API.Parse
+import Data.API.Types
+
+import qualified Data.Aeson as JS
+import qualified Data.Aeson.Encode.Pretty as JS
+import qualified Data.ByteString.Lazy as BS
+import System.Environment
+import System.Exit
+import System.IO
+
+
+----------------------------
+-- Main, prototype testing
+
+main :: IO ()
+main = do
+ args <- getArgs
+ case args of
+ ["migrate", startApiFile, endApiFile, inDataFile, outDataFile] ->
+ migrate startApiFile endApiFile inDataFile outDataFile
+
+ ["compare", file1, file2] ->
+ compareJSON file1 file2
+
+ ["reformat", file1, file2] ->
+ reformatJSON file1 file2
+
+ ["changes", file] ->
+ changes file
+
+ _ -> putStrLn "--migrate start.api end.api start.json end.json" >> return ()
+
+migrate :: FilePath -> FilePath -> FilePath -> FilePath -> IO ()
+migrate startApiFile endApiFile
+ inDataFile outDataFile = do
+
+ (startApi, startChangelog) <- readApiFile startApiFile
+ (endApi, endChangelog) <- readApiFile endApiFile
+ inData <- readJsonFile inDataFile
+ let startApiVer = changelogVersion startChangelog
+ endApiVer = changelogVersion endChangelog
+ case migrateDataDump (startApi, startApiVer) (endApi, endApiVer)
+ endChangelog customMigrations root CheckAll inData of
+ Left err -> do
+ hPutStrLn stderr (prettyMigrateFailure err)
+ exitFailure
+ Right (outData, warnings) -> do
+ putStrLn . unlines . map show $ warnings
+ writeJsonFile outDataFile outData
+
+root :: TypeName
+root = TypeName "DatabaseSnapshot"
+
+readJsonFile :: FromJSONWithErrs b => FilePath -> IO b
+readJsonFile file = either (fail . prettyJSONErrorPositions) return
+ . decodeWithErrs =<< BS.readFile file
+
+writeJsonFile :: JS.ToJSON a => FilePath -> a -> IO ()
+writeJsonFile file = BS.writeFile file . JS.encodePretty
+
+readApiFile :: FilePath -> IO APIWithChangelog
+readApiFile file = fmap parseAPIWithChangelog (readFile file)
+
+data ChangeTag = None
+ deriving (Read, Show)
+
+customMigrations :: CustomMigrations ChangeTag ChangeTag ChangeTag
+customMigrations = CustomMigrations nope (\ _ _ -> Nothing)
+ nope (\ _ _ -> Nothing)
+ nofld
+ where
+ nope _ v = Left (CustomMigrationError "No custom migrations defined" (JS.Object v))
+ nofld _ v = Left (CustomMigrationError "No field custom migrations defined" v)
+
+compareJSON :: FilePath -> FilePath -> IO ()
+compareJSON file1 file2 = do
+ js1 <- readJsonFile file1
+ js2 <- readJsonFile file2
+ print (js1 == (js2 :: JS.Value))
+
+reformatJSON :: FilePath -> FilePath -> IO ()
+reformatJSON file1 file2 = do
+ js <- readJsonFile file1
+ writeJsonFile file2 (js :: JS.Value)
+
+changes :: FilePath -> IO ()
+changes file = do
+ s <- readFile file
+ print (parseAPI s)
219 src/Data/API/API.hs
@@ -0,0 +1,219 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+-- This module extracts an API specified with the DSL as a JSON-encoded
+-- object so that it can be imported into the client's framework
+-- for building the client wrappers.
+
+module Data.API.API (extractAPI) where
+
+import qualified Data.API.API.Gen as D
+import Data.API.Types
+import Data.API.JSON
+import qualified Data.CaseInsensitive as CI
+import qualified Data.Text as T
+import Control.Applicative
+import Text.Regex
+
+
+-- | Take an API spec and generate the JSON
+
+extractAPI :: API -> Value
+extractAPI api = toJSON $ map convert [ an | ThNode an <- api ]
+
+convert :: APINode -> D.APINode
+convert (APINode{..}) =
+ D.APINode
+ { D._an_name = T.pack $ _TypeName anName
+ , D._an_comment = T.pack anComment
+ , D._an_prefix = T.pack $ CI.original anPrefix
+ , D._an_spec = convert_spec anSpec
+ , D._an_convert = fmap convert_conversion anConvert
+ , D._an_version = _Vrn anVersion
+ , D._an_log = T.pack anLog
+ }
+
+convert_spec :: Spec -> D.Spec
+convert_spec sp =
+ case sp of
+ SpNewtype sn -> D.SP_newtype $ convert_specnt sn
+ SpRecord sr -> D.SP_record $ convert_fields $ srFields sr
+ SpUnion su -> D.SP_union $ convert_union $ suFields su
+ SpEnum se -> D.SP_enum $ convert_alts $ seAlts se
+ SpSynonym ty -> D.SP_synonym $ convert_type ty
+
+convert_conversion :: (FieldName,FieldName) -> D.Conversion
+convert_conversion (i,p) =
+ D.Conversion
+ { D._cv_injection = T.pack $ _FieldName p
+ , D._cv_projection = T.pack $ _FieldName i
+ }
+
+convert_specnt :: SpecNewtype -> D.SpecNewtype
+convert_specnt sn =
+ D.SpecNewtype
+ { D._sn_type = convert_basic $ snType sn
+ , D._sn_filter = convert_filter <$> snFilter sn
+ }
+
+convert_filter :: Filter -> D.Filter
+convert_filter ftr =
+ case ftr of
+ FtrStrg RegEx{..} -> D.FT_string $ D.RegularExpression re_text
+ FtrIntg IntRange{..} -> D.FT_integer $ D.IntRange ir_lo ir_hi
+ FtrUTC UTCRange{..} -> D.FT_utc $ D.UTCRange ur_lo ur_hi
+
+convert_fields :: [(FieldName, FieldType)] -> [D.Field]
+convert_fields al = map f al
+ where
+ f (fn,fty) =
+ D.Field
+ { D._fd_name = T.pack $ _FieldName fn
+ , D._fd_type = convert_type $ ftType fty
+ , D._fd_readonly = ftReadOnly fty
+ , D._fd_default = convert_default <$> ftDefault fty
+ , D._fd_comment = T.pack $ ftComment fty
+ }
+
+convert_union :: [(FieldName, (APIType, MDComment))] -> [D.Field]
+convert_union al = map f al
+ where
+ f (fn,(ty,co)) =
+ D.Field
+ { D._fd_name = T.pack $ _FieldName fn
+ , D._fd_type = convert_type ty
+ , D._fd_readonly = False
+ , D._fd_default = Nothing
+ , D._fd_comment = T.pack co
+ }
+
+convert_alts :: [(FieldName,MDComment)] -> [T.Text]
+convert_alts fns = map (T.pack . _FieldName . fst) fns
+
+convert_type :: APIType -> D.APIType
+convert_type ty0 =
+ case ty0 of
+ TyList ty -> D.TY_list $ convert_type ty
+ TyMaybe ty -> D.TY_maybe $ convert_type ty
+ TyName tn -> D.TY_ref $ convert_ref tn
+ TyBasic bt -> D.TY_basic $ convert_basic bt
+ TyJSON -> D.TY_json 0
+
+convert_ref :: TypeName -> D.TypeRef
+convert_ref (TypeName tn) = D.TypeRef (T.pack tn)
+
+convert_basic :: BasicType -> D.BasicType
+convert_basic bt =
+ case bt of
+ BTstring -> D.BT_string
+ BTbinary -> D.BT_binary
+ BTbool -> D.BT_boolean
+ BTint -> D.BT_integer
+ BTutc -> D.BT_utc
+
+convert_default :: DefaultValue -> D.DefaultValue
+convert_default DefValList = D.DV_list 0
+convert_default DefValMaybe = D.DV_maybe 0
+convert_default (DefValString s) = D.DV_string s
+convert_default (DefValBool b) = D.DV_boolean b
+convert_default (DefValInt i) = D.DV_integer i
+convert_default (DefValUtc u) = D.DV_utc u
+
+
+
+-- | Generate an API spec from the JSON
+
+instance FromJSONWithErrs Thing where
+ parseJSONWithErrs v = (ThNode . unconvert) <$> parseJSONWithErrs v
+
+unconvert :: D.APINode -> APINode
+unconvert (D.APINode{..}) =
+ APINode
+ { anName = TypeName $ T.unpack _an_name
+ , anComment = T.unpack _an_comment
+ , anPrefix = CI.mk $ T.unpack _an_prefix
+ , anSpec = unconvert_spec _an_spec
+ , anConvert = fmap unconvert_conversion _an_convert
+ , anVersion = Vrn _an_version
+ , anLog = T.unpack _an_log
+ }
+
+unconvert_spec :: D.Spec -> Spec
+unconvert_spec sp =
+ case sp of
+ D.SP_newtype sn -> SpNewtype $ unconvert_specnt sn
+ D.SP_record sr -> SpRecord $ SpecRecord $ unconvert_fields sr
+ D.SP_union su -> SpUnion $ SpecUnion $ unconvert_union su
+ D.SP_enum se -> SpEnum $ SpecEnum $ unconvert_alts se
+ D.SP_synonym ty -> SpSynonym $ unconvert_type ty
+
+unconvert_conversion :: D.Conversion -> (FieldName, FieldName)
+unconvert_conversion c =
+ ( FieldName $ T.unpack $ D._cv_injection c
+ , FieldName $ T.unpack $ D._cv_projection c
+ )
+
+unconvert_specnt :: D.SpecNewtype -> SpecNewtype
+unconvert_specnt sn =
+ SpecNewtype
+ { snType = unconvert_basic $ D._sn_type sn
+ , snFilter = unconvert_filter <$> D._sn_filter sn
+ }
+
+unconvert_filter :: D.Filter -> Filter
+unconvert_filter ftr =
+ case ftr of
+ D.FT_string (D.RegularExpression re_text) -> FtrStrg $ RegEx re_text (mkRegexWithOpts (T.unpack re_text) False True)
+ D.FT_integer (D.IntRange ir_lo ir_hi) -> FtrIntg $ IntRange ir_lo ir_hi
+ D.FT_utc (D.UTCRange ur_lo ur_hi) -> FtrUTC $ UTCRange ur_lo ur_hi
+
+unconvert_fields :: [D.Field] -> [(FieldName, FieldType)]
+unconvert_fields al = map f al
+ where
+ f fld = ( FieldName $ T.unpack $ D._fd_name fld
+ , FieldType { ftType = unconvert_type $ D._fd_type fld
+ , ftReadOnly = D._fd_readonly fld
+ , ftDefault = unconvert_default <$> D._fd_default fld
+ , ftComment = T.unpack $ D._fd_comment fld
+ }
+ )
+
+unconvert_union :: [D.Field] -> [(FieldName, (APIType, MDComment))]
+unconvert_union al = map f al
+ where
+ f fld = ( FieldName $ T.unpack $ D._fd_name fld
+ , ( unconvert_type $ D._fd_type fld
+ , T.unpack $ D._fd_comment fld
+ ))
+
+unconvert_alts :: [T.Text] -> [(FieldName,MDComment)]
+unconvert_alts fns = map ((\x -> (x, "")) . FieldName . T.unpack) fns
+
+unconvert_type :: D.APIType -> APIType
+unconvert_type ty0 =
+ case ty0 of
+ D.TY_list ty -> TyList $ unconvert_type ty
+ D.TY_maybe ty -> TyMaybe $ unconvert_type ty
+ D.TY_ref r -> TyName $ unconvert_ref r
+ D.TY_basic bt -> TyBasic $ unconvert_basic bt
+ D.TY_json _ -> TyJSON
+
+unconvert_ref :: D.TypeRef -> TypeName
+unconvert_ref (D.TypeRef tn) = TypeName $ T.unpack tn
+
+unconvert_basic :: D.BasicType -> BasicType
+unconvert_basic bt =
+ case bt of
+ D.BT_string -> BTstring
+ D.BT_binary -> BTbinary
+ D.BT_boolean -> BTbool
+ D.BT_integer -> BTint
+ D.BT_utc -> BTutc
+
+unconvert_default :: D.DefaultValue -> DefaultValue
+unconvert_default (D.DV_list _) = DefValList
+unconvert_default (D.DV_maybe _) = DefValMaybe
+unconvert_default (D.DV_string s) = DefValString s
+unconvert_default (D.DV_boolean b) = DefValBool b
+unconvert_default (D.DV_integer i) = DefValInt i
+unconvert_default (D.DV_utc u) = DefValUtc u
166 src/Data/API/API/DSL.hs
@@ -0,0 +1,166 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -XNoCPP #-}
+
+module Data.API.API.DSL
+ ( apiAPI
+ ) where
+
+import Data.API.Parse
+import Data.API.Types
+
+
+apiAPI :: API
+apiAPI = [api|
+
+//
+// #The api-tools API
+//
+// Here we make the DSL specs available in JSON so that clients
+// working in non-Haskell frameworks can integrate API
+// specs into their own environments.
+
+ans :: APISpec
+ // an API specification consists of a list of nodes,
+ // each describing a named JSON thing which can be
+ // referenced in other (object) nodes.
+ = [APINode]
+
+an :: APINode
+ // Each node is named (with a name that conforms to Haskell's
+ // Type identifier rules, starting with a capital letter) and some
+ // comment text (like this comment). The JSON spec is all contained
+ // in the {Spec}, the other fields being used behind the scenes
+ // to manage the Haskell name space and migrations.
+ = record
+ name :: string // the name must start with a big letter
+ // and conform to Haskell rules for type
+ // identifiers
+ comment :: string // natural language description of the node
+ prefix :: string // (Haskell side only) this prefix used to
+ // structure the Haskell name space
+ // (which is uncomfortably flat where
+ // record field names are concerned);
+ // it has no effect on the JSON representation
+ spec :: Spec // here we specify the JSON representation
+ convert :: ? Conversion // (Haskell side only) sometimes we may
+ // choose to not use the default internal
+ // representation for the JSON
+ // but instead supply a couple of functions
+ // for injecting the default representation
+ // into the actual type we will use and
+ // project it back again; here we can check
+ // some properties (which must be explained
+ // in the comments) and reject a request
+ // with a 400 error; has no effect on the
+ // JSON representation
+ 'version' :: integer // (Haskell side mostly) the version number
+ // for handling server migrations
+ log :: string // a log explaining the changes that have
+ // been made through the migrations
+
+sp :: Spec
+ // the JSON representation is specified as a simple 'newtype', a record
+ // type, a union type, an enumerated type or a (mere) type synonym.
+ = union
+ | newtype :: SpecNewtype // here we have a basic string or number type
+ | 'record' :: [Field] // an object representing a record
+ | 'union' :: [Field] // an object representing a number of alternatives
+ | 'enum' :: [string] // a string type which must contain a number of
+ // discrete values
+ | synonym :: APIType // is just an alias for another type
+
+sn :: SpecNewtype
+ // 'newtype' specs are a basic type and an optional filter specification
+ = record
+ type :: BasicType
+ filter :: ? Filter
+
+ft :: Filter
+ = union
+ | 'string' :: RegularExpression
+ | 'integer' :: IntRange
+ | 'utc' :: UTCRange
+
+re :: RegularExpression
+ // regular expressions are represented by simple strings
+ = basic string
+
+ir :: IntRange
+ // integer ranges are specified with their bounds and may be open at
+ // the bottom and top
+ = record
+ lo :: ? integer
+ hi :: ? integer
+
+ur :: UTCRange
+ // UTC ranges are specified with their bounds and may be open at
+ // the bottom and top
+ = record
+ lo :: ? utc
+ hi :: ? utc
+
+cv :: Conversion
+ // Conversions are just used on the Haskell side to map the concrete JSON
+ // representation into an internal type and back again
+ = record
+ injection :: string // the injection function for injecting
+ // representations into the internal
+ // representation; may return a failure
+ // indicating that some API precondition
+ // was not met
+ projection:: string // the projection function converts the
+ // internal type back into the JSON
+ // representation for communication
+ // back to the client
+
+fd :: Field
+ // a field represents both a field in a record object and an alternative in
+ // a union object (in which exactly one of the 'fields' is ever present in
+ // the object)
+ = record
+ name :: string // the name of the field
+ type :: APIType // the JSON type of the field
+ readonly :: boolean // read-only status
+ default :: ? DefaultValue // the default value of the field
+ comment :: string // a comment describing the field (like
+ // this one)
+
+ty :: APIType
+ // this is used to represent JSON types in fields (or synonyms) and is
+ // one of the following:
+ = union
+ | list :: APIType // a JSON list of the given type
+ | maybe :: APIType // either the given type or the null value
+ | ref :: TypeRef // a named type (node) with possible example
+ | 'basic':: BasicType // a basic JSON type
+ | 'json' :: integer // a generic JSON value
+
+tr :: TypeRef
+ // reference to a type name
+ = basic string
+
+bt :: BasicType
+ // finally we get down to the basic JSON types ('binary' is a string
+ // in which the byte string has been encoded with base-64, safe for
+ // embedding in a UTF-8-encoded JSON string)
+ = enum
+ | 'string'
+ | 'binary'
+ | 'boolean'
+ | 'integer'
+ | 'utc'
+
+dv :: DefaultValue
+ // a default value
+ = union
+ | 'list' :: integer
+ | 'maybe' :: integer
+ | 'string' :: string
+ | 'boolean' :: boolean
+ | 'integer' :: integer
+ | 'utc' :: utc
+|]
+
+{-
+-}
14 src/Data/API/API/Gen.hs
@@ -0,0 +1,14 @@
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+
+module Data.API.API.Gen where
+
+import Data.API.API.DSL
+import Data.API.Tools
+
+$(generate apiAPI)
+$(generateInstances apiAPI)
+$(generateTools apiAPI)
+$(generateTests apiAPI "apiAPISimpleTests")
1,191 src/Data/API/Changes.hs
@@ -0,0 +1,1191 @@
+{-# OPTIONS_GHC -fno-warn-deprecations #-}
+{-# LANGUAGE FlexibleInstances #-}
+
+module Data.API.Changes where
+
+import Data.API.Types
+import Data.API.JSON
+
+import Control.Applicative
+import Control.Monad
+import qualified Data.Aeson as JS
+import qualified Data.Aeson.Encode.Pretty as JS
+import qualified Data.ByteString.Char8 as B
+import qualified Data.ByteString.Lazy.Char8 as BL
+import qualified Data.ByteString.Base64 as B64
+import Data.Map (Map)
+import qualified Data.Map as Map
+import Data.Maybe
+import Data.Set (Set)
+import qualified Data.Set as Set
+import qualified Data.HashMap.Strict as HMap
+import qualified Data.Vector as V
+import qualified Data.Text as T
+import Data.Version
+import Data.Time
+import Data.List
+
+-------------------------
+-- Top level: do it all
+--
+
+-- The tag type must be an enumeration of all the custom migration
+-- tags in the changelog, as generated by generateMigrationKind in
+-- Data.API.Tools.
+
+migrateDataDump :: (Read db, Read rec, Read fld)
+ => (API, Version) -> (API, Version)
+ -> APIChangelog
+ -> CustomMigrations db rec fld
+ -> TypeName
+ -> DataChecks
+ -> JS.Value
+ -> Either MigrateFailure (JS.Value, [MigrateWarning])
+migrateDataDump startApi endApi changelog custom root chks db = do
+ let custom' = readCustomMigrations custom
+ (changes, warnings) <- validateChanges root custom' chks startApi endApi changelog
+ ?!? ValidateFailure
+ db' <- applyChangesToDatabase root custom' db changes ?!? uncurry ValueError
+ return (db', warnings)
+
+data MigrateFailure
+ = ValidateFailure ValidateFailure
+ | ValueError ValueError Position
+ deriving (Eq, Show)
+
+type MigrateWarning = ValidateWarning
+
+------------------
+-- The key types
+--
+
+type APIWithChangelog = (API, APIChangelog)
+
+-- | An API changelog. Like a changelog we have pairs of versions and the
+-- changes from one version to the next.
+data APIChangelog =
+ -- | The changes from the previous version up to this version.
+ ChangesUpTo Version [APIChange] APIChangelog
+ -- | The initial version
+ | ChangesStart Version
+ deriving (Eq, Show)
+
+data APIChange
+ = ChAddType TypeName NormTypeDecl
+ | ChDeleteType TypeName
+ | ChRenameType TypeName TypeName
+
+ -- Specific changes for record types
+ | ChAddField TypeName FieldName APIType (Maybe DefaultValue)
+ | ChDeleteField TypeName FieldName
+ | ChRenameField TypeName FieldName FieldName
+ | ChChangeField TypeName FieldName APIType MigrationTag
+
+ -- Changes for union types
+ | ChAddUnionAlt TypeName FieldName APIType
+ | ChDeleteUnionAlt TypeName FieldName
+ | ChRenameUnionAlt TypeName FieldName FieldName
+
+ -- Changes for enum types
+ | ChAddEnumVal TypeName FieldName
+ | ChDeleteEnumVal TypeName FieldName
+ | ChRenameEnumVal TypeName FieldName FieldName
+
+ -- Custom value (not type) changes
+ | ChCustomRecord TypeName MigrationTag
+ | ChCustomAll MigrationTag
+ deriving (Eq, Show)
+
+
+type MigrationTag = String
+
+data CustomMigrations db rec fld = CustomMigrations
+ { databaseMigration :: db -> JS.Object -> Either ValueError JS.Object
+ , databaseMigrationSchema :: db -> NormAPI -> Maybe NormAPI
+ , recordMigration :: rec -> JS.Object -> Either ValueError JS.Object
+ , recordMigrationSchema :: rec -> NormRecordType -> Maybe NormRecordType
+ , fieldMigration :: fld -> JS.Value -> Either ValueError JS.Value }
+
+type CustomMigrationsTagged = CustomMigrations MigrationTag MigrationTag MigrationTag
+
+readCustomMigrations :: (Read db, Read rec, Read fld)
+ => CustomMigrations db rec fld -> CustomMigrationsTagged
+readCustomMigrations (CustomMigrations db dbs r rs f) =
+ CustomMigrations (db . read) (dbs . read) (r . read) (rs . read) (f . read)
+
+
+-- | When to validate the data against the schema (each level implies
+-- the preceding levels):
+data DataChecks = NoChecks -- ^ Not at all
+ | CheckStartAndEnd -- ^ At start and end of the migration
+ | CheckCustom -- ^ After custom migrations
+ | CheckAll -- ^ After every change
+ deriving (Eq, Ord)
+
+-- | Whether to validate the dataset after this change
+validateAfter :: DataChecks -> APIChange -> Bool
+validateAfter chks (ChChangeField{}) = chks >= CheckCustom
+validateAfter chks (ChCustomRecord{}) = chks >= CheckCustom
+validateAfter chks (ChCustomAll{}) = chks >= CheckCustom
+validateAfter chks _ = chks >= CheckAll
+
+
+--------------------
+-- Changelog utils
+--
+
+changelogStartVersion :: APIChangelog -> Version
+changelogStartVersion (ChangesStart v) = v
+changelogStartVersion (ChangesUpTo _ _ clog) = changelogStartVersion clog
+
+changelogVersion :: APIChangelog -> Version
+changelogVersion (ChangesStart v) = v
+changelogVersion (ChangesUpTo v _ _) = v
+
+-- | Changelog in order starting from oldest version up to newest.
+-- Entries are @(from, to, changes-oldest-first)@.
+viewChangelogReverse :: APIChangelog -> [(Version, Version, [APIChange])]
+viewChangelogReverse clog =
+ reverse [ (v,v',reverse cs) | (v',v,cs) <- viewChangelog clog ]
+
+-- | Changelog in order as written, with latest version at the beginning, going
+-- back to older versions. Entries are @(to, from, changes-latest-first)@.
+viewChangelog :: APIChangelog -> [(Version, Version, [APIChange])]
+viewChangelog (ChangesStart _) = []
+viewChangelog (ChangesUpTo v' cs older) = (v', v, cs) : viewChangelog older
+ where v = changelogVersion older
+
+isChangelogOrdered :: APIChangelog -> Either (Version, Version) ()
+isChangelogOrdered changelog =
+ case find (\ (v', v, _) -> v' <= v) (viewChangelog changelog) of
+ Nothing -> return ()
+ Just (v', v, _) -> Left (v', v)
+
+
+-- | Sets of custom migration tags in the changelog for
+-- whole-database, single-record and single-field migrations
+changelogTags :: APIChangelog -> (Set MigrationTag, Set MigrationTag, Set MigrationTag)
+changelogTags (ChangesStart _) = (Set.empty, Set.empty, Set.empty)
+changelogTags (ChangesUpTo _ cs older) =
+ unions3 (map changeTags cs) `union3` changelogTags older
+ where
+ union3 (a, b, c) (x, y, z) = (a `Set.union` x, b `Set.union` y, c `Set.union` z)
+ unions3 xyzs = (Set.unions xs, Set.unions ys, Set.unions zs)
+ where (xs, ys, zs) = unzip3 xyzs
+
+-- | Sets of custom migration tags in a single change
+changeTags :: APIChange -> (Set MigrationTag, Set MigrationTag, Set MigrationTag)
+changeTags (ChChangeField _ _ _ t) = (Set.empty, Set.empty, Set.singleton t)
+changeTags (ChCustomRecord _ t) = (Set.empty, Set.singleton t, Set.empty)
+changeTags (ChCustomAll t) = (Set.singleton t, Set.empty, Set.empty)
+changeTags _ = (Set.empty, Set.empty, Set.empty)
+
+
+------------------------------------------
+-- Comparing APIs: canonical/normal form
+--
+
+-- | The API type has too much extra info for us to be able to simply compare
+-- them with @(==)@. Our strategy is to strip out ancillary information and
+-- normalise into a canonical form, and then we can use a simple @(==)@ compare.
+--
+-- Our normalised API discards most of the details of each type, keeping
+-- just essential information about each type. We discard order of types and
+-- fields, so we can use just associative maps.
+--
+type NormAPI = Map TypeName NormTypeDecl
+
+-- | The normal or canonical form for a type declaration, an 'APINode'.
+-- Equality of the normal form indicates equivalence of APIs.
+--
+-- We track all types.
+--
+data NormTypeDecl
+ = NRecordType NormRecordType
+ | NUnionType NormUnionType
+ | NEnumType NormEnumType
+ | NTypeSynonym APIType
+ | NNewtype BasicType
+ deriving (Eq, Show)
+
+-- | The canonical form of a record type is a map from fields to
+-- values; similarly a union is a map from fields to alternatives and
+-- an enum is a set of values.
+type NormRecordType = Map FieldName APIType
+type NormUnionType = Map FieldName APIType
+type NormEnumType = Set FieldName
+
+
+apiNormalForm :: API -> NormAPI
+apiNormalForm api =
+ Map.fromList
+ [ (name, declNF spec)
+ | ThNode (APINode {anName = name, anSpec = spec}) <- api ]
+
+declNF :: Spec -> NormTypeDecl
+declNF (SpRecord (SpecRecord fields)) = NRecordType $ Map.fromList
+ [ (fname, ftType ftype)
+ | (fname, ftype) <- fields ]
+declNF (SpUnion (SpecUnion alts)) = NUnionType $ Map.fromList
+ [ (fname, ftype)
+ | (fname, (ftype, _)) <- alts ]
+declNF (SpEnum (SpecEnum elems)) = NEnumType $ Set.fromList
+ [ fname | (fname, _) <- elems ]
+declNF (SpSynonym t) = NTypeSynonym t
+declNF (SpNewtype (SpecNewtype bt _)) = NNewtype bt
+
+
+-------------------------
+-- Type decl/expr utils
+--
+
+typeDelcsFreeVars :: NormAPI -> Set TypeName
+typeDelcsFreeVars = Set.unions . map typeDelcFreeVars . Map.elems
+
+typeDelcFreeVars :: NormTypeDecl -> Set TypeName
+typeDelcFreeVars (NRecordType fields) = Set.unions . map typeFreeVars
+ . Map.elems $ fields
+typeDelcFreeVars (NUnionType alts) = Set.unions . map typeFreeVars
+ . Map.elems $ alts
+typeDelcFreeVars (NEnumType _) = Set.empty
+typeDelcFreeVars (NTypeSynonym t) = typeFreeVars t
+typeDelcFreeVars (NNewtype _) = Set.empty
+
+typeFreeVars :: APIType -> Set TypeName
+typeFreeVars (TyList t) = typeFreeVars t
+typeFreeVars (TyMaybe t) = typeFreeVars t
+typeFreeVars (TyName n) = Set.singleton n
+typeFreeVars (TyBasic _) = Set.empty
+typeFreeVars TyJSON = Set.empty
+
+mapTypeDeclFreeVars :: (TypeName -> APIType) -> NormTypeDecl -> NormTypeDecl
+mapTypeDeclFreeVars f (NRecordType fields) = NRecordType (Map.map (mapTypeFreeVars f) fields)
+mapTypeDeclFreeVars f (NUnionType alts) = NUnionType (Map.map (mapTypeFreeVars f) alts)
+mapTypeDeclFreeVars _ d@(NEnumType _) = d
+mapTypeDeclFreeVars f (NTypeSynonym t) = NTypeSynonym (mapTypeFreeVars f t)
+mapTypeDeclFreeVars _ d@(NNewtype _) = d
+
+-- | Apply a substitution to every type name in a type expression,
+-- preserving the list/maybe structure around it.
+mapTypeFreeVars :: (TypeName -> APIType) -> APIType -> APIType
+mapTypeFreeVars f ty = case ty of
+  TyList t  -> TyList (mapTypeFreeVars f t)
+  TyMaybe t -> TyMaybe (mapTypeFreeVars f t)
+  TyName n  -> f n
+  TyBasic _ -> ty
+  TyJSON    -> ty
+
+
+-- | Is the type declared in the API?
+typeDeclaredInApi :: TypeName -> NormAPI -> Bool
+typeDeclaredInApi = Map.member
+
+-- | Check if a type is used anywhere in the API
+typeUsedInApi :: TypeName -> NormAPI -> Bool
+typeUsedInApi tname = Set.member tname . typeDelcsFreeVars
+
+-- | Check if a type is used anywhere in the database (possibly in a
+-- transitive dependency of the root).
+typeUsedInApiTable :: TypeName -> TypeName -> NormAPI -> Bool
+typeUsedInApiTable root tname api
+  | tname == root = True
+  | otherwise     = tname `Set.member` transitiveDeps api (Set.singleton root)
+
+-- | Compute the transitive dependencies of a set of types
+transitiveDeps :: NormAPI -> Set TypeName -> Set TypeName
+transitiveDeps api = transitiveClosure directDeps
+  where
+    -- One step of the relation: all type names referenced by the
+    -- declarations of the types currently in the set.
+    directDeps s = typeDelcsFreeVars (Map.filterWithKey (\ k _ -> k `Set.member` s) api)
+
+-- | Compute the set of types that depend (transitively) on the given types
+-- (the reverse of 'transitiveDeps' -- the name is \"Deps\" spelt
+-- backwards).  One step keeps exactly those API types whose
+-- declarations mention some type in the current set.
+transitiveSped :: NormAPI -> Set TypeName -> Set TypeName
+transitiveSped api = transitiveClosure $ \ s ->
+    Map.keysSet $
+    Map.filter (intersects s . typeDelcFreeVars) api
+  where
+    intersects s1 s2 = not $ Set.null $ s1 `Set.intersection` s2
+
+-- | Compute the transitive closure of a relation. Relations are
+-- represented as functions that take a set of elements to the set of
+-- related elements. Note that the result is the closure of the
+-- /image/ of the start set: the starting elements themselves are only
+-- included if the relation reaches back to them.
+transitiveClosure :: Ord a => (Set a -> Set a) -> Set a -> Set a
+transitiveClosure rel start = go initial initial
+  where
+    initial = rel start
+
+    -- Expand only the newly-discovered frontier each round; stop once
+    -- a round yields nothing we have not already seen.
+    go seen frontier
+      | Set.null next = seen
+      | otherwise     = go (seen `Set.union` next) next
+      where
+        next = rel frontier `Set.difference` seen
+
+-- | Replace every /use/ of @tname@ with @tname'@ throughout the API's
+-- declarations (the declaration itself is not renamed here).
+renameTypeUses :: TypeName -> TypeName -> NormAPI -> NormAPI
+renameTypeUses tname tname' api = Map.map (mapTypeDeclFreeVars subst) api
+  where
+    subst n
+      | n == tname = TyName tname'
+      | otherwise  = TyName n
+
+-- | A type expression is valid iff every type name it mentions is
+-- declared in the API; on failure, return the set of undeclared names.
+typeIsValid :: APIType -> NormAPI -> Either (Set TypeName) ()
+typeIsValid t api = varsDeclaredIn (typeFreeVars t) api
+
+-- | As 'typeIsValid', but for a whole declaration.
+declIsValid :: NormTypeDecl -> NormAPI -> Either (Set TypeName) ()
+declIsValid decl api = varsDeclaredIn (typeDelcFreeVars decl) api
+
+-- | Shared check: every given name must be a key of the API.
+varsDeclaredIn :: Set TypeName -> NormAPI -> Either (Set TypeName) ()
+varsDeclaredIn vars api
+  | Set.null missing = Right ()
+  | otherwise        = Left missing
+  where
+    missing = vars Set.\\ Map.keysSet api
+
+-- | The API invariant: every type used anywhere in the API must
+-- itself be declared. On failure, return the set of missing
+-- declarations.
+apiInvariant :: NormAPI -> Either (Set TypeName) ()
+apiInvariant api
+  | Set.null missing = Right ()
+  | otherwise        = Left missing
+  where
+    missing = typeDelcsFreeVars api Set.\\ Map.keysSet api
+
+
+--------------------------------
+-- Representing update positions
+--
+
+-- | Represents the positions in a declaration to apply an update
+data UpdateDeclPos
+  = UpdateHere (Maybe UpdateDeclPos)
+  | UpdateRecord (Map FieldName (Maybe UpdateTypePos))
+    -- ^ per-field position; 'Nothing' means the field needs no update
+  | UpdateUnion (Map FieldName (Maybe UpdateTypePos))
+    -- ^ per-alternative position; 'Nothing' means no update needed
+  | UpdateType UpdateTypePos
+  deriving (Eq, Show)
+
+-- | Represents the positions in a type to apply an update
+data UpdateTypePos
+  = UpdateList UpdateTypePos
+  | UpdateMaybe UpdateTypePos
+  | UpdateNamed TypeName
+  deriving (Eq, Show)
+
+-- | A single step of a data migration.
+data APITableChange
+  -- | The pair of an APIChange and the positions in which to apply it
+  = APIChange APIChange (Map TypeName UpdateDeclPos)
+  -- | Request to validate the dataset against the given API
+  | ValidateData NormAPI
+  deriving (Eq, Show)
+
+-- | Given a type to be modified, find the positions in which each
+-- type in the API must be updated
+findUpdatePos :: TypeName -> NormAPI -> Map TypeName UpdateDeclPos
+findUpdatePos tname api = Map.alter (Just . UpdateHere) tname $
+                          fromSet findDecl deps
+  -- 'Map.alter' with @Just . UpdateHere@ wraps any position already
+  -- computed for @tname@ in @UpdateHere (Just ...)@, or inserts
+  -- @UpdateHere Nothing@ if @tname@ has no dependants of its own.
+  where
+    -- TODO: upgrade to Map.fromSet in containers-0.5
+    fromSet :: Ord k => (k -> a) -> Set k -> Map k a
+    fromSet f = Map.fromList . map (\ k -> (k, f k)) . Set.toList
+
+    -- The set of types that depend on the type being updated
+    deps :: Set TypeName
+    deps = transitiveSped api (Set.singleton tname)
+
+    findDecl :: TypeName -> UpdateDeclPos
+    findDecl tname' = findDecl' $
+                      fromMaybe (error "findUpdatePos: missing type") $
+                      Map.lookup tname' api
+
+    -- Enums and newtypes reference no other types, so they can never
+    -- appear in 'deps'; reaching those cases would mean the dependency
+    -- computation is broken.
+    findDecl' :: NormTypeDecl -> UpdateDeclPos
+    findDecl' (NRecordType flds) = UpdateRecord $ fmap findType flds
+    findDecl' (NUnionType alts) = UpdateUnion $ fmap findType alts
+    findDecl' (NEnumType _) = error "findDecl': unexpected enum"
+    findDecl' (NTypeSynonym ty) = UpdateType $ fromMaybe (error "findDecl': missing") $
+                                  findType ty
+    findDecl' (NNewtype _) = error "findDecl': unexpected newtype"
+
+    -- 'Nothing' marks a sub-type that needs no update at all.
+    findType :: APIType -> Maybe UpdateTypePos
+    findType (TyList ty) = UpdateList <$> findType ty
+    findType (TyMaybe ty) = UpdateMaybe <$> findType ty
+    findType (TyName tname')
+      | tname' == tname || tname' `Set.member` deps = Just $ UpdateNamed tname'
+      | otherwise = Nothing
+    findType (TyBasic _) = Nothing
+    findType TyJSON = Nothing
+
+
+---------------------------
+-- Validating API changes
+--
+
+-- | Reasons why a changelog fails to validate against a pair of APIs.
+data ValidateFailure
+  -- the changelog must be in descending order of versions
+  = ChangelogOutOfOrder { vfLaterVersion :: Version
+                        , vfEarlierVersion :: Version }
+  -- forbid migrating from one version to an earlier version
+  | CannotDowngrade { vfFromVersion :: Version
+                    , vfToVersion :: Version }
+  -- an API uses types that are not declared
+  | ApiInvalid { vfInvalidVersion :: Version
+               , vfMissingDeclarations :: Set TypeName }
+  -- changelog entry does not apply
+  | ChangelogEntryInvalid { vfSuccessfullyApplied :: [APITableChange]
+                          , vfFailedToApply :: APIChange
+                          , vfApplyFailure :: ApplyFailure }
+  -- changelog is incomplete
+  -- (ie all entries apply ok but result isn't the target api)
+  | ChangelogIncomplete { vfChangelogVersion :: Version
+                        , vfTargetVersion :: Version
+                        , vfDifferences :: Map TypeName (MergeResult NormTypeDecl NormTypeDecl) }
+  deriving (Eq, Show)
+
+data ValidateWarning = ValidateWarning -- add warnings about bits we cannot check (opaque custom)
+  deriving Show
+
+-- | Reasons why an individual 'APIChange' cannot be applied to an API.
+data ApplyFailure
+  = TypeExists { afExistingType :: TypeName } -- for adding or renaming type
+  | TypeDoesNotExist { afMissingType :: TypeName } -- for deleting or renaming a type
+  | TypeWrongKind { afTypeName :: TypeName -- e.g. it's not a record type
+                  , afExpectedKind :: TypeKind }
+  | TypeInUse { afTypeName :: TypeName } -- cannot delete/modify types that are still used
+  | TypeMalformed { afType :: APIType -- type refers to a non-existent type
+                  , afMissingTypes :: Set TypeName }
+  | DeclMalformed { afTypeName :: TypeName -- decl refers to a non-existent type
+                  , afDecl :: NormTypeDecl
+                  , afMissingTypes :: Set TypeName }
+  | FieldExists { afTypeName :: TypeName -- for adding or renaming a field
+                , afTypeKind :: TypeKind
+                , afExistingField :: FieldName }
+  | FieldDoesNotExist { afTypeName :: TypeName -- for deleting or renaming a field
+                      , afTypeKind :: TypeKind
+                      , afMissingField :: FieldName }
+  | FieldBadDefaultValue { afTypeName :: TypeName -- for adding a field, must be a default
+                         , afFieldName :: FieldName -- value compatible with the type
+                         , afFieldType :: APIType
+                         , afBadDefault :: DefaultValue }
+  | DefaultMissing { afTypeName :: TypeName -- for adding a field to a table
+                   , afFieldName :: FieldName }
+  | TableChangeError { afCustomMessage :: String } -- custom error in tableChange
+  deriving (Eq, Show)
+
+-- | The three compound kinds of type, used in error reports.
+data TypeKind = TKRecord | TKUnion | TKEnum
+  deriving (Eq, Show)
+
+-- | Validate the changelog against a starting and target (API,
+-- version) pair: check both APIs are well formed, apply the in-range
+-- changes to the start API, and check the result is the target API.
+-- Returns the full list of table changes to perform on the data.
+validateChanges :: TypeName -> CustomMigrationsTagged -> DataChecks
+                -> (API, Version) -> (API, Version)
+                -> APIChangelog
+                -> Either ValidateFailure ([APITableChange], [ValidateWarning])
+validateChanges root custom chks (api,ver) (api',ver') clog = do
+    -- select changes by version from log
+    (changes, verEnd) <- selectChanges clog ver ver'
+    -- take norm of start and end api,
+    let apiStart = apiNormalForm api
+        apiTarget = apiNormalForm api'
+    -- check start and end APIs are well formed.
+    apiInvariant apiStart ?!? ApiInvalid ver
+    apiInvariant apiTarget ?!? ApiInvalid ver'
+    -- apply the changes to compute the actual end api
+    (apiEnd, changes') <- applyAPIChangesToAPI root custom chks changes apiStart
+    -- check expected end api
+    guard (apiEnd == apiTarget) ?! ChangelogIncomplete verEnd ver' (diffMaps apiEnd apiTarget)
+    return (changes', [])
+
+-- | Select the changes strictly after @ver@, up to and including
+-- @ver'@, together with the last version the changelog actually
+-- reaches in that range.  Downgrades are rejected.
+selectChanges :: APIChangelog -> Version -> Version
+              -> Either ValidateFailure ([APIChange], Version)
+selectChanges clog ver ver'
+  | ver' == ver = return ([], ver')
+  | ver' > ver = do
+    -- the slicing below relies on the entries being version-ordered
+    isChangelogOrdered clog ?!? uncurry ChangelogOutOfOrder
+    let withinRange = takeWhile (\ (_, v, _) -> v <= ver') $
+                      dropWhile (\ (_, v, _) -> v <= ver) $
+                      viewChangelogReverse clog
+        -- NOTE(review): this takes the version of the *first* in-range
+        -- entry; if 'viewChangelogReverse' yields ascending versions,
+        -- the *last* entry would seem the natural end version -- confirm.
+        endVer = case withinRange of
+                   [] -> ver
+                   ((_, v, _):_) -> v
+    return ([ c | (_,_, cs) <- withinRange, c <- cs ], endVer)
+
+  | otherwise = Left (CannotDowngrade ver ver')
+
+-- | Apply a list of changes to an API, returning the updated API and
+-- a list of the changes with appropriate TableChanges interspersed.
+-- On failure, return the list of successfully applied changes, the
+-- change that failed and the reason for the failure.
+applyAPIChangesToAPI :: TypeName -> CustomMigrationsTagged -> DataChecks
+                     -> [APIChange] -> NormAPI
+                     -> Either ValidateFailure (NormAPI, [APITableChange])
+applyAPIChangesToAPI root custom chks changes api = do
+    (api', changes') <- foldM (doChangeAPI root custom chks) (api, []) changes
+    -- changes' is accumulated in reverse; when checking start and end,
+    -- bracket the reversed list with ValidateData requests.
+    let changes'' | chks >= CheckStartAndEnd = addV api $ reverse $ addV api' changes'
+                  | otherwise = reverse changes'
+    return (api', changes'')
+  where
+    -- avoid emitting a duplicate ValidateData at either end
+    addV _ cs@(ValidateData _ : _) = cs
+    addV a cs = ValidateData a : cs
+
+-- | Apply the API change, prepending it (and, if the check level
+-- requires it, a ValidateData request) to the reversed accumulator.
+doChangeAPI :: TypeName -> CustomMigrationsTagged -> DataChecks
+            -> (NormAPI, [APITableChange]) -> APIChange
+            -> Either ValidateFailure (NormAPI, [APITableChange])
+doChangeAPI root custom chks (api, changes) change = do
+    (api', pos) <- applyAPIChangeToAPI root custom change api
+                     ?!? ChangelogEntryInvalid changes change
+    let changes' = APIChange change pos : changes
+        changes'' | validateAfter chks change = ValidateData api' : changes'
+                  | otherwise = changes'
+    return (api', changes'')
+
+-- Checks and performs an API change. If it works then we get back the new
+-- overall api. This is used for two purposes: (1) validating that we can apply
+-- each change in that context, and that we end up with the API we expect,
+-- and (2) getting the intermediate APIs during data migration, because we need
+-- the schema of the intermediate data as part of applying the migration.
+applyAPIChangeToAPI :: TypeName -> CustomMigrationsTagged -> APIChange -> NormAPI
+                    -> Either ApplyFailure (NormAPI, Map TypeName UpdateDeclPos)
+
+applyAPIChangeToAPI _ _ (ChAddType tname tdecl) api = do
+    -- to add a new type, that type must not yet exist
+    guard (not (tname `typeDeclaredInApi` api)) ?! TypeExists tname
+    declIsValid tdecl api ?!? DeclMalformed tname tdecl
+    return (Map.insert tname tdecl api, Map.empty)
+
+applyAPIChangeToAPI _ _ (ChDeleteType tname) api = do
+    -- to delete a type, that type must exist
+    guard (tname `typeDeclaredInApi` api) ?! TypeDoesNotExist tname
+    -- it must also not be used anywhere else in the API
+    guard (not (tname `typeUsedInApi` api)) ?! TypeInUse tname
+    return (Map.delete tname api, Map.empty)
+
+applyAPIChangeToAPI _ _ (ChRenameType tname tname') api = do
+    -- to rename a type, the original type name must exist
+    -- and the new one must not yet exist
+    tinfo <- lookupType tname api
+    guard (not (tname' `typeDeclaredInApi` api)) ?! TypeExists tname'
+    return ( (renameTypeUses tname tname'
+             . Map.insert tname' tinfo . Map.delete tname) api
+           , Map.empty )
+
+applyAPIChangeToAPI _ custom (ChCustomRecord tname tag) api = do
+    -- to make some change to values of a type, the type name must exist
+    tinfo <- lookupType tname api
+    recinfo <- expectRecordType tinfo ?! TypeWrongKind tname TKRecord
+    -- a custom migration may optionally change the record's schema
+    let api' = case recordMigrationSchema custom tag recinfo of
+                 Just recinfo' -> Map.insert tname (NRecordType recinfo') api
+                 Nothing -> api
+    return (api', findUpdatePos tname api)
+
+applyAPIChangeToAPI root _ (ChAddField tname fname ftype mb_defval) api = do
+    tinfo <- lookupType tname api
+    recinfo <- expectRecordType tinfo ?! TypeWrongKind tname TKRecord
+    guard (not (Map.member fname recinfo)) ?! FieldExists tname TKRecord fname
+    typeIsValid ftype api ?!? TypeMalformed ftype
+    -- without a default, the field may only be added to a type that is
+    -- not reachable from the root (no existing data to fill in)
+    case mb_defval of
+      Just defval -> guard (compatibleDefaultValue api ftype defval)
+                       ?! FieldBadDefaultValue tname fname ftype defval
+      Nothing -> guard (not (typeUsedInApiTable root tname api))
+                   ?! DefaultMissing tname fname
+    let tinfo' = NRecordType (Map.insert fname ftype recinfo)
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI _ _ (ChDeleteField tname fname) api = do
+    tinfo <- lookupType tname api
+    recinfo <- expectRecordType tinfo ?! TypeWrongKind tname TKRecord
+    guard (Map.member fname recinfo) ?! FieldDoesNotExist tname TKRecord fname
+    let tinfo' = NRecordType (Map.delete fname recinfo)
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI _ _ (ChRenameField tname fname fname') api = do
+    tinfo <- lookupType tname api
+    recinfo <- expectRecordType tinfo ?! TypeWrongKind tname TKRecord
+    ftype <- Map.lookup fname recinfo ?! FieldDoesNotExist tname TKRecord fname
+    guard (not (Map.member fname' recinfo)) ?! FieldExists tname TKRecord fname'
+    let tinfo' = (NRecordType . Map.insert fname' ftype
+                 . Map.delete fname) recinfo
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI _ _ (ChChangeField tname fname ftype _) api = do
+    -- NOTE(review): unlike ChAddField, the new field type is not
+    -- checked with 'typeIsValid' here -- confirm whether that is
+    -- intentional or an omission.
+    tinfo <- lookupType tname api
+    recinfo <- expectRecordType tinfo ?! TypeWrongKind tname TKRecord
+    guard (Map.member fname recinfo) ?! FieldDoesNotExist tname TKRecord fname
+    let tinfo' = (NRecordType . Map.insert fname ftype) recinfo
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI _ _ (ChAddUnionAlt tname fname ftype) api = do
+    tinfo <- lookupType tname api
+    unioninfo <- expectUnionType tinfo ?! TypeWrongKind tname TKUnion
+    guard (not (Map.member fname unioninfo)) ?! FieldExists tname TKUnion fname
+    typeIsValid ftype api ?!? TypeMalformed ftype
+    let tinfo' = NUnionType (Map.insert fname ftype unioninfo)
+    return (Map.insert tname tinfo' api, Map.empty)
+
+applyAPIChangeToAPI root _ (ChDeleteUnionAlt tname fname) api = do
+    tinfo <- lookupType tname api
+    unioninfo <- expectUnionType tinfo ?! TypeWrongKind tname TKUnion
+    -- existing data could be using the deleted alternative
+    guard (not (typeUsedInApiTable root tname api)) ?! TypeInUse tname
+    guard (Map.member fname unioninfo) ?! FieldDoesNotExist tname TKUnion fname
+    let tinfo' = NUnionType (Map.delete fname unioninfo)
+    return (Map.insert tname tinfo' api, Map.empty)
+
+applyAPIChangeToAPI _ _ (ChRenameUnionAlt tname fname fname') api = do
+    tinfo <- lookupType tname api
+    unioninfo <- expectUnionType tinfo ?! TypeWrongKind tname TKUnion
+    ftype <- Map.lookup fname unioninfo ?! FieldDoesNotExist tname TKUnion fname
+    guard (not (Map.member fname' unioninfo)) ?! FieldExists tname TKUnion fname'
+    let tinfo' = (NUnionType . Map.insert fname' ftype
+                 . Map.delete fname) unioninfo
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI _ _ (ChAddEnumVal tname fname) api = do
+    tinfo <- lookupType tname api
+    enuminfo <- expectEnumType tinfo ?! TypeWrongKind tname TKEnum
+    guard (not (Set.member fname enuminfo)) ?! FieldExists tname TKEnum fname
+    let tinfo' = NEnumType (Set.insert fname enuminfo)
+    return (Map.insert tname tinfo' api, Map.empty)
+
+applyAPIChangeToAPI root _ (ChDeleteEnumVal tname fname) api = do
+    tinfo <- lookupType tname api
+    enuminfo <- expectEnumType tinfo ?! TypeWrongKind tname TKEnum
+    -- existing data could be using the deleted value
+    guard (not (typeUsedInApiTable root tname api)) ?! TypeInUse tname
+    guard (Set.member fname enuminfo) ?! FieldDoesNotExist tname TKEnum fname
+    let tinfo' = NEnumType (Set.delete fname enuminfo)
+    return (Map.insert tname tinfo' api, Map.empty)
+
+applyAPIChangeToAPI _ _ (ChRenameEnumVal tname fname fname') api = do
+    tinfo <- lookupType tname api
+    enuminfo <- expectEnumType tinfo ?! TypeWrongKind tname TKEnum
+    guard (Set.member fname enuminfo) ?! FieldDoesNotExist tname TKEnum fname
+    guard (not (Set.member fname' enuminfo)) ?! FieldExists tname TKEnum fname'
+    let tinfo' = (NEnumType . Set.insert fname'
+                 . Set.delete fname) enuminfo
+    return (Map.insert tname tinfo' api, findUpdatePos tname api)
+
+applyAPIChangeToAPI root custom (ChCustomAll tag) api =
+    -- a whole-database custom migration is applied at the root
+    return ( fromMaybe api (databaseMigrationSchema custom tag api)
+           , Map.singleton root (UpdateHere Nothing))
+
+
+-- | Look up a type declaration, failing with 'TypeDoesNotExist'.
+lookupType :: TypeName -> NormAPI -> Either ApplyFailure NormTypeDecl
+lookupType tname api =
+    case Map.lookup tname api of
+      Just tinfo -> Right tinfo
+      Nothing    -> Left (TypeDoesNotExist tname)
+
+-- | Look up a member's type in a record or union, failing with
+-- 'FieldDoesNotExist'.
+lookupFieldType :: TypeName -> TypeKind -> FieldName -> Map FieldName APIType
+                -> Either ApplyFailure APIType
+lookupFieldType tname tkind fname flds =
+    case Map.lookup fname flds of
+      Just ftype -> Right ftype
+      Nothing    -> Left (FieldDoesNotExist tname tkind fname)
+
+-- | Project the field map out of a record declaration, if it is one.
+expectRecordType :: NormTypeDecl -> Maybe (Map FieldName APIType)
+expectRecordType d = case d of
+                       NRecordType rinfo -> Just rinfo
+                       _                 -> Nothing
+
+-- | Project the alternatives map out of a union declaration, if it is one.
+expectUnionType :: NormTypeDecl -> Maybe (Map FieldName APIType)
+expectUnionType d = case d of
+                      NUnionType uinfo -> Just uinfo
+                      _                -> Nothing
+
+-- | Project the value set out of an enum declaration, if it is one.
+expectEnumType :: NormTypeDecl -> Maybe (Set FieldName)
+expectEnumType d = case d of
+                     NEnumType einfo -> Just einfo
+                     _               -> Nothing
+
+-- | The \"oh noes!\" operator: convert a 'Maybe' to an 'Either',
+-- supplying the error for the 'Nothing' case.
+(?!) :: Maybe a -> e -> Either e a
+mb ?! e = maybe (Left e) Right mb
+
+-- | Map the error side of an 'Either', leaving successes untouched.
+(?!?) :: Either e a -> (e -> e') -> Either e' a
+r ?!? f = either (Left . f) Right r
+
+-----------------------------------
+-- Performing data transformation
+--
+
+-- | This is the low level one that just does the changes.
+--
+-- We assume the changes have already been validated, and that the data
+-- matches the API.
+--
+applyChangesToDatabase :: TypeName -> CustomMigrationsTagged
+                       -> JS.Value -> [APITableChange]
+                       -> Either (ValueError, Position) JS.Value
+applyChangesToDatabase root custom = foldM (applyChangeToDatabase root custom)
+  -- just apply each of the individual changes in sequence to the whole dataset
+
+-- | Apply one table change: either an actual data change (at the
+-- positions recorded alongside it) or a validation pass.
+applyChangeToDatabase :: TypeName -> CustomMigrationsTagged
+                      -> JS.Value -> APITableChange
+                      -> Either (ValueError, Position) JS.Value
+applyChangeToDatabase root custom v (APIChange c upds) =
+    updateTypeAt upds (applyChangeToData c custom) (UpdateNamed root) v []
+applyChangeToDatabase root _ v (ValidateData api) = do
+    dataMatchesNormAPI root api v
+    return v
+
+
+-- | Apply an update at the given position in a declaration's value
+updateDeclAt :: Map TypeName UpdateDeclPos
+             -> (JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+             -> UpdateDeclPos
+             -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+updateDeclAt _ alter (UpdateHere Nothing) v p = alter v p
+-- for UpdateHere (Just upd): update the inner positions first, then
+-- apply the alteration to the result
+updateDeclAt upds alter (UpdateHere (Just upd)) v p = flip alter p =<< updateDeclAt upds alter upd v p
+updateDeclAt upds alter (UpdateRecord upd_flds) v p = withObjectMatchingFields upd_flds
+                                                     (maybe (pure . pure) (updateTypeAt upds alter)) v p
+updateDeclAt upds alter (UpdateUnion upd_alts) v p = withObjectMatchingUnion upd_alts
+                                                    (maybe (pure . pure) (updateTypeAt upds alter)) v p
+updateDeclAt upds alter (UpdateType upd) v p = updateTypeAt upds alter upd v p
+
+-- | Apply an update at the given position in a type's value
+updateTypeAt :: Map TypeName UpdateDeclPos
+             -> (JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+             -> UpdateTypePos
+             -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+updateTypeAt upds alter (UpdateList upd) v p = withArrayElems (updateTypeAt upds alter upd) v p
+updateTypeAt upds alter (UpdateMaybe upd) v p = withMaybe (updateTypeAt upds alter upd) v p
+-- a named type with no recorded update position is left untouched
+updateTypeAt upds alter (UpdateNamed tname) v p = case Map.lookup tname upds of
+                                                    Just upd -> updateDeclAt upds alter upd v p
+                                                    Nothing -> pure v
+
+
+-- | This actually applies the change to the data value, assuming it
+-- is already in the correct place
+applyChangeToData :: APIChange -> CustomMigrationsTagged
+                  -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+
+applyChangeToData (ChAddField _tname fname _ftype (Just defval)) _ =
+    let newFieldValue = defaultValueAsJsValue defval
+    in withObject (\ v _ -> pure $ HMap.insert (fieldKey fname) newFieldValue v)
+-- adding a field with no default to live data is impossible; the
+-- validation phase should have rejected this change already
+applyChangeToData (ChAddField tname fname _ftype Nothing) _ =
+    \ _ p -> Left (InvalidAPI (DefaultMissing tname fname), p)
+
+applyChangeToData (ChDeleteField _ fname) _ =
+    withObject (\ v _ -> pure $ HMap.delete (fieldKey fname) v)
+
+applyChangeToData (ChRenameField _ fname fname') _ =
+    withObject $ \rec p -> case HMap.lookup k_fname rec of
+      Just field -> renameField field rec
+      Nothing -> Left (JSONError MissingField, InField k_fname : p)
+  where
+    k_fname = fieldKey fname
+    k_fname' = fieldKey fname'
+    renameField x = pure . HMap.insert k_fname' x . HMap.delete k_fname
+
+applyChangeToData (ChChangeField _ fname _ftype tag) custom =
+    withObjectField (fieldKey fname) (liftMigration $ fieldMigration custom tag)
+
+-- union values are single-field objects; renaming an alternative only
+-- affects values currently using that alternative
+applyChangeToData (ChRenameUnionAlt _ fname fname') _ = withObject $ \un p ->
+    case HMap.toList un of
+      [(k, r)] | k == fieldKey fname -> return $ HMap.singleton (fieldKey fname') r
+               | otherwise -> return un
+      _ -> Left (JSONError $ SyntaxError "Not singleton", p)
+
+applyChangeToData (ChRenameEnumVal _ fname fname') _ = withString $ \s _ ->
+    if s == fieldKey fname then return (fieldKey fname')
+                           else return s
+
+applyChangeToData (ChCustomRecord _ tag) custom = withObject (liftMigration $ recordMigration custom tag)
+applyChangeToData (ChCustomAll tag) custom = withObject (liftMigration $ databaseMigration custom tag)
+
+-- purely schema-level changes require no data transformation
+applyChangeToData (ChAddType _ _) _ = pure . pure
+applyChangeToData (ChDeleteType _) _ = pure . pure
+applyChangeToData (ChRenameType _ _) _ = pure . pure
+applyChangeToData (ChAddUnionAlt _ _ _) _ = pure . pure
+applyChangeToData (ChDeleteUnionAlt _ _) _ = pure . pure
+applyChangeToData (ChAddEnumVal _ _) _ = pure . pure
+applyChangeToData (ChDeleteEnumVal _ _) _ = pure . pure
+
+
+-- | Lift a position-unaware migration function into one that tags any
+-- failure with the position at which it occurred.
+liftMigration :: (a -> Either ValueError b)
+              -> (a -> Position -> Either (ValueError, Position) b)
+liftMigration f v p = either (\ e -> Left (e, p)) Right (f v)
+
+-------------------------------------
+-- Utils for manipulating JS.Values
+--
+
+-- | Errors that can arise while transforming or validating a dataset.
+data ValueError
+  = JSONError JSONError
+  | CustomMigrationError String JS.Value
+  | InvalidAPI ApplyFailure
+  deriving (Eq, Show)
+
+-- | Run an alteration on a JSON object, failing on any other value.
+withObject :: (JS.Object -> Position -> Either (ValueError, Position) JS.Object)
+           -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withObject alter (JS.Object obj) p = JS.Object <$> alter obj p
+withObject _ v p = Left (JSONError $ expectedObject v, p)
+
+-- | Run an alteration on one named field of a JSON object; the field
+-- must be present.
+withObjectField :: T.Text -> (JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+                -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withObjectField field alter (JS.Object obj) p =
+    case HMap.lookup field obj of
+      Nothing -> Left (JSONError MissingField, InField field : p)
+      Just fvalue -> JS.Object <$> (HMap.insert field
+                                     <$> (alter fvalue (InField field : p))
+                                     <*> pure obj)
+withObjectField _ _ v p = Left (JSONError $ expectedObject v, p)
+
+-- | Run a per-field alteration on an object whose fields must match
+-- the given map exactly: a spec entry without a data field is a
+-- MissingField error, a data field without a spec entry is an
+-- UnexpectedField error.
+withObjectMatchingFields :: Map FieldName a
+                         -> (a -> JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+                         -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withObjectMatchingFields m f (JS.Object obj) p = do
+    zs <- matchMaps (Map.mapKeys fieldKey m) (hmapToMap obj) ?!? toErr
+    obj' <- mapM (\ (k, (ty, val)) -> (,) k <$> (f ty val (InField k : p))) zs
+    return $ JS.Object $ HMap.fromList obj'
+  where
+    -- Left: key only in the spec map; Right: key only in the data
+    toErr (k, Left _) = (JSONError MissingField, InField k : p)
+    toErr (k, Right _) = (JSONError UnexpectedField, InField k : p)
+
+withObjectMatchingFields _ _ v p = Left (JSONError $ expectedObject v, p)
+
+-- | As 'withObjectMatchingFields', but for a union value: the object
+-- must have exactly one field, which must name a known alternative.
+withObjectMatchingUnion :: Map FieldName a
+                        -> (a -> JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+                        -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withObjectMatchingUnion m f (JS.Object obj) p
+  | [(k, r)] <- HMap.toList obj
+  = do x <- Map.lookup (fromFieldKey k) m ?! (JSONError UnexpectedField, InField k : p)
+       r' <- f x r (InField k : p)
+       return $ JS.Object $ HMap.singleton k r'
+withObjectMatchingUnion _ _ _ p = Left (JSONError $ SyntaxError "Not singleton", p)
+
+-- | Run an alteration on a JSON array as a whole.
+withArray :: (JS.Array -> Position -> Either (ValueError, Position) JS.Array)
+          -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withArray alter (JS.Array arr) p = JS.Array <$> alter arr p
+withArray _ v p = Left (JSONError $ expectedArray v, p)
+
+-- | Run an alteration on each element of a JSON array, recording the
+-- element index in the position.
+withArrayElems :: (JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+               -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withArrayElems alter (JS.Array arr) p = JS.Array <$> V.mapM alterAt (V.indexed arr)
+  where
+    alterAt (i, v) = alter v (InElem i : p)
+withArrayElems _ v p = Left (JSONError $ expectedArray v, p)
+
+-- | Run an alteration on an optional value: JSON null passes through.
+withMaybe :: (JS.Value -> Position -> Either (ValueError, Position) JS.Value)
+          -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withMaybe _ JS.Null _ = return JS.Null
+withMaybe f v p = f v p
+
+-- | Run an alteration on a JSON string, failing on any other value.
+withString :: (T.Text -> Position -> Either (ValueError, Position) T.Text)
+           -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+withString alter (JS.String s) p = JS.String <$> alter s p
+withString _ v p = Left (JSONError $ expectedString v, p)
+
+-- | The JSON object key for a table/type name.
+tableKey :: TypeName -> T.Text
+tableKey = T.pack . _TypeName
+
+-- | The JSON object key for a field name.
+fieldKey :: FieldName -> T.Text
+fieldKey = T.pack . _FieldName
+
+-- | Inverse of 'fieldKey'.
+fromFieldKey :: T.Text -> FieldName
+fromFieldKey k = FieldName (T.unpack k)
+
+-- | Is the default value compatible with the given type?  Clause
+-- order matters: @TyMaybe@ accepts 'DefValMaybe' first, and otherwise
+-- accepts any default compatible with the wrapped type.  Synonyms and
+-- newtypes are followed; enum defaults must be declared values.
+compatibleDefaultValue :: NormAPI -> APIType -> DefaultValue -> Bool
+compatibleDefaultValue _ (TyList _) DefValList = True
+compatibleDefaultValue _ (TyMaybe _) DefValMaybe = True
+compatibleDefaultValue api (TyMaybe ty) defval = compatibleDefaultValue api ty defval
+compatibleDefaultValue _ (TyBasic bt) defval =
+    compatibleBasicDefaultValue bt defval
+compatibleDefaultValue env (TyName tname) defval =
+    case Map.lookup tname env of
+      Just (NTypeSynonym t) -> compatibleDefaultValue env t defval
+      Just (NNewtype bt) -> compatibleBasicDefaultValue bt defval
+      Just (NEnumType vals) -> case defval of
+                                 DefValString s -> fromFieldKey s `Set.member` vals
+                                 _ -> False
+      _ -> False
+compatibleDefaultValue _ _ _ = False
+
+-- | Is the default value compatible with the given basic type?  A
+-- binary default must be a valid base64 string.
+compatibleBasicDefaultValue :: BasicType -> DefaultValue -> Bool
+compatibleBasicDefaultValue BTstring (DefValString _) = True
+compatibleBasicDefaultValue BTbinary (DefValString v) = case B64.decode (B.pack (T.unpack v)) of
+                                                          Left _ -> False
+                                                          Right _ -> True
+compatibleBasicDefaultValue BTbool (DefValBool _) = True
+compatibleBasicDefaultValue BTint (DefValInt _) = True
+compatibleBasicDefaultValue BTutc (DefValUtc _) = True
+compatibleBasicDefaultValue _ _ = False
+
+
+-------------------------------------------
+-- Validation that a dataset matches an API
+--
+
+-- | Check that a dataset is valid against a DSL-level API, by
+-- normalising the API first.
+dataMatchesAPI :: TypeName -> API -> JS.Value -> Either (ValueError, Position) ()
+dataMatchesAPI root api = dataMatchesNormAPI root (apiNormalForm api)
+
+-- | Check that a dataset is valid against a normalised API, starting
+-- from the root type and descending structurally.
+dataMatchesNormAPI :: TypeName -> NormAPI -> JS.Value -> Either (ValueError, Position) ()
+dataMatchesNormAPI root api db = void $ valueMatches (TyName root) db []
+  where
+    declMatches :: NormTypeDecl -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+    declMatches (NRecordType flds) = withObjectMatchingFields flds valueMatches
+    declMatches (NUnionType alts) = withObjectMatchingUnion alts valueMatches
+    declMatches (NEnumType vals) = withString $ \ s p ->
+                                     if fromFieldKey s `Set.member` vals
+                                       then return s
+                                       else Left (JSONError UnexpectedField, InField s : p)
+    declMatches (NTypeSynonym t) = valueMatches t
+    declMatches (NNewtype bt) = valueMatchesBasic bt
+
+    valueMatches :: APIType -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+    valueMatches (TyList t) = withArrayElems (valueMatches t)
+    valueMatches (TyMaybe t) = withMaybe (valueMatches t)
+    valueMatches (TyName tname) = \ v p -> do
+      d <- lookupType tname api ?!? (\ f -> (InvalidAPI f, p))
+      declMatches d v p
+    valueMatches (TyBasic bt) = valueMatchesBasic bt
+    -- any JSON value matches TyJSON
+    valueMatches TyJSON = \ v _ -> return v
+
+    -- basic values are checked by attempting to decode them
+    valueMatchesBasic :: BasicType -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+    valueMatchesBasic BTstring = expectDecodes (fromJSONWithErrs :: Decode T.Text)
+    valueMatchesBasic BTbinary = expectDecodes (fromJSONWithErrs :: Decode Binary)
+    valueMatchesBasic BTbool = expectDecodes (fromJSONWithErrs :: Decode Bool)
+    valueMatchesBasic BTint = expectDecodes (fromJSONWithErrs :: Decode Int)
+    valueMatchesBasic BTutc = expectDecodes (fromJSONWithErrs :: Decode UTCTime)
+
+    expectDecodes :: Decode t -> JS.Value -> Position -> Either (ValueError, Position) JS.Value
+    expectDecodes f v p = case f v of
+      Right _ -> return v
+      Left ((je, _):_) -> Left (JSONError je, p)
+      Left [] -> Left (JSONError $ SyntaxError "expectDecodes", p)
+
+-- | A decoder from JSON to some value, reporting positioned errors.
+type Decode t = JS.Value -> Either [(JSONError, Position)] t
+
+-------------------------------------
+-- Utils for merging and diffing maps
+--
+
+-- | The result of merging two maps: a key may be present on either
+-- side only, or on both.
+data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b
+  deriving (Eq, Show)
+
+-- | Merge two maps, recording for each key which side(s) it came from.
+mergeMaps :: Ord k => Map k a -> Map k b -> Map k (MergeResult a b)
+mergeMaps m1 m2 = Map.unionWith combine (Map.map OnlyInLeft m1) (Map.map OnlyInRight m2)
+  where
+    -- 'unionWith' only ever combines one element from each map, so
+    -- the left argument is always OnlyInLeft and the right OnlyInRight.
+    combine (OnlyInLeft a) (OnlyInRight b) = InBoth a b
+
+-- | Keep only the keys at which the two maps differ.
+diffMaps :: (Eq a, Ord k) => Map k a -> Map k a -> Map k (MergeResult a a)
+diffMaps m1 m2 = Map.filter changed (mergeMaps m1 m2)
+  where
+    changed (InBoth a b) = a /= b
+    changed _            = True
+
+-- Attempts to match the keys of the maps to produce a map from keys
+-- to pairs, represented as a list until we have traverseWithKey from
+-- more recent versions of containers.  Fails on the first key present
+-- on only one side, reporting which side it came from.
+matchMaps :: Ord k => Map k a -> Map k b -> Either (k, Either a b) [(k, (a, b))]
+matchMaps m1 m2 = mapM (uncurry win) $ Map.toList $ mergeMaps m1 m2
+  where
+    win k (InBoth x y) = return (k, (x, y))
+    win k (OnlyInLeft x) = Left (k, Left x)
+    win k (OnlyInRight x) = Left (k, Right x)
+
+-- | Convert a HashMap into an ordered Map.
+hmapToMap :: Ord k => HMap.HashMap k a -> Map.Map k a
+hmapToMap hm = Map.fromList (HMap.toList hm)
+
+
+-------------------------------------
+-- Pretty-printing
+--
+
+-- | Render a migration failure as a multi-line human-readable string.
+prettyMigrateFailure :: MigrateFailure -> String
+prettyMigrateFailure = unlines . ppLines
+
+-- | Render a validation failure as a multi-line human-readable string.
+prettyValidateFailure :: ValidateFailure -> String
+prettyValidateFailure = unlines . ppLines
+
+-- | Render a value error as a multi-line human-readable string.
+prettyValueError :: ValueError -> String
+prettyValueError = unlines . ppLines
+
+-- | Render a positioned value error as a multi-line string.
+prettyValueErrorPosition :: (ValueError, Position) -> String
+prettyValueErrorPosition = unlines . ppLines
+
+
+-- | Types that render to a single line of pretty-printed output.
+class PP t where
+  pp :: t -> String
+
+-- | Types that render to multiple lines of pretty-printed output.
+class PPLines t where
+  ppLines :: t -> [String]
+
+
+instance PP [Char] where
+  pp = id
+
+instance PP Version where
+  pp = showVersion
+
+-- Sets render as a comma-separated list of their elements.
+instance PP t => PP (Set t) where
+  pp s = intercalate ", " (map pp $ Set.toList s)
+
+instance PP T.Text where
+  pp = T.unpack
+
+instance PPLines JS.Value where
+  ppLines v = lines $ BL.unpack $ JS.encodePretty v
+
+instance PP TypeName where
+  pp = _TypeName
+
+instance PP FieldName where
+  pp = _FieldName
+
+-- Types render in the DSL's concrete syntax.
+instance PP APIType where
+  pp (TyList ty) = "[" ++ pp ty ++ "]"
+  pp (TyMaybe ty) = "? " ++ pp ty
+  pp (TyName t) = pp t
+  pp (TyBasic b) = pp b
+  pp TyJSON = "json"
+
+instance PP BasicType where
+  pp BTstring = "string"
+  pp BTbinary = "binary"
+  pp BTbool = "bool"
+  pp BTint = "integer"
+  pp BTutc = "utc"
+
+instance PP DefaultValue where
+  pp DefValList = "[]"
+  pp DefValMaybe = "nothing"
+  pp (DefValString t) = show t
+  pp (DefValBool True) = "true"
+  pp (DefValBool False) = "false"
+  pp (DefValInt i) = show i
+  pp (DefValUtc u) = show u
+
+instance PP TypeKind where
+  pp TKRecord = "record"
+  pp TKUnion = "union"
+  pp TKEnum = "enum"
+
-- | Render a 'TypeKind' preceded by its indefinite article, for use in
-- error messages.
ppATypeKind :: TypeKind -> String
ppATypeKind k = case k of
                  TKRecord -> "a record"
                  TKUnion  -> "a union"
                  TKEnum   -> "an enum"

-- | The noun used for a member of each kind of type declaration.
ppMemberWord :: TypeKind -> String
ppMemberWord k = case k of
                  TKRecord -> "field"
                  TKUnion  -> "alternative"
                  TKEnum   -> "value"
+
+
-- | Prepend a string onto the first line of a block of lines, yielding
-- a singleton block when there are no lines.
inFrontOf :: String -> [String] -> [String]
inFrontOf x ss = case ss of
                   []         -> [x]
                   (s : rest) -> (x ++ s) : rest

-- | Indent every line by two spaces.
indent :: [String] -> [String]
indent ls = [ ' ' : ' ' : l | l <- ls ]
+
-- A list of printable things prints as the concatenation of their lines.
instance PPLines t => PPLines [t] where
  ppLines = concatMap ppLines

-- A pair prints its components one after the other.
instance (PPLines s, PPLines t) => PPLines (s, t) where
  ppLines (s, t) = ppLines s ++ ppLines t
+
-- | Render a single changelog entry in (roughly) the concrete syntax
-- of the changelog DSL, one change per group of lines.
instance PPLines APIChange where
  ppLines (ChAddType t d) = ("added " ++ pp t ++ " ") `inFrontOf` ppLines d
  ppLines (ChDeleteType t) = ["removed " ++ pp t]
  ppLines (ChRenameType t t') = ["renamed " ++ pp t ++ " to " ++ pp t']
  ppLines (ChAddField t f ty mb_v) = [ "changed record " ++ pp t
                                     , " field added " ++ pp f ++ " :: " ++ pp ty
                                       ++ maybe "" (\ v -> " default " ++ pp v) mb_v]
  ppLines (ChDeleteField t f) = ["changed record " ++ pp t, " field removed " ++ pp f]
  ppLines (ChRenameField t f f') = [ "changed record " ++ pp t
                                   , " field renamed " ++ pp f ++ " to " ++ pp f']
  ppLines (ChChangeField t f ty c) = [ "changed record " ++ pp t
                                     , " field changed " ++ pp f ++ " :: " ++ pp ty
                                       ++ " migration " ++ pp c]
  ppLines (ChAddUnionAlt t f ty) = [ "changed union " ++ pp t
                                   , " alternative added " ++ pp f ++ " :: " ++ pp ty]
  ppLines (ChDeleteUnionAlt t f) = [ "changed union " ++ pp t
                                   , " alternative removed " ++ pp f]
  ppLines (ChRenameUnionAlt t f f') = [ "changed union " ++ pp t
                                      , " alternative renamed " ++ pp f ++ " to " ++ pp f']
  ppLines (ChAddEnumVal t f) = [ "changed enum " ++ pp t
                               , " alternative added " ++ pp f]
  ppLines (ChDeleteEnumVal t f) = [ "changed enum " ++ pp t
                                  , " alternative removed " ++ pp f]
  ppLines (ChRenameEnumVal t f f') = [ "changed enum " ++ pp t
                                     , " alternative renamed " ++ pp f ++ " to " ++ pp f']
  ppLines (ChCustomRecord t c) = ["changed record " ++ pp t ++ " migration " ++ pp c]
  ppLines (ChCustomAll c) = ["migration " ++ pp c]
+
-- | Render a normalised type declaration: the kind keyword followed by
-- one indented line per field/alternative/value.
instance PPLines NormTypeDecl where
  ppLines (NRecordType flds) = "record" : map (\ (f, ty) -> "  " ++ pp f
                                                            ++ " :: " ++ pp ty)
                                              (Map.toList flds)
  ppLines (NUnionType alts) = "union" : map (\ (f, ty) -> "  " ++ pp f
                                                          ++ " :: " ++ pp ty)
                                            (Map.toList alts)
  ppLines (NEnumType vals) = "enum" : map (\ v -> "  " ++ pp v)
                                          (Set.toList vals)
  ppLines (NTypeSynonym t) = [pp t]
  ppLines (NNewtype b) = ["basic " ++ pp b]
+
-- | A migration failure is either a changelog-validation failure or a
-- value error together with the position at which it occurred.
instance PPLines MigrateFailure where
  ppLines (ValidateFailure x) = ppLines x
  ppLines (ValueError x ps) = ppLines x ++ map prettyStep ps
+
-- | Human-readable explanations of the ways a changelog can fail to
-- validate against the API it claims to describe.
instance PPLines ValidateFailure where
  ppLines (ChangelogOutOfOrder later earlier) =
      ["Changelog out of order: version " ++ pp later
       ++ " appears after version " ++ pp earlier]
  ppLines (CannotDowngrade from to) =
      ["Cannot downgrade from version " ++ pp from
       ++ " to version " ++ pp to]
  ppLines (ApiInvalid ver missing) =
      ["Missing declarations in API version " ++ pp ver ++ ": " ++ pp missing]
  ppLines (ChangelogEntryInvalid succs change af) =
      ppLines af ++ ("when applying the change" : indent (ppLines change))
      -- Only mention the earlier, successful changes if there were any.
      ++ if not (null succs)
         then "after successfully applying the changes:"
              : indent (ppLines succs)
         else []
  ppLines (ChangelogIncomplete ver ver' diffs) =
      ("Changelog incomplete! Differences between log version ("
       ++ showVersion ver ++ ") and latest version (" ++ showVersion ver' ++ "):")
      : indent (concatMap (uncurry ppDiff) $ Map.toList diffs)
+
-- | Only real changes are printed; validation pseudo-changes are silent.
instance PPLines APITableChange where
  ppLines (APIChange c _) = ppLines c
  ppLines (ValidateData _) = []
+
-- | Describe the difference between two versions of a type declaration
-- as lines of text.  'InBoth' declarations of matching kinds are
-- compared member-by-member; otherwise they are reported as
-- incompatible outright.
ppDiff :: TypeName -> MergeResult NormTypeDecl NormTypeDecl -> [String]
ppDiff t (OnlyInLeft _) = ["removed " ++ pp t]
ppDiff t (OnlyInRight d) = ("added " ++ pp t ++ " ") `inFrontOf` ppLines d
ppDiff t (InBoth (NRecordType flds) (NRecordType flds')) =
    ("changed record " ++ pp t)
    : (concatMap (uncurry (ppDiffFields "field")) $ Map.toList $ diffMaps flds flds')
ppDiff t (InBoth (NUnionType alts) (NUnionType alts')) =
    ("changed union " ++ pp t)
    : (concatMap (uncurry (ppDiffFields "alternative")) $ Map.toList $ diffMaps alts alts')
ppDiff t (InBoth (NEnumType vals) (NEnumType vals')) =
    ("changed enum " ++ pp t)
    : (map (\ x -> "  alternative removed " ++ pp x) $ Set.toList $ vals Set.\\ vals')
    ++ (map (\ x -> "  alternative added " ++ pp x) $ Set.toList $ vals' Set.\\ vals)
ppDiff t (InBoth _ _) = ["incompatible definitions of " ++ pp t]

-- | Describe the difference between two versions of a single member
-- (field or alternative); the first argument is the member word used
-- in the message ("field" or "alternative").
ppDiffFields :: String -> FieldName -> MergeResult APIType APIType -> [String]
ppDiffFields s f (OnlyInLeft _) = ["  " ++ s ++ " removed " ++ pp f]
ppDiffFields s f (OnlyInRight ty) = ["  " ++ s ++ " added " ++ pp f ++ " :: " ++ pp ty]
ppDiffFields s f (InBoth ty ty') = [ "  incompatible types for " ++ s ++ " " ++ pp f
                                   , "    changelog type: " ++ pp ty
                                   , "    latest version type: " ++ pp ty' ]
+
+
-- | Human-readable explanations of the ways a single changelog entry
-- can fail to apply to an API.
instance PPLines ApplyFailure where
  ppLines (TypeExists t) = ["Type " ++ pp t ++ " already exists"]
  ppLines (TypeDoesNotExist t) = ["Type " ++ pp t ++ " does not exist"]
  ppLines (TypeWrongKind t k) = ["Type " ++ pp t ++ " is not " ++ ppATypeKind k]
  ppLines (TypeInUse t) = ["Type " ++ pp t ++ " is in use, so it cannot be modified"]
  ppLines (TypeMalformed ty xs) = ["Type " ++ pp ty
                                   ++ " is malformed, missing declarations:"
                                  , "  " ++ pp xs]
  ppLines (DeclMalformed t _ xs) = [ "Declaration of " ++ pp t
                                     ++ " is malformed, missing declarations:"
                                   , "  " ++ pp xs]
  ppLines (FieldExists t k f) = ["Type " ++ pp t ++ " already has the "
                                 ++ ppMemberWord k ++ " " ++ pp f]
  ppLines (FieldDoesNotExist t k f) = ["Type " ++ pp t ++ " does not have the "
                                       ++ ppMemberWord k ++ " " ++ pp f]
  ppLines (FieldBadDefaultValue _ _ ty v) = ["Default value " ++ pp v
                                             ++ " is not compatible with the type " ++ pp ty]
  ppLines (DefaultMissing t f) = ["Field " ++ pp f ++ " does not have a default value, but "
                                  ++ pp t ++ " occurs in the database"]
  ppLines (TableChangeError s) = ["Error when detecting changed tables:", "  " ++ s]
+
+
-- | Errors encountered while migrating an individual value.
instance PPLines ValueError where
  ppLines (JSONError e) = [prettyJSONError e]
  ppLines (CustomMigrationError e v) = [ "Custom migration error:", "  " ++ e
                                       , "when migrating value"] ++ indent (ppLines v)
  ppLines (InvalidAPI af) = "Invalid API detected during value migration:"
                            : indent (ppLines af)

-- A position step prints on a single line.
instance PPLines Step where
  ppLines s = [prettyStep s]
296 src/Data/API/Doc/Call.hs
@@ -0,0 +1,296 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+module Data.API.Doc.Call
+ ( callHtml
+ ) where
+
+import Data.API.Doc.Subst
+import Data.API.Doc.Types
+
+import Data.List
+import Data.Ord
+
+
-- | Render the complete HTML documentation for one API 'Call':
-- summary header followed by the headers, parameters, request body,
-- views and sample-response sections.
callHtml :: DocInfo -> Dict -> Call -> String
callHtml di dct0 call@Call{..} = concat
    [ container_open dct
    , headersHtml di call dct
    , paramsHtml di call dct
    , bodyHtml di call dct
    , viewsHtml di call dct
    , samplesHtml di call dct
    ]
  where
    -- Extend the incoming dictionary with this call's template variables.
    dct = flip extDict dct0
        [ (,) "HTTP-METHOD" $ call_http_method
        , (,) "PATH" $ ('/' :) $ intercalate "/" call_path   -- was: concat $ intersperse "/"
        , (,) "CALL-DESCRIPTION" call_description
        , (,) "AUTH-REQUIRED" $ if call_auth_required then "yes" else "no"
        ]
+
-- | Render the \"Headers\" section of a call page; empty when the call
-- takes no headers.  Headers are listed sorted by name.
headersHtml :: DocInfo -> Call -> Dict -> String
headersHtml di Call{..} c_dct =
    case call_headers of
      [] -> ""
      _  -> concat
                [ headers_head
                , concatMap mk_header $ sortBy (comparing header_name) call_headers
                , headers_foot
                ]
  where
    mk_header hdr = header_content $ call_dict_header di c_dct hdr
+
+
-- | Render the \"Parameters\" section of a call page, or a placeholder
-- message when the call takes no parameters.
paramsHtml :: DocInfo -> Call -> Dict -> String
paramsHtml di Call{..} c_dct
  | null call_params = no_params c_dct
  | otherwise        = params_head c_dct
                    ++ paramsRows di c_dct call_params
                    ++ params_foot c_dct
+
-- | One HTML table row per parameter, listed sorted by parameter name.
paramsRows :: DocInfo -> Dict -> [Param] -> String
paramsRows di c_dct ps =
    concat [ parameter_row (call_dict_param di c_dct p)
           | p <- sortBy (comparing param_name) ps ]
+
-- | Render the \"Sample Body\" section; empty when the call takes no
-- request body.
bodyHtml :: DocInfo -> Call -> Dict -> String
bodyHtml di Call{..} c_dct =
    case call_body of
      Nothing -> ""
      Just (typ,spl) ->
          body_sample $
          flip extDict c_dct
              [ (,) "BODY-TYPE" (renderAPIType di typ)
              , (,) "BODY-SAMPLE" spl
              ]
+
-- | Render the \"Views\" section: a summary table of all views,
-- followed by a detailed parameter table for each view that has
-- parameters.  Empty when the call has no views.
viewsHtml :: DocInfo -> Call -> Dict -> String
viewsHtml di Call{..} c_dct
  | null call_views = ""
  | otherwise = concat [ views_head
                       , concatMap (view_content . view_dict) call_views
                       , views_foot
                       , concatMap view_detailed call_views
                       ]
  where
    -- Template variables for one view.
    view_dict vw = flip extDict c_dct
        [ (,) "VIEW-ID" $ view_id vw
        , (,) "VIEW-TYPE" $ renderAPIType di $ view_type vw
        , (,) "VIEW-DOC" $ view_doc vw
        ]

    -- Per-view parameter table; omitted for parameterless views.
    view_detailed vw
      | null (view_params vw) = ""
      | otherwise = concat [ view_detail_head v_dct
                           , paramsRows di v_dct $ view_params vw
                           , view_detail_foot
                           ]
      where v_dct = view_dict vw
+
-- | Render the \"Sample Responses\" section: a heading followed by one
-- rendered block per entry in 'call_samples'.
samplesHtml :: DocInfo -> Call -> Dict -> String
samplesHtml di Call{..} c_dct = concat
    [ sample_heading c_dct
    , concatMap mk_sample call_samples   -- was: concat $ map mk_sample
    ]
  where
    -- Render a single sample with its own extended dictionary.
    mk_sample spl = sample $ call_dict_sample di c_dct spl
+
-- | Extend the dictionary with the template variables for one request 'Header'.
call_dict_header :: DocInfo -> Dict -> Header -> Dict
call_dict_header di dct Header{..} = flip extDict dct
    [ (,) "HEADER-NAME" header_name
    , (,) "HEADER-OR" $ if header_required then "Required" else "Optional"
    , (,) "HEADER-EXAMPLE" header_expl
    , (,) "HEADER-DESC" header_desc
    , (,) "HEADER-TYPE" $ renderAPIType di header_type
    ]

-- | Extend the dictionary with the template variables for one 'Param'.
-- A parameter's type may be free-form text ('Left') or a real API type.
call_dict_param :: DocInfo -> Dict -> Param -> Dict
call_dict_param di dct Param{..} = flip extDict dct
    [ (,) "PARAMETER-NAME" param_name
    , (,) "PARAMETER-OR" $ if param_required then "Required" else "Optional"
    , (,) "PARAMETER-EXAMPLE" param_expl
    , (,) "PARAMETER-DESC" param_desc
    , (,) "PARAMETER-TYPE" $ either id (renderAPIType di) param_type
    ]

-- | Extend the dictionary with the template variables for one 'Sample'
-- response; a missing response body renders as the empty string.
call_dict_sample :: DocInfo -> Dict -> Sample -> Dict
call_dict_sample di dct Sample{..} = flip extDict dct
    [ (,) "HTTP-STATUS" $ show sample_status
    , (,) "SAMPLE-TYPE" $ renderBodyType di sample_type
    , (,) "SAMPLE-RESPONSE" $ maybe "" id sample_response
    ]
+
+
-- | Opening HTML for a call page: breadcrumbs, title, description and
-- the request-summary table.  Template variables are filled from the dict.
container_open :: Dict -> String
container_open dct = prep dct
    [ "  <nav class='breadcrumbs'>"
    , "    <a href='<<BC-HOME-URL>>'><<BC-HOME-TEXT>></a> &raquo; <<HTTP-METHOD>> <<PATH>>"
    , "  </nav>"
    , "  <h2>"
    , "    <<HTTP-METHOD>> <<PATH>>"
    , "  </h2>"
    , "  <br>"
    , "  <div class='description'>"
    , "    <p><<CALL-DESCRIPTION>></p>"
    , "  </div>"
    , "  <table border='0' cellspacing='3' cellpadding='0' class='details-table'>"
    , "    <tr>"
    , "      <td width='180'><strong>Request Method</strong></td>"
    , "      <td><code><<HTTP-METHOD>></code>&nbsp;</td>"
    , "    </tr>"
    , "    <tr>"
    , "      <td width='180'><strong>Resource URI</strong></td>"
    , "      <td><code><<ENDPOINT>><<PATH>></code>&nbsp;</td>"
    , "    </tr>"
    , "    <tr>"
    , "      <td width='180'><strong>Authentication Required</strong></td>"
    , "      <td><code>"
    , "        <<AUTH-REQUIRED>>"
    , "        </code>&nbsp;"
    , "      </td>"
    , "    </tr>"
-- NOTE: request-body row currently disabled.
--  , "    <tr>"
--  , "      <td width='180'><strong>Request Body</strong></td>"
--  , "      <td><b>" ++ mkLink dct "POST-NODE-URL" "POST-NODE" ++ "</b></td>"
--  , "    </tr>"
    , "  </table>"
    , "  <br>"
    , "  <hr/>"
    ]

-- | Opening HTML for the headers table.
headers_head :: String
headers_head = unlines
    [ "  <br>"
    , "  <h3>Headers</h3>"
    , "  <br>"
    , "  <table border='0' cellspacing='0' cellpadding='0' width='100%' id='headers'>"
    ]

-- | One table row describing a request header.
header_content :: Dict -> String
header_content dct = prep dct
    [ "  <tr>"
    , "    <td><code><<HEADER-NAME>></code></td>"
    , "    <td><em><<HEADER-OR>></em></td>"
    , "    <td class='details'><p><<HEADER-TYPE>></p></td>"
    , "    <td class='details'><p><tt><<HEADER-EXAMPLE>></tt></p></td>"
    , "    <td class='details'><p><<HEADER-DESC>></p></td>"
    , "  </tr>"
    ]

-- | Closing HTML for the headers table.
headers_foot :: String
headers_foot = "</table><br>"
+
+
-- | Placeholder shown when a call has no parameters.
no_params :: Dict -> String
no_params dct = prep dct
    [ "  <br>"
    , "  <h3>Parameters</h3>"
    , "  <br>"
    , "  <em>There are no parameters for this resource.</em>"
    , "  <br>"
    ]

-- | Opening HTML for the parameters table.
params_head :: Dict -> String
params_head dct = prep dct
    [ "  <br>"
    , "  <h3>Parameters</h3>"
    , "  <br>"
    , "  <table border='0' cellspacing='0' cellpadding='0' width='100%' id='params' class='params'>"
    ]



-- | One table row describing a single parameter.
parameter_row :: Dict -> String
parameter_row dct = prep dct
    [ "  <tr>"
    , "    <td width='130'><code><<PARAMETER-NAME>></code></td>"
    , "    <td width='75'>"
    , "      <em>"
    , "        <<PARAMETER-OR>>"
    , "      </em>"
    , "    </td>"
    , "    <td class='details'>"
    , "      <p><<PARAMETER-TYPE>></p>"
    , "    </td>"
    , "    <td class='details'>"
    , "      <p><tt><<PARAMETER-EXAMPLE>></tt></p>"
    , "    </td>"
    , "    <td class='details'>"
    , "      <p><<PARAMETER-DESC>></p>"
    , "    </td>"
    , "  </tr>"
    ]


-- | Closing HTML for the parameters table.
params_foot :: Dict -> String
params_foot dct = prep dct
    [ "  </table>"
    ]

-- | The \"Sample Body\" section, showing the request body type and an example.
body_sample :: Dict -> String
body_sample dct = prep dct
    [ "  <br>"
    , "  <h3>Sample Body</h3>"
    , "  <br>"
    , "  <div class='response-format'>"
    , "    <<BODY-TYPE>>"
    , "  </div>"
    , "  <pre><<BODY-SAMPLE>></pre>"
    ]
+
+
-- | Opening HTML for the views summary table.
views_head :: String
views_head = unlines
    [ "  <br>"
    , "  <h3>Views</h3>"
    , "  <br>"
    , "  <table border='0' cellspacing='0' cellpadding='0' width='100%' id='views'>"
    ]

-- | One summary row describing a view.
view_content :: Dict -> String
view_content dct = prep dct
    [ "  <tr>"
    , "    <td width='130'><code><<VIEW-ID>></code></td>"
    , "    <td class='details'><p><<VIEW-TYPE>></p></td>"
    , "    <td class='details'><p><<VIEW-DOC>></p></td>"
    , "  </tr>"
    ]

-- | Closing HTML for the views summary table.
views_foot :: String
views_foot = "</table><br>"


-- | Opening HTML for one view's detailed parameter table.
view_detail_head :: Dict -> String
view_detail_head dct = prep dct
    [ "  <br>"
    , "  <h3>Parameters for <code><<VIEW-ID>></code> view :: <<VIEW-TYPE>></h3>"
    , "  <br>"
    , "  <table border='0' cellspacing='0' cellpadding='0' width='100%' class='params view-detail' id='view-<<VIEW-ID>>'>"
    ]

-- | Closing HTML for a view's detailed parameter table.
view_detail_foot :: String
view_detail_foot = "</table>"



-- | Heading for the sample-responses section.
sample_heading :: Dict -> String
sample_heading dct = prep dct
    [ "  <br>"
    , "  <h3>Sample Responses</h3>"
    , "  <br>"
    ]

-- | One rendered sample response: format, status code and body.
sample :: Dict -> String
sample dct = prep dct
    [ "  <div class='response-format'>"
    , "    <<SAMPLE-TYPE>>"
    , "    <span>HTTP Status: <<HTTP-STATUS>></span>"
    , "  </div>"
    , "  <pre><<SAMPLE-RESPONSE>></pre>"
    ]
109 src/Data/API/Doc/Dir.hs
@@ -0,0 +1,109 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+module Data.API.Doc.Dir
+ ( dirHtml
+ ) where
+
+import Data.API.Doc.Types
+import Data.API.Doc.Subst
+import Data.List
+import Data.Ord
+import Data.Char
+
+
-- | Render the directory (table-of-contents) page listing every API
-- call, grouped by top-level resource.
dirHtml :: DocInfo -> Dict -> [Call] -> String
dirHtml di dct cls = concat
    [ container_open dct
    , concatMap (resourceHtml di dct) (aggregate cls)   -- was: concat $ map …
    ]
+
-- | Group calls by their leading path component (the resource); calls
-- with an empty path fall under the pseudo-resource @\"/\"@.  The
-- result preserves one entry per distinct resource, sorted by name.
aggregate :: [Call] -> [(String,[Call])]
aggregate = map f . groupBy eq . sortBy (comparing fst) . map x_r
  where
    -- Pair each call with its resource key.
    x_r cl@Call{..} =
        case call_path of
          []     -> ("/" ,cl)
          rsrc:_ -> (rsrc,cl)

    eq (x,_) (y,_) = x==y

    -- Unreachable: groupBy never produces empty groups.
    f [] = error "Data.API.Doc.Dir.aggregate"
    f ((rsrc,cl):ps) = (rsrc,cl:map snd ps)
+
+
-- | Render one resource group in the directory: a heading followed by
-- a list with one entry per call.
resourceHtml :: DocInfo -> Dict -> (String,[Call]) -> String
resourceHtml di dct (rsc,cls) = concat
    [ resource_heading r_dct
    , ul_open r_dct
    , concat [ callHtml di r_dct cl | cl<-cls ]
    , ul_close r_dct
    ]
  where
    r_dct = resource_dict dct rsc
+
-- | Extend the dictionary with the resource heading variable.
resource_dict :: Dict -> String -> Dict
resource_dict dct rsc = flip extDict dct
    [ (,) "RESOURCE-HEADING" rsc
    ]

-- | Render one call's directory entry via its template dictionary.
callHtml :: DocInfo -> Dict -> Call -> String
callHtml di dct cl = call $ call_dict di dct cl
+
-- | Template dictionary for one call entry in the directory listing:
-- its link URL, method (and the CSS class derived from it), path and
-- request-body type (or an em-dash when there is none).
call_dict :: DocInfo -> Dict -> Call -> Dict
call_dict di dct Call{..} = flip extDict dct
    [ (,) "CALL-URL" $ doc_info_call_url di call_http_method call_path
    , (,) "METHOD-CLASS" $ map toLower mth_s
    , (,) "METHOD" $ mth_s
    , (,) "PATH" $ '/' : intercalate "/" call_path   -- was: concat (intersperse "/" …)
    , (,) "BODY-TYPE" $ maybe "&mdash;" (renderAPIType di . fst) call_body
    ]
  where
    mth_s = call_http_method
+
-- | Opening HTML for the directory page: title, endpoint, summary and
-- the resource-list heading.
container_open :: Dict -> String
container_open dct = prep dct
    [ "<h2>"
    , "  <<TITLE>>"
    , "  <span class='tagline'><<TAGLINE>></span>"
    , "</h2>"
    , "<strong>Endpoint: </strong><<ENDPOINT>>"
    , "<br>"
    , "<div id='toc'>"
    , "  <div id='app-description'>"
    , "    <p><<SUMMARY>></p>"
    , "  </div>"
    , "  <hr/>"
    , "  <br>"
    , "  <h3 class='section-title'>"
    , "    API Resources for This Application"
    , "  </h3>"
    ]

-- | Heading for one resource group.
resource_heading :: Dict -> String
resource_heading dct = prep dct
    [ "  <h5 class='tag'><<RESOURCE-HEADING>></h5>"
    ]

-- | Opening tag of a resource's call list.
ul_open :: Dict -> String
ul_open dct = prep dct
    [ "  <ul class='list resource-list'>"
    ]

-- | One list entry linking to a call's documentation page.
call :: Dict -> String
call dct = prep dct
    [ "    <li >"
    , "      <a class='reflink' href='<<CALL-URL>>'>"
    , "        <span class='<<METHOD-CLASS>>'><<METHOD>></span>"
    , "        <<PATH>>"
    , "      </a>"
    , "      <div class='post-data' ><<BODY-TYPE>></div>"
    , "    </li>"
    ]

-- | Closing tag of a resource's call list.
ul_close :: Dict -> String
ul_close dct = prep dct
    [ "  </ul>"
    ]
40 src/Data/API/Doc/Subst.hs
@@ -0,0 +1,40 @@
+module Data.API.Doc.Subst
+ ( Dict
+ , subst
+ , prep
+ , mkDict
+ , extDict
+ )
+ where
+
+import qualified Data.Map as Map
+import Text.Regex
+import Safe
+
+
-- | A substitution dictionary mapping template variable names to their
-- replacement text.
type Dict = Map.Map String String


-- | Matches template variables of the form @<<NAME>>@.
subst_re :: Regex
subst_re = mkRegexWithOpts "<<[a-zA-Z0-9_'-]+>>" True True
+
-- | Expand every @<<VAR>>@ occurrence in the input using the
-- dictionary; a variable with no binding is left in place, delimiters
-- and all.
subst :: Dict -> String -> String
subst dct str =
    case matchRegexAll subst_re str of
      Nothing -> str
      Just (pre,var_,pst,_) -> pre ++ rpl ++ subst dct pst
        where
          rpl = maybe ("<<"++var++">>") id $ Map.lookup var dct

          -- Strip the << and >> delimiters from the matched text.
          var = chp $ reverse $ chp $ reverse var_

          -- Drop two characters from the front; 'tailNote' is partial,
          -- but the regex guarantees the match has at least "<<x>>".
          chp = tailNote "subst.chp" . tailNote "subst.chp"
+
-- | Substitute template variables in each line and join the results
-- with newlines.
prep :: Dict -> [String] -> String
prep dct ls = unlines [ subst dct l | l <- ls ]
+
+mkDict :: [(String,String)] -> Dict
+mkDict = Map.fromList
+
+extDict :: [(String,String)] -> Dict -> Dict
+extDict al dct = foldr (uncurry Map.insert) dct al
112 src/Data/API/Doc/Types.hs
@@ -0,0 +1,112 @@
+{-# LANGUAGE DeriveFunctor #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+module Data.API.Doc.Types
+ ( URL
+ , HTTPMethod
+ , StatusCode
+ , Call(..)
+ , Header(..)
+ , Param(..)
+ , View(..)
+ , Sample(..)
+ , Body(..)
+ , DocInfo(..)
+ , htmlBody
+ , renderAPIType
+ , renderBodyType
+ , mk_link
+ ) where
+
+import Data.API.Types
+
+import Text.Printf
+
-- | A URL rendered into the generated documentation.
type URL = String

-- | An HTTP method name, e.g. @\"GET\"@.
type HTTPMethod = String

-- | A numeric HTTP status code, e.g. @200@.
type StatusCode = Int

-- | Everything needed to document a single API endpoint.
data Call
    = Call
        { call_http_method :: HTTPMethod            -- ^ method used to invoke the call
        , call_path :: [String]                     -- ^ URL path components
        , call_description :: String                -- ^ prose description of the call
        , call_auth_required :: Bool                -- ^ whether authentication is needed
        , call_headers :: [Header]                  -- ^ request headers
        , call_body :: Maybe (APIType, String)      -- ^ request body type and sample text
        , call_params :: [Param]                    -- ^ query/request parameters
        , call_views :: [View]                      -- ^ available views of the result
        , call_samples :: [Sample]                  -- ^ sample responses
        }
    deriving (Show)

-- | A documented request header.
data Header
    = Header
        { header_name :: String                     -- ^ header name
        , header_expl :: String                     -- ^ example value
        , header_desc :: String                     -- ^ prose description
        , header_type :: APIType                    -- ^ type of the header value
        , header_required :: Bool                   -- ^ required or optional
        } deriving (Show)

-- | A documented request parameter; its type may be free-form text
-- ('Left') or a proper API type ('Right').
data Param
    = Param
        { param_name :: String                      -- ^ parameter name
        , param_expl :: String                      -- ^ example value
        , param_desc :: String                      -- ^ prose description
        , param_type :: Either String APIType       -- ^ textual or structured type
        , param_required :: Bool                    -- ^ required or optional
        } deriving (Show)

-- | A named view of a call's result, with its own parameters.
data View
    = View
        { view_id :: String                         -- ^ view identifier
        , view_type :: APIType                      -- ^ type the view returns
        , view_doc :: String                        -- ^ prose description
        , view_params :: [Param]                    -- ^ parameters accepted by the view
        } deriving (Show)

-- | A sample response used for documentation purposes.
data Sample
    = Sample
        { sample_status :: StatusCode               -- ^ HTTP status of the sample
        , sample_type :: Body APIType               -- ^ body format of the response
        , sample_response :: Maybe String           -- ^ sample body text, if any
        } deriving (Show)

-- | The format of a request or response body.
data Body t = EmptyBody                             -- ^ no body at all
            | JSONBody t                            -- ^ JSON body of the given type
            | OtherBody String                      -- ^ some other named format
    deriving (Functor, Show)

-- | How to build URLs for cross-references within the generated docs.
data DocInfo
    = DocInfo
        { doc_info_call_url :: HTTPMethod -> [String] -> URL  -- ^ URL of a call's page
        , doc_info_type_url :: TypeName -> URL                -- ^ URL of a type's page
        }
+
+
-- | A body in HTML format (a common 'OtherBody' case).
htmlBody :: Body t
htmlBody = OtherBody "html"

-- | Render a body format for display, linking JSON body types.
renderBodyType :: DocInfo -> Body APIType -> String
renderBodyType _ EmptyBody = "empty"
renderBodyType di (JSONBody ty) = "json&nbsp;&nbsp;" ++ renderAPIType di ty
renderBodyType _ (OtherBody s) = s

-- | Render an 'APIType' as HTML; named types become hyperlinks to
-- their documentation pages.
renderAPIType :: DocInfo -> APIType -> String
renderAPIType di (TyList ty ) = "[" ++ renderAPIType di ty ++ "]"
renderAPIType di (TyMaybe ty ) = "?" ++ renderAPIType di ty
renderAPIType di (TyName tn ) = mk_link (doc_info_type_url di tn) (_TypeName tn)
renderAPIType _ (TyBasic bt ) = renderBasicType bt
renderAPIType _ TyJSON = "json"

-- | Render a basic type's display name.
renderBasicType :: BasicType -> String
renderBasicType BTstring{} = "string"
renderBasicType BTbinary{} = "binary"
renderBasicType BTbool {} = "bool"
renderBasicType BTint {} = "int"
renderBasicType BTutc {} = "utc"

-- | Build an HTML hyperlink from a URL and link text (via 'printf',
-- which supplies the two-argument function type here).
mk_link :: URL -> String -> String
mk_link = printf "<b><a class='reflink' href='%s' >%s</a></b>"
425 src/Data/API/JSON.hs
@@ -0,0 +1,425 @@
+{-# LANGUAGE DeriveFunctor #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+
+module Data.API.JSON
+ ( JSONError(..)
+ , Expected(..)
+ , FormatExpected(..)
+ , prettyJSONError
+ , prettyStep
+ , prettyJSONErrorPositions
+ , Position
+ , Step(..)
+ , ParserWithErrs
+ , FromJSONWithErrs(..)
+ , promoteFromJSON
+ , fromJSONWithErrs
+ , decodeWithErrs
+ , failWith
+ , expectedArray
+ , expectedBool
+ , expectedInt
+ , expectedObject
+ , expectedString
+ , badFormat
+ , withInt
+ , with_int_fr, with_int_to, with_int_fr_to
+ , withBinary
+ , withBool
+ , withText
+ , with_txt_re
+ , withUTC