diff --git a/.gitignore b/.gitignore
index 78bfe03..f94ebe4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@
 dist-newstyle/
 *~
 tags
+result*
diff --git a/README.md b/README.md
index 5622d92..5e05382 100644
--- a/README.md
+++ b/README.md
@@ -90,3 +90,69 @@ You need an HTTP server to serve it from and simply point to the application por
 Run the application, go to the local port http://localhost:3000/swagger.json and copy the content into https://editor.swagger.io/
 Voila! You got it, the spec is there.
+## How to run
+
+### Create DB
+
+First you need to create the PostgreSQL database. You can provide your own pgpass file path; the example uses the default location:
+```
+PGPASSFILE=config/pgpass ./scripts/postgresql-setup.sh --createdb
+```
+Or, if it needs to be recreated:
+```
+PGPASSFILE=config/pgpass ./scripts/postgresql-setup.sh --recreatedb
+```
+
+After that, run the migrations (if there are any):
+```
+PGPASSFILE=config/pgpass stack run smash-exe -- run-migrations --mdir ./schema
+```
+
+You can also create additional migration scripts if they are needed:
+```
+PGPASSFILE=config/pgpass stack run smash-exe -- create-migration --mdir ./schema
+```
+
+Inside psql, to show all tables:
+```
+\dt
+```
+
+To show details about a specific table:
+```
+\d+ TABLE_NAME
+```
+
+For example:
+```
+\d+ block
+```
+
+Dumping the schema:
+```
+pg_dump -c -s --no-owner cexplorer > cexplorer.sql
+```
+
+## Inserting pool metadata
+
+This is an example (the pool hash is the Blake2 256 hash of the metadata file):
+```
+stack exec smash-exe -- insert-pool --metadata test_pool.json --poolhash "cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f"
+```
+
+## Test script
+
+An example of the whole workflow:
+```
+PGPASSFILE=config/pgpass ./scripts/postgresql-setup.sh --recreatedb
+PGPASSFILE=config/pgpass stack run smash-exe -- run-migrations --mdir ./schema
+PGPASSFILE=config/pgpass stack run smash-exe -- create-migration --mdir ./schema
+PGPASSFILE=config/pgpass stack run smash-exe -- run-migrations --mdir ./schema
+
+PGPASSFILE=config/pgpass stack run smash-exe -- insert-pool --metadata test_pool.json --poolhash "cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f"
+
+PGPASSFILE=config/pgpass stack run smash-exe -- run-app
+```
+
+After the server is running, you can check the hash on http://localhost:3100/api/v1/metadata/cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f to see it return the JSON metadata.
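+
+For example, with `curl` (a quick check; this assumes the server is listening on port 3100 as above, and `jq` is only used for pretty-printing):
+```
+curl -s -H "Accept: application/json" \
+  http://localhost:3100/api/v1/metadata/cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f | jq
+```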
diff --git a/app/Main.hs b/app/Main.hs
index e67fb56..874f362 100644
--- a/app/Main.hs
+++ b/app/Main.hs
@@ -4,5 +4,136 @@
 import Cardano.Prelude
 
 import Lib
+import DB
+
+import Control.Applicative (optional)
+
+import Data.Monoid ((<>))
+
+import Options.Applicative (Parser, ParserInfo, ParserPrefs)
+import qualified Options.Applicative as Opt
+
+
 main :: IO ()
-main = runApp defaultConfiguration
+main = do
+    Opt.customExecParser p opts >>= runCommand
+  where
+    opts :: ParserInfo Command
+    opts = Opt.info (Opt.helper <*> pVersion <*> pCommand)
+      ( Opt.fullDesc
+      <> Opt.header "SMASH - Manage the Stakepool Metadata Aggregation Server"
+      )
+
+    p :: ParserPrefs
+    p = Opt.prefs Opt.showHelpOnEmpty
+
+-- -----------------------------------------------------------------------------
+
+data Command
+  = CreateMigration MigrationDir
+  | RunMigrations MigrationDir (Maybe LogFileDir)
+  | RunApplication
+  | InsertPool FilePath Text
+
+runCommand :: Command -> IO ()
+runCommand cmd =
+  case cmd of
+    CreateMigration mdir -> doCreateMigration mdir
+    RunMigrations mdir mldir -> runMigrations (\pgConfig -> pgConfig) False mdir mldir
+    RunApplication -> runApp defaultConfiguration
+    InsertPool poolMetadataJsonPath poolHash -> do
+      putTextLn "Inserting pool metadata!"
+      result <- runPoolInsertion poolMetadataJsonPath poolHash
+      either
+        (\err -> putTextLn $ "Error occurred. " <> renderLookupFail err)
+        (\_ -> putTextLn "Insertion completed!")
+        result
+
+doCreateMigration :: MigrationDir -> IO ()
+doCreateMigration mdir = do
+  mfp <- createMigration mdir
+  case mfp of
+    Nothing -> putTextLn "No migration needed."
+    Just fp -> putTextLn $ toS ("New migration '" ++ fp ++ "' created.")
+
+-------------------------------------------------------------------------------
+
+pVersion :: Parser (a -> a)
+pVersion =
+  Opt.infoOption "smash version 0.1.0.0"
+    ( Opt.long "version"
+    <> Opt.short 'v'
+    <> Opt.help "Print the version and exit"
+    )
+
+pCommand :: Parser Command
+pCommand =
+  Opt.subparser
+    ( Opt.command "create-migration"
+        ( Opt.info pCreateMigration
+          $ Opt.progDesc "Create a database migration (only really used by devs)."
+        )
+    <> Opt.command "run-migrations"
+        ( Opt.info pRunMigrations
+          $ Opt.progDesc "Run the database migrations (which are idempotent)."
+        )
+    <> Opt.command "run-app"
+        ( Opt.info pRunApp
+          $ Opt.progDesc "Run the actual application."
+        )
+    <> Opt.command "insert-pool"
+        ( Opt.info pInsertPool
+          $ Opt.progDesc "Inserts the pool into the database (utility)."
+        )
+    )
+  where
+    pCreateMigration :: Parser Command
+    pCreateMigration =
+      CreateMigration <$> pMigrationDir
+
+    pRunMigrations :: Parser Command
+    pRunMigrations =
+      RunMigrations <$> pMigrationDir <*> optional pLogFileDir
+
+    -- Empty right now but we might add some params over time. Like ports and stuff?
+    pRunApp :: Parser Command
+    pRunApp =
+      pure RunApplication
+
+    -- Empty right now but we might add some params over time.
+    pInsertPool :: Parser Command
+    pInsertPool =
+      InsertPool <$> pFilePath <*> pPoolHash
+
+pFilePath :: Parser FilePath
+pFilePath =
+  Opt.strOption
+    ( Opt.long "metadata"
+    <> Opt.help "The JSON metadata filepath location."
+    <> Opt.metavar "FILEPATH"
+    <> Opt.completer (Opt.bashCompleter "directory")
+    )
+
+pPoolHash :: Parser Text
+pPoolHash =
+  Opt.strOption
+    ( Opt.long "poolhash"
+    <> Opt.help "The JSON metadata Blake2 256 hash."
+ ) + +pMigrationDir :: Parser MigrationDir +pMigrationDir = + MigrationDir <$> Opt.strOption + ( Opt.long "mdir" + <> Opt.help "The directory containing the migrations." + <> Opt.completer (Opt.bashCompleter "directory") + ) + +pLogFileDir :: Parser LogFileDir +pLogFileDir = + LogFileDir <$> Opt.strOption + ( Opt.long "ldir" + <> Opt.help "The directory to write the log to." + <> Opt.completer (Opt.bashCompleter "directory") + ) + diff --git a/cabal.project b/cabal.project index 24a2bdf..d057d6c 100644 --- a/cabal.project +++ b/cabal.project @@ -1,21 +1,118 @@ -index-state: 2020-01-31T00:00:00Z +index-state: 2020-04-01T00:00:00Z packages: - . + ./. source-repository-package type: git location: https://github.com/input-output-hk/cardano-prelude - tag: fe76ec64f6b45259cc407a6d840dad79ee6063b6 - --sha256: 1w39806djx7vmps47vvc72i20w4dkam3l8687kkw5ih1isvl671x + tag: e0257be9d745a04f85ab8287a48a9c193acafec8 + --sha256: 09cxfa0vzny8xhjskc6khrfwsnajjawas6j574i2qfd71yd8mrrg source-repository-package type: git location: https://github.com/input-output-hk/cardano-prelude - tag: fe76ec64f6b45259cc407a6d840dad79ee6063b6 - --sha256: 1w39806djx7vmps47vvc72i20w4dkam3l8687kkw5ih1isvl671x + tag: e0257be9d745a04f85ab8287a48a9c193acafec8 + --sha256: 09cxfa0vzny8xhjskc6khrfwsnajjawas6j574i2qfd71yd8mrrg subdir: test +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: contra-tracer + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: iohk-monitoring + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: plugins/backend-aggregation + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: plugins/backend-ekg + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: plugins/backend-monitoring + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: plugins/backend-trace-forwarder + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: plugins/scribe-systemd + +source-repository-package + type: git + location: https://github.com/input-output-hk/iohk-monitoring-framework + tag: 71df15f7b888d2671f619cc632080aaaaca48087 + --sha256: 02shwi4qppf88biq2y9amfzkzif9j0fdj0d77zdjrfjacldvc6ci + subdir: tracer-transformers + +source-repository-package + type: git + location: https://github.com/input-output-hk/ouroboros-network + tag: 16bca08140fb37746538edff9fe77220acf91d55 + 
--sha256: 1m6z6qfy6y5rvlj0isrzf3wcm1vsmlzvhmn988dp14r6fycljvca + subdir: Win32-network + +source-repository-package + type: git + location: https://github.com/input-output-hk/cardano-crypto + tag: 2547ad1e80aeabca2899951601079408becbc92c + --sha256: 1p2kg2w02q5w1cvqzhfhqmxviy4xrzada3mmb096j2n6hfr20kri + +source-repository-package + type: git + location: https://github.com/input-output-hk/cardano-base + tag: 4a457f44e68132ce2bd978ab45a3188e64327abc + --sha256: 1yzbhkil119hkcb36ykwrchxk3zqmshpp57an17zjjqh7n2nw392 + subdir: binary + +source-repository-package + type: git + location: https://github.com/input-output-hk/cardano-base + tag: 4a457f44e68132ce2bd978ab45a3188e64327abc + --sha256: 1yzbhkil119hkcb36ykwrchxk3zqmshpp57an17zjjqh7n2nw392 + subdir: binary/test + +source-repository-package + type: git + location: https://github.com/input-output-hk/cardano-base + tag: 4a457f44e68132ce2bd978ab45a3188e64327abc + --sha256: 1yzbhkil119hkcb36ykwrchxk3zqmshpp57an17zjjqh7n2nw392 + subdir: cardano-crypto-class + +source-repository-package + type: git + location: https://github.com/input-output-hk/cardano-base + tag: 4a457f44e68132ce2bd978ab45a3188e64327abc + --sha256: 1yzbhkil119hkcb36ykwrchxk3zqmshpp57an17zjjqh7n2nw392 + subdir: slotting + constraints: ip < 1.5, hedgehog >= 1.0, @@ -27,3 +124,15 @@ allow-newer: katip:Win32 package comonad flags: -test-doctests + +package cardano-prelude + tests: False + +package iohk-monitoring + tests: False + +package cardano-binary + tests: False + +package cardano-crypto-class + tests: False diff --git a/config/pgpass b/config/pgpass new file mode 100644 index 0000000..795c354 --- /dev/null +++ b/config/pgpass @@ -0,0 +1 @@ +/var/run/postgresql:5432:smash:*:* diff --git a/default.nix b/default.nix index b418d75..c75bd10 100644 --- a/default.nix +++ b/default.nix @@ -35,6 +35,10 @@ let tests = collectChecks haskellPackages; }; + nixosTests = import ./nix/nixos/tests { + inherit pkgs; + }; + shell = import ./shell.nix { inherit pkgs; withHoogle = true; diff --git a/nix/default.nix b/nix/default.nix index 28e1de9..6a4e7e7 100644 --- a/nix/default.nix +++ b/nix/default.nix @@ -6,24 +6,23 @@ let sources = import ./sources.nix { inherit pkgs; } // sourcesOverride; - iohKNix = import sources.iohk-nix {}; + iohkNix = import sources.iohk-nix {}; haskellNix = import sources."haskell.nix"; # use our own nixpkgs if it exists in our sources, # otherwise use iohkNix default nixpkgs. nixpkgs = if (sources ? nixpkgs) then (builtins.trace "Not using IOHK default nixpkgs (use 'niv drop nixpkgs' to use default for better sharing)" sources.nixpkgs) - else (builtins.trace "Using IOHK default nixpkgs" - iohKNix.nixpkgs); + else iohkNix.nixpkgs; # for inclusion in pkgs: overlays = # Haskell.nix (https://github.com/input-output-hk/haskell.nix) haskellNix.overlays # haskell-nix.haskellLib.extra: some useful extra utility functions for haskell.nix - ++ iohKNix.overlays.haskell-nix-extra + ++ iohkNix.overlays.haskell-nix-extra # iohkNix: nix utilities and niv: - ++ iohKNix.overlays.iohkNix + ++ iohkNix.overlays.iohkNix # our own overlays: ++ [ (pkgs: _: with pkgs; { diff --git a/nix/haskell.nix b/nix/haskell.nix index 870558c..98ec325 100644 --- a/nix/haskell.nix +++ b/nix/haskell.nix @@ -13,11 +13,19 @@ }: let + src = haskell-nix.haskellLib.cleanGit { + name = "smash-src"; + src = ../.; + }; + + projectPackages = lib.attrNames (haskell-nix.haskellLib.selectProjectPackages + (haskell-nix.cabalProject { inherit src; })); + # This creates the Haskell package set. 
 # https://input-output-hk.github.io/haskell.nix/user-guide/projects/
   pkgSet = haskell-nix.cabalProject {
-    src = haskell-nix.haskellLib.cleanGit { src = ../. ; };
-    ghc = buildPackages.haskell-nix.compiler.${compiler};
+    inherit src;
+    compiler-nix-name = compiler;
     modules = [
       # Allow reinstallation of Win32
       { nonReinstallablePkgs =
@@ -35,9 +43,11 @@ let
         # "stm" "terminfo"
         ];
       }
-      {
-        packages.smash.configureFlags = [ "--ghc-option=-Werror" ];
-      }
+      # TODO: Compile all local packages with -Werror:
+      #{
+      #  packages = lib.genAttrs projectPackages
+      #    (name: { configureFlags = [ "--ghc-option=-Werror" ]; });
+      #}
     ];
   };
 in
diff --git a/nix/nixos/default.nix b/nix/nixos/default.nix
new file mode 100644
index 0000000..779a456
--- /dev/null
+++ b/nix/nixos/default.nix
@@ -0,0 +1,5 @@
+{
+  imports = [
+    ./smash-service.nix
+  ];
+}
diff --git a/nix/nixos/smash-service.nix b/nix/nixos/smash-service.nix
new file mode 100644
index 0000000..3417871
--- /dev/null
+++ b/nix/nixos/smash-service.nix
@@ -0,0 +1,53 @@
+{ config, lib, pkgs, ... }:
+
+let
+  cfg = config.services.smash;
+in {
+
+  options = {
+    services.smash = {
+      enable = lib.mkEnableOption "the smash server";
+      script = lib.mkOption {
+        internal = true;
+        type = lib.types.package;
+      };
+      package = lib.mkOption {
+        type = lib.types.package;
+        default = (import ../. {}).smashHaskellPackages.smash.components.exes.smash-exe;
+      };
+    };
+  };
+  config = lib.mkIf cfg.enable {
+    services.smash.script = pkgs.writeShellScript "smash" ''
+      set -euo pipefail
+
+      cp ${config.services.cardano-db-sync.pgpass} $RUNTIME_DIRECTORY/pgpass
+      chmod 0600 $RUNTIME_DIRECTORY/pgpass
+      export PGPASSFILE=$RUNTIME_DIRECTORY/pgpass
+
+      ${cfg.package}/bin/smash-exe run-migrations --mdir ${../../schema}
+      exec ${cfg.package}/bin/smash-exe run-app
+    '';
+    environment.systemPackages = [ cfg.package config.services.postgresql.package ];
+    systemd.services.smash = {
+      path = [ cfg.package config.services.postgresql.package pkgs.netcat ];
+      preStart = ''
+        for x in {1..10}; do
+          nc -z localhost ${toString config.services.cardano-db-sync.postgres.port} && break
+          echo loop $x: waiting for postgresql 2 sec...
+          sleep 2
+        done
+        sleep 1
+      '';
+      serviceConfig = {
+        ExecStart = config.services.smash.script;
+        DynamicUser = true;
+        RuntimeDirectory = "smash";
+      };
+
+      wantedBy = [ "multi-user.target" ];
+      after = [ "postgresql.service" "cardano-db-sync.service" ];
+      requires = [ "postgresql.service" ];
+    };
+  };
+}
diff --git a/nix/nixos/tests/default.nix b/nix/nixos/tests/default.nix
new file mode 100644
index 0000000..4dc9b93
--- /dev/null
+++ b/nix/nixos/tests/default.nix
@@ -0,0 +1,20 @@
+{ pkgs
+, supportedSystems ? [ "x86_64-linux" ]
+}:
+
+with pkgs;
+with pkgs.commonLib;
+
+let
+  forAllSystems = genAttrs supportedSystems;
+  importTest = fn: args: system: let
+    imported = import fn;
+    test = import (pkgs.path + "/nixos/tests/make-test.nix") imported;
+  in test ({
+    inherit pkgs system config;
+  } // args);
+  callTest = fn: args: forAllSystems (system: hydraJob (importTest fn args system));
+in rec {
+
+  smashTest = callTest ./smash-test.nix {};
+}
diff --git a/nix/nixos/tests/smash-test.nix b/nix/nixos/tests/smash-test.nix
new file mode 100644
index 0000000..7757c48
--- /dev/null
+++ b/nix/nixos/tests/smash-test.nix
@@ -0,0 +1,61 @@
+{ pkgs, ... }:
+with pkgs; with commonLib;
+{
+  name = "smash-test";
+  nodes = {
+    machine = { config, ...
}: { + nixpkgs.pkgs = pkgs; + environment = { + systemPackages = with pkgs; [ curl jq ]; + variables = { + PGPASSFILE = config.services.cardano-db-sync.pgpass; + }; + }; + imports = [ + ../smash-service.nix + (sources.cardano-db-sync + "/nix/nixos") + ]; + services.cardano-db-sync = { + enable = true; + cluster = "ff"; + }; + services.smash = { + enable = true; + package = smashHaskellPackages.smash.components.exes.smash-exe; + }; + services.postgresql = { + enable = true; + package = postgresql_12; + enableTCPIP = false; + ensureDatabases = [ "${config.services.cardano-db-sync.user}" ]; + ensureUsers = [ + { + name = "${config.services.cardano-db-sync.user}"; + ensurePermissions = { + "DATABASE ${config.services.cardano-db-sync.user}" = "ALL PRIVILEGES"; + }; + } + ]; + identMap = '' + cdbsync-users root ${config.services.cardano-db-sync.user} + cdbsync-users smash ${config.services.cardano-db-sync.user} + cdbsync-users ${config.services.cardano-db-sync.user} ${config.services.cardano-db-sync.user} + cdbsync-users postgres postgres + ''; + authentication = '' + local all all ident map=cdbsync-users + ''; + }; + }; + }; + testScript = '' + startAll + $machine->waitForUnit("postgresql.service"); + $machine->waitForUnit("smash.service"); + $machine->waitForOpenPort(3100); + $machine->succeed("smash-exe insert-pool --metadata ${../../../test_pool.json} --poolhash \"cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f\""); + $machine->succeed("curl -s -i -H \"Accept: application/json\" http://localhost:3100/api/v1/metadata/cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f | systemd-cat --identifier=curl-smash"); + $machine->succeed("curl -s -H \"Accept: application/json\" http://localhost:3100/api/v1/metadata/cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f | jq | systemd-cat --identifier=jq-smash"); + ''; + +} diff --git a/nix/sources.json b/nix/sources.json index 0521212..c2af853 100644 --- a/nix/sources.json +++ b/nix/sources.json @@ -1,14 +1,26 @@ { + "cardano-db-sync": { + "branch": "master", + "description": "A component that follows the Cardano chain and stores blocks and transactions in PostgreSQL", + "homepage": "", + "owner": "input-output-hk", + "repo": "cardano-db-sync", + "rev": "6d2917212848a7f70e7e84336b9291dd2634ea61", + "sha256": "0bi6v6b9c2i2d3h416lpn0p2qy8pd08w0g0zjjsnldr2sb7ph3s9", + "type": "tarball", + "url": "https://github.com/input-output-hk/cardano-db-sync/archive/6d2917212848a7f70e7e84336b9291dd2634ea61.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, "haskell.nix": { "branch": "master", "description": "Alternative Haskell Infrastructure for Nixpkgs", "homepage": "https://input-output-hk.github.io/haskell.nix", "owner": "input-output-hk", "repo": "haskell.nix", - "rev": "480177ce8308d7baa9e060c4163f158d8584c4e2", - "sha256": "00mcb5r7c1w52jbiig0qrjhqfyrlrlq67jv0w5n1ll8l6zic6sss", + "rev": "36836c1f580a021e99e7ad3ad23f4257e7ffda2b", + "sha256": "0jsg3lairnfn9i74dj6r88n53y5ca724wv9lvak67cbhmxgj35df", "type": "tarball", - "url": "https://github.com/input-output-hk/haskell.nix/archive/480177ce8308d7baa9e060c4163f158d8584c4e2.tar.gz", + "url": "https://github.com/input-output-hk/haskell.nix/archive/36836c1f580a021e99e7ad3ad23f4257e7ffda2b.tar.gz", "url_template": "https://github.com///archive/.tar.gz" }, "iohk-nix": { @@ -17,10 +29,10 @@ "homepage": null, "owner": "input-output-hk", "repo": "iohk-nix", - "rev": "6ce1a519cc139781dfd4d68760d8596f0a6e4e84", - "sha256": 
"058lfjbinjn7ra8h5mpy7l191z2pzyqihrsw0a6kmy46v9xlm7gi", + "rev": "a3eae81cbae000bbfdc2bc1880528512747a3a9c", + "sha256": "1jairn2vp43z36j0b1fi2nmnfpadff217dqv5bjjxm5v099g0gsw", "type": "tarball", - "url": "https://github.com/input-output-hk/iohk-nix/archive/6ce1a519cc139781dfd4d68760d8596f0a6e4e84.tar.gz", + "url": "https://github.com/input-output-hk/iohk-nix/archive/a3eae81cbae000bbfdc2bc1880528512747a3a9c.tar.gz", "url_template": "https://github.com///archive/.tar.gz" } } diff --git a/nix/sources.nix b/nix/sources.nix index 8a725cb..7bd0f3e 100644 --- a/nix/sources.nix +++ b/nix/sources.nix @@ -12,36 +12,29 @@ let else pkgs.fetchurl { inherit (spec) url sha256; }; - fetch_tarball = pkgs: spec: - if spec.builtin or true then - builtins_fetchTarball { inherit (spec) url sha256; } - else - pkgs.fetchzip { inherit (spec) url sha256; }; + fetch_tarball = pkgs: name: spec: + let + ok = str: ! builtins.isNull (builtins.match "[a-zA-Z0-9+-._?=]" str); + # sanitize the name, though nix will still fail if name starts with period + name' = stringAsChars (x: if ! ok x then "-" else x) "${name}-src"; + in + if spec.builtin or true then + builtins_fetchTarball { name = name'; inherit (spec) url sha256; } + else + pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; fetch_git = spec: builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; }; - fetch_builtin-tarball = spec: - builtins.trace - '' - WARNING: - The niv type "builtin-tarball" will soon be deprecated. You should - instead use `builtin = true`. + fetch_local = spec: spec.path; - $ niv modify -a type=tarball -a builtin=true - '' - builtins_fetchTarball { inherit (spec) url sha256; }; + fetch_builtin-tarball = name: throw + ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=tarball -a builtin=true''; - fetch_builtin-url = spec: - builtins.trace - '' - WARNING: - The niv type "builtin-url" will soon be deprecated. You should - instead use `builtin = true`. - - $ niv modify -a type=file -a builtin=true - '' - (builtins_fetchurl { inherit (spec) url sha256; }); + fetch_builtin-url = name: throw + ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=file -a builtin=true''; # # Various helpers @@ -72,10 +65,11 @@ let if ! 
builtins.hasAttr "type" spec then abort "ERROR: niv spec ${name} does not have a 'type' attribute" else if spec.type == "file" then fetch_file pkgs spec - else if spec.type == "tarball" then fetch_tarball pkgs spec + else if spec.type == "tarball" then fetch_tarball pkgs name spec else if spec.type == "git" then fetch_git spec - else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec - else if spec.type == "builtin-url" then fetch_builtin-url spec + else if spec.type == "local" then fetch_local spec + else if spec.type == "builtin-tarball" then fetch_builtin-tarball name + else if spec.type == "builtin-url" then fetch_builtin-url name else abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; @@ -87,13 +81,23 @@ let listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) ); + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 + range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 + stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 + stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); + concatStrings = builtins.concatStringsSep ""; + # fetchTarball version that is compatible between all the versions of Nix - builtins_fetchTarball = { url, sha256 }@attrs: + builtins_fetchTarball = { url, name, sha256 }@attrs: let inherit (builtins) lessThan nixVersion fetchTarball; in if lessThan nixVersion "1.12" then - fetchTarball { inherit url; } + fetchTarball { inherit name url; } else fetchTarball attrs; diff --git a/release.nix b/release.nix new file mode 100644 index 0000000..b330d51 --- /dev/null +++ b/release.nix @@ -0,0 +1,69 @@ +############################################################################ +# +# Hydra release jobset. +# +# The purpose of this file is to select jobs defined in default.nix and map +# them to all supported build platforms. +# +############################################################################ + +# The project sources +{ smash ? { outPath = ./.; rev = "abcdef"; } + +# Function arguments to pass to the project +, projectArgs ? { + inherit sourcesOverride; + config = { allowUnfree = false; inHydra = true; }; + gitrev = smash.rev; + } + +# The systems that the jobset will be built for. +, supportedSystems ? [ "x86_64-linux" "x86_64-darwin" ] + +# The systems used for cross-compiling (default: linux) +, supportedCrossSystems ? [ (builtins.head supportedSystems) ] + +# A Hydra option +, scrubJobs ? true + +# Dependencies overrides +, sourcesOverride ? {} + +# Import pkgs, including IOHK common nix lib +, pkgs ? import ./nix { inherit sourcesOverride; } + +}: + +with (import pkgs.iohkNix.release-lib) { + inherit pkgs; + inherit supportedSystems supportedCrossSystems scrubJobs projectArgs; + packageSet = import smash; + gitrev = smash.rev; +}; + +with pkgs.lib; + +let + # restrict supported systems to a subset where tests (if exist) are required to pass: + testsSupportedSystems = intersectLists supportedSystems [ "x86_64-linux" "x86_64-darwin" ]; + # Recurse through an attrset, returning all derivations in a list matching test supported systems. 
+  collectJobs' = ds: filter (d: elem d.system testsSupportedSystems) (collect isDerivation ds);
+  # Adds the package name to the derivations for windows-testing-bundle.nix
+  # (passthru.identifier.name does not survive mapTestOn)
+  collectJobs = ds: concatLists (
+    mapAttrsToList (packageName: package:
+      map (drv: drv // { inherit packageName; }) (collectJobs' package)
+    ) ds);
+
+  jobs = {
+    native = mapTestOn (__trace (__toJSON (packagePlatforms project)) (packagePlatforms project));
+    # only build nixos tests on first supported system (linux)
+    inherit (pkgsFor (builtins.head supportedSystems)) nixosTests;
+  } // (mkRequiredJob (concatLists [
+    (collectJobs jobs.native.checks)
+    (collectJobs jobs.native.libs)
+    (collectJobs jobs.native.exes)
+    (collectJobs jobs.nixosTests.smashTest)
+  ]));
+
+in jobs
diff --git a/schema/migration-1-0000-20200610.sql b/schema/migration-1-0000-20200610.sql
new file mode 100644
index 0000000..050bd87
--- /dev/null
+++ b/schema/migration-1-0000-20200610.sql
@@ -0,0 +1,22 @@
+-- Hand written migration that creates a 'schema_version' table and initializes it.
+
+CREATE FUNCTION init() RETURNS void AS $$
+
+DECLARE
+  emptyDB boolean;
+
+BEGIN
+  SELECT NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='schema_version') INTO emptyDB;
+  IF emptyDB THEN
+    CREATE TABLE "schema_version" (id SERIAL PRIMARY KEY UNIQUE, stage_one INT8 NOT NULL, stage_two INT8 NOT NULL, stage_three INT8 NOT NULL);
+    INSERT INTO "schema_version" (stage_one, stage_two, stage_three) VALUES (0, 0, 0);
+
+    RAISE NOTICE 'DB has been initialized';
+  END IF;
+END;
+
+$$ LANGUAGE plpgsql;
+
+SELECT init();
+
+DROP FUNCTION init();
diff --git a/schema/migration-1-0001-20200611.sql b/schema/migration-1-0001-20200611.sql
new file mode 100644
index 0000000..c5e5ca1
--- /dev/null
+++ b/schema/migration-1-0001-20200611.sql
@@ -0,0 +1,33 @@
+-- Hand written migration to create the custom types with 'DOMAIN' statements.
+
+CREATE FUNCTION migrate() RETURNS void AS $$
+
+DECLARE
+  next_version int;
+
+BEGIN
+  SELECT stage_one + 1 INTO next_version FROM "schema_version";
+  IF next_version = 1 THEN
+    CREATE DOMAIN lovelace AS bigint CHECK (VALUE >= 0 AND VALUE <= 45000000000000000);
+    CREATE DOMAIN txindex AS smallint CHECK (VALUE >= 0 AND VALUE < 1024);
+    CREATE DOMAIN uinteger AS integer CHECK (VALUE >= 0);
+
+    -- Base16 encoded values use a 64 byte hash.
+    CREATE DOMAIN base16type AS bytea CHECK (octet_length (VALUE) = 64);
+
+    -- Blocks, transactions and Merkle roots use a 32 byte hash.
+    CREATE DOMAIN hash32type AS bytea CHECK (octet_length (VALUE) = 32);
+
+    -- Addresses use a 28 byte hash (as do StakeholderIds).
+    CREATE DOMAIN hash28type AS bytea CHECK (octet_length (VALUE) = 28);
+
+    UPDATE "schema_version" SET stage_one = 1;
+    RAISE NOTICE 'DB has been migrated to stage_one version %', next_version;
+  END IF;
+END;
+
+$$ LANGUAGE plpgsql;
+
+SELECT migrate();
+
+DROP FUNCTION migrate();
diff --git a/schema/migration-2-0001-20200615.sql b/schema/migration-2-0001-20200615.sql
new file mode 100644
index 0000000..7c32d6c
--- /dev/null
+++ b/schema/migration-2-0001-20200615.sql
@@ -0,0 +1,20 @@
+-- Persistent generated migration.
+
+CREATE FUNCTION migrate() RETURNS void AS $$
+DECLARE
+  next_version int ;
+BEGIN
+  SELECT stage_two + 1 INTO next_version FROM schema_version ;
+  IF next_version = 1 THEN
+    CREATe TABLE "tx_metadata"("id" SERIAL8 PRIMARY KEY UNIQUE,"hash" base16type NOT NULL,"metadata" json NOT NULL);
+    ALTER TABLE "tx_metadata" ADD CONSTRAINT "unique_tx_metadata" UNIQUE("hash");
+    -- Hand written SQL statements can be added here.
+    UPDATE schema_version SET stage_two = 1 ;
+    RAISE NOTICE 'DB has been migrated to stage_two version %', next_version ;
+  END IF ;
+END ;
+$$ LANGUAGE plpgsql ;
+
+SELECT migrate() ;
+
+DROP FUNCTION migrate() ;
diff --git a/scripts/postgresql-setup.sh b/scripts/postgresql-setup.sh
new file mode 100755
index 0000000..b500146
--- /dev/null
+++ b/scripts/postgresql-setup.sh
@@ -0,0 +1,206 @@
+#!/usr/bin/env bash
+
+# Unofficial bash strict mode.
+# See: http://redsymbol.net/articles/unofficial-bash-strict-mode/
+set -u
+set -o pipefail
+IFS=$'\n\t'
+
+progname="$0"
+
+
+function die {
+  echo "$1"
+  exit 1
+}
+
+function check_pgpass_file {
+  if test -z ${PGPASSFILE+x} ; then
+    echo "Error: The PGPASSFILE env var should be set to the location of the pgpass file."
+    echo
+    echo "Eg for mainnet:"
+    echo "export PGPASSFILE=$(pwd)/config/pgpass"
+    echo
+    exit 1
+  fi
+
+  if test ! -f "${PGPASSFILE}" ; then
+    echo "Error: PostgreSQL password file ${PGPASSFILE} does not exist."
+    exit 1
+  fi
+
+  databasename=$(cut -d ":" -f 3 "${PGPASSFILE}")
+  export databasename
+}
+
+function check_for_psql {
+  # Make sure we have the psql executable.
+  psql -V > /dev/null 2>&1 || die "Error : Missing 'psql' executable!"
+}
+
+function check_psql_superuser {
+  user="$(whoami)"
+  set +e
+  psql -l > /dev/null 2>&1
+  if test $? -ne 0 ; then
+    echo
+    echo "Error : User '$user' can't access postgres."
+    echo
+    echo "To fix this, log into the postgres account and run:"
+    echo "    createuser --createdb --superuser $user"
+    echo
+    exit 1
+  fi
+  set -e
+}
+
+function check_connect_as_user {
+  psql "${databasename}" --no-password --command='\dt' > /dev/null
+  if test $? -ne 0 ; then
+    echo
+    echo "Error : Not able to connect as '$(whoami)' user."
+    echo
+    exit 1
+  fi
+}
+
+function check_db_exists {
+  set +e
+  count=$(psql -l | grep -c "${databasename} ")
+  if test "${count}" -lt 1 ; then
+    echo
+    echo "Error : No '${databasename}' database."
+    echo
+    echo "To create one run:"
+    echo "    $progname --createdb"
+    echo
+    exit 1
+  fi
+  count=$(psql -l | grep "${databasename} " | cut -d \| -f 3 | grep -c UTF8)
+  if test "${count}" -ne 1 ; then
+    echo
+    echo "Error : '${databasename}' database exists, but is not UTF8."
+    echo
+    echo "To fix this you should drop the current one and create a new one using:"
+    echo "    $progname --dropdb"
+    echo "    $progname --createdb"
+    echo
+    exit 1
+  fi
+  set -e
+}
+
+function create_db {
+  createdb -T template0 --owner="$(whoami)" --encoding=UTF8 "${databasename}"
+}
+
+function drop_db {
+  dropdb --if-exists "${databasename}"
+}
+
+function list_views {
+  psql "${databasename}" \
+    --command="select table_name from information_schema.views where table_catalog = '${databasename}' and table_schema = 'public' ;"
+}
+
+function create_migration {
+  echo "To create a migration:"
+  echo "cabal run create-migration --mdir schema/"
+  exit 0
+}
+
+function run_migrations {
+  echo "To run migrations:"
+  echo "cabal run cardano-db-tool run-migrations --mdir schema/ --ldir ."
+  echo "You probably do not need to do this."
+  exit 0
+}
+
+function dump_schema {
+  pg_dump -s "${databasename}"
+}
+
+# Create the database user named in the pgpass file (field 4). This function is
+# referenced by the '--create-user' branch below but was missing; this is a
+# minimal implementation.
+function create_user {
+  dbuser=$(cut -d ":" -f 4 "${PGPASSFILE}")
+  createuser --createdb --no-superuser "${dbuser}"
+}
+
+function usage_exit {
+  echo
+  echo "Usage:"
+  echo "    $progname --check             - Check database exists and is set up correctly."
+  echo "    $progname --createdb          - Create database."
+  echo "    $progname --dropdb            - Drop database."
+  echo "    $progname --list-views        - List the currently defined views."
+  echo "    $progname --recreatedb        - Drop and recreate database."
+  echo "    $progname --create-user       - Create database user (from config/pgpass file)."
+  echo "    $progname --create-migration  - Create a migration (if one is needed)."
+  echo "    $progname --run-migrations    - Run all migrations applying as needed."
+  echo "    $progname --dump-schema       - Dump the schema of the database."
+  echo
+  exit 0
+}
+
+# postgresql_version=$(psql -V | head -1 | sed -e "s/.* //;s/\.[0-9]*$//")
+
+set -e
+
+case "${1:-""}" in
+  --check)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    check_db_exists
+    check_connect_as_user
+    ;;
+  --createdb)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    create_db
+    ;;
+  --dropdb)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    drop_db
+    ;;
+  --list-views)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    check_db_exists
+    check_connect_as_user
+    list_views
+    ;;
+  --recreatedb)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    check_db_exists
+    check_connect_as_user
+    drop_db
+    create_db
+    echo "The database ${databasename} has been dropped and recreated."
+    echo "The tables will be recreated when the application is run."
+    exit 0
+    ;;
+  --create-user)
+    check_pgpass_file
+    check_for_psql
+    check_psql_superuser
+    create_user
+    ;;
+  --create-migration)
+    create_migration
+    ;;
+  --run-migrations)
+    run_migrations
+    ;;
+  --dump-schema)
+    check_pgpass_file
+    check_db_exists
+    dump_schema
+    ;;
+  *)
+    usage_exit
+    ;;
+esac
+
+echo "All good!"
+exit 0
diff --git a/shell.nix b/shell.nix
index ed28f9c..fe231b0 100644
--- a/shell.nix
+++ b/shell.nix
@@ -13,10 +13,7 @@ let
   shell = smashHaskellPackages.shellFor {
     name = "cabal-dev-shell";
 
-    # If shellFor local packages selection is wrong,
-    # then list all local packages then include source-repository-package that cabal complains about:
-    #packages = ps: with ps; [
-    #];
+    packages = ps: lib.attrValues (haskell-nix.haskellLib.selectProjectPackages ps);
 
     # These programs will be available inside the nix-shell.
buildInputs = with haskellPackages; [ diff --git a/smash.cabal b/smash.cabal index 286b0b7..c81d72c 100644 --- a/smash.cabal +++ b/smash.cabal @@ -23,6 +23,20 @@ library Lib , Types , DB + -- Migration + , Cardano.Db.Migration + , Cardano.Db.Migration.Haskell + , Cardano.Db.Migration.Version + -- Config + , Cardano.Db.PGConfig + , Cardano.Db.Run + , Cardano.Db.Schema + -- DB operations + , Cardano.Db.Error + , Cardano.Db.Insert + , Cardano.Db.Query + + other-modules: Paths_smash hs-source-dirs: @@ -38,6 +52,32 @@ library , wai , warp , aeson + -- DB + , persistent + , persistent-postgresql + , persistent-template >= 2.7.0 + , postgresql-simple + , esqueleto + -- REST + , cardano-crypto + , cardano-crypto-class + , base16-bytestring + , monad-logger + , transformers + , transformers-except + , resourcet + , extra + , conduit-extra + , bytestring + , text + , time + , directory + , filepath + , template-haskell + , fast-logger + , unix + , contra-tracer + , iohk-monitoring default-language: Haskell2010 default-extensions: NoImplicitPrelude OverloadedStrings @@ -60,6 +100,7 @@ executable smash-exe base >=4.7 && <5 , cardano-prelude , smash + , optparse-applicative default-language: Haskell2010 default-extensions: NoImplicitPrelude OverloadedStrings diff --git a/src/Cardano/Db/Error.hs b/src/Cardano/Db/Error.hs new file mode 100644 index 0000000..188a6e4 --- /dev/null +++ b/src/Cardano/Db/Error.hs @@ -0,0 +1,65 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DeriveGeneric #-} + +module Cardano.Db.Error + ( DBFail (..) + , renderLookupFail + ) where + +import Cardano.Prelude + +import Data.Aeson (ToJSON (..), (.=), object, Value (..)) + +import Data.ByteString.Char8 (ByteString) + +-- | Errors, not exceptions. +data DBFail + = DbLookupTxMetadataHash !ByteString + | PoolMetadataHashMismatch + | UnableToEncodePoolMetadataToJSON !Text + | UnknownError !Text + deriving (Eq, Show, Generic) + +{- + +The example we agreed would be: +``` +{ + "code": "ERR_4214", + "description": "You did something wrong." +} +``` + +-} + +instance ToJSON DBFail where + toJSON failure@(DbLookupTxMetadataHash _hash) = + object + [ "code" .= String "DbLookupTxMetadataHash" + , "description" .= String (renderLookupFail failure) + ] + toJSON failure@(PoolMetadataHashMismatch) = + object + [ "code" .= String "PoolMetadataHashMismatch" + , "description" .= String (renderLookupFail failure) + ] + toJSON failure@(UnableToEncodePoolMetadataToJSON _err) = + object + [ "code" .= String "UnableToEncodePoolMetadataToJSON" + , "description" .= String (renderLookupFail failure) + ] + toJSON failure@(UnknownError _err) = + object + [ "code" .= String "UnknownError" + , "description" .= String (renderLookupFail failure) + ] + + +renderLookupFail :: DBFail -> Text +renderLookupFail lf = + case lf of + DbLookupTxMetadataHash hash -> "The hash " <> decodeUtf8 hash <> " is missing from the DB." + PoolMetadataHashMismatch -> "The pool metadata does not match!" + UnableToEncodePoolMetadataToJSON err -> "Unable to encode the content to JSON. " <> err + UnknownError text -> "Unknown error. Context: " <> text + diff --git a/src/Cardano/Db/Insert.hs b/src/Cardano/Db/Insert.hs new file mode 100644 index 0000000..336b78c --- /dev/null +++ b/src/Cardano/Db/Insert.hs @@ -0,0 +1,43 @@ + +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE TypeFamilies #-} + +module Cardano.Db.Insert + ( insertTxMetadata + + -- Export mainly for testing. 
+  , insertByReturnKey
+  ) where
+
+import Cardano.Prelude
+
+import Control.Monad.IO.Class (MonadIO)
+import Control.Monad.Trans.Reader (ReaderT)
+
+import Database.Persist.Class (AtLeastOneUniqueKey, Key, PersistEntityBackend,
+                               getByValue, insert)
+import Database.Persist.Sql (SqlBackend)
+import Database.Persist.Types (entityKey)
+
+import Cardano.Db.Schema
+
+
+insertTxMetadata :: MonadIO m => TxMetadata -> ReaderT SqlBackend m TxMetadataId
+insertTxMetadata = insertByReturnKey
+
+-------------------------------------------------------------------------------
+
+-- | Insert a record (with a Unique constraint). Return the key of the newly
+-- inserted record, or the key of the already existing record if there is one.
+insertByReturnKey
+    :: ( AtLeastOneUniqueKey record
+       , MonadIO m
+       , PersistEntityBackend record ~ SqlBackend
+       )
+    => record -> ReaderT SqlBackend m (Key record)
+insertByReturnKey value = do
+  res <- getByValue value
+  case res of
+    Nothing -> insert value
+    Just r -> pure $ entityKey r
+
diff --git a/src/Cardano/Db/Migration.hs b/src/Cardano/Db/Migration.hs
new file mode 100644
index 0000000..24cfc29
--- /dev/null
+++ b/src/Cardano/Db/Migration.hs
@@ -0,0 +1,193 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Cardano.Db.Migration
+  ( MigrationDir (..)
+  , LogFileDir (..)
+  , createMigration
+  , applyMigration
+  , runMigrations
+  ) where
+
+import Cardano.Prelude
+
+import Control.Exception (SomeException, bracket, handle)
+import Control.Monad (forM_, unless)
+import Control.Monad.IO.Class (liftIO)
+import Control.Monad.Logger (NoLoggingT)
+import Control.Monad.Trans.Reader (ReaderT)
+import Control.Monad.Trans.Resource (runResourceT)
+
+import Data.Conduit.Binary (sinkHandle)
+import Data.Conduit.Process (sourceCmdWithConsumer)
+import Data.Either (partitionEithers)
+import qualified Data.List as List
+import qualified Data.ByteString.Char8 as BS
+import Data.Text (Text)
+import qualified Data.Text as Text
+import qualified Data.Text.IO as Text
+import Data.Time.Clock (getCurrentTime)
+import Data.Time.Format (defaultTimeLocale, formatTime, iso8601DateFormat)
+
+import Database.Persist.Sql (SqlBackend, SqlPersistT, entityVal, getMigration, selectFirst)
+
+import Cardano.Db.Migration.Haskell
+import Cardano.Db.Migration.Version
+import Cardano.Db.PGConfig
+import Cardano.Db.Run
+import Cardano.Db.Schema
+
+import System.Directory (listDirectory)
+import System.Exit (ExitCode (..), exitFailure)
+import System.FilePath ((</>), takeFileName)
+import System.IO (Handle, IOMode (AppendMode), hClose, hFlush, hPrint, openFile, stdout)
+
+
+
+newtype MigrationDir
+  = MigrationDir FilePath
+
+newtype LogFileDir
+  = LogFileDir FilePath
+
+-- | Run the migrations in the provided 'MigrationDir' and write a date-stamped
+-- log file to 'LogFileDir'.
+runMigrations :: (PGConfig -> PGConfig) -> Bool -> MigrationDir -> Maybe LogFileDir -> IO ()
+runMigrations cfgOverride quiet migrationDir mLogfiledir = do
+  pgconfig <- cfgOverride <$> readPGPassFileEnv
+  scripts <- getMigrationScripts migrationDir
+  case mLogfiledir of
+    Nothing -> do
+      putTextLn "Running:"
+      forM_ scripts $ applyMigration quiet pgconfig Nothing stdout
+      putTextLn "Success!"
+
+    Just logfiledir -> do
+      logFilename <- genLogFilename logfiledir
+      bracket (openFile logFilename AppendMode) hClose $ \logHandle -> do
+        unless quiet $ putTextLn "Running:"
+        forM_ scripts $ applyMigration quiet pgconfig (Just logFilename) logHandle
+        unless quiet $ putTextLn "Success!"
+  where
+    genLogFilename :: LogFileDir -> IO FilePath
+    genLogFilename (LogFileDir logdir) =
+      (logdir </>)
+        . formatTime defaultTimeLocale ("migrate-" ++ iso8601DateFormat (Just "%H%M%S") ++ ".log")
+        <$> getCurrentTime
+
+applyMigration :: Bool -> PGConfig -> Maybe FilePath -> Handle -> (MigrationVersion, FilePath) -> IO ()
+applyMigration quiet pgconfig mLogFilename logHandle (version, script) = do
+    -- This assumes that the credentials for 'psql' are already sorted out.
+    -- One way to achieve this is via a 'PGPASSFILE' environment variable
+    -- as per the PostgreSQL documentation.
+    let command =
+          List.intercalate " "
+            [ "psql"
+            , BS.unpack (pgcDbname pgconfig)
+            , "--no-password"
+            , "--quiet"
+            , "--username=" <> BS.unpack (pgcUser pgconfig)
+            , "--host=" <> BS.unpack (pgcHost pgconfig)
+            , "--no-psqlrc"                     -- Ignore the ~/.psqlrc file.
+            , "--single-transaction"            -- Run the file as a transaction.
+            , "--set ON_ERROR_STOP=on"          -- Exit with non-zero on error.
+            , "--file='" ++ script ++ "'"
+            , "2>&1"                            -- Pipe stderr to stdout.
+            ]
+    hPutStrLn logHandle $ "Running : " ++ takeFileName script
+    unless quiet $ putStr ("    " ++ takeFileName script ++ " ... ")
+    hFlush stdout
+    exitCode <- fst <$> handle (errorExit :: SomeException -> IO a)
+                        (runResourceT $ sourceCmdWithConsumer command (sinkHandle logHandle))
+    case exitCode of
+      ExitSuccess -> do
+        unless quiet $ putTextLn "ok"
+        runHaskellMigration logHandle version
+      ExitFailure _ -> errorExit exitCode
+  where
+    errorExit :: Show e => e -> IO a
+    errorExit e = do
+      print e
+      hPrint logHandle e
+      case mLogFilename of
+        Nothing -> pure ()
+        Just logFilename -> putStrLn $ "\nErrors in file: " ++ logFilename ++ "\n"
+      exitFailure
+
+-- | Create a database migration (using functionality built into Persistent). If no
+-- migration is needed return 'Nothing', otherwise write the migration to a file in
+-- the provided 'MigrationDir' and return its file name.
+createMigration :: MigrationDir -> IO (Maybe FilePath)
+createMigration (MigrationDir migdir) = do
+    mt <- runDbNoLogging create
+    case mt of
+      Nothing -> pure Nothing
+      Just (ver, mig) -> do
+        let fname = toS $ renderMigrationVersionFile ver
+        Text.writeFile (migdir </> fname) mig
+        pure $ Just fname
+  where
+    create :: ReaderT SqlBackend (NoLoggingT IO) (Maybe (MigrationVersion, Text))
+    create = do
+      ver <- getSchemaVersion
+      statements <- getMigration migrateCardanoDb
+      if null statements
+        then pure Nothing
+        else do
+          nextVer <- liftIO $ nextMigrationVersion ver
+          pure $ Just (nextVer, genScript statements (mvVersion nextVer))
+
+    genScript :: [Text] -> Int -> Text
+    genScript statements next_version =
+      Text.concat $
+        [ "-- Persistent generated migration.\n\n"
+        , "CREATE FUNCTION migrate() RETURNS void AS $$\n"
+        , "DECLARE\n"
+        , "  next_version int ;\n"
+        , "BEGIN\n"
+        , "  SELECT stage_two + 1 INTO next_version FROM schema_version ;\n"
+        , "  IF next_version = " <> textShow next_version <> " THEN\n"
+        ]
+        ++ concatMap buildStatement statements ++
+        [ "    -- Hand written SQL statements can be added here.\n"
+        , "    UPDATE schema_version SET stage_two = ", textShow next_version, " ;\n"
+        , "    RAISE NOTICE 'DB has been migrated to stage_two version %', next_version ;\n"
+        , "  END IF ;\n"
+        , "END ;\n"
+        , "$$ LANGUAGE plpgsql ;\n\n"
+        , "SELECT migrate() ;\n\n"
+        , "DROP FUNCTION migrate() ;\n"
+        ]
+
+    buildStatement :: Text -> [Text]
+    buildStatement sql = ["    ", sql, ";\n"]
+
+    getSchemaVersion :: SqlPersistT (NoLoggingT IO) MigrationVersion
+    getSchemaVersion = do
+      res <- selectFirst [] []
+      case res of
+        Nothing -> panic "getSchemaVersion failed!"
+        Just x -> do
+          -- Only interested in the stage2 version because that is the only stage for
+          -- which Persistent migrations are generated.
+          let (SchemaVersion _ stage2 _) = entityVal x
+          pure $ MigrationVersion 2 stage2 0
+
+--------------------------------------------------------------------------------
+
+getMigrationScripts :: MigrationDir -> IO [(MigrationVersion, FilePath)]
+getMigrationScripts (MigrationDir location) = do
+    files <- listDirectory location
+    let xs = map addVersionString (List.sort $ List.filter isMigrationScript files)
+    case partitionEithers xs of
+      ([], rs) -> pure rs
+      (ls, _) -> panic (toS $ "getMigrationScripts: Unable to parse " ++ show ls)
+  where
+    isMigrationScript :: FilePath -> Bool
+    isMigrationScript fp =
+      List.isPrefixOf "migration-" fp && List.isSuffixOf ".sql" fp
+
+    addVersionString :: FilePath -> Either FilePath (MigrationVersion, FilePath)
+    addVersionString fp =
+      maybe (Left fp) (\mv -> Right (mv, location </> fp)) $ (parseMigrationVersionFromFile $ toS fp)
+
+textShow :: Show a => a -> Text
+textShow = Text.pack . show
diff --git a/src/Cardano/Db/Migration/Haskell.hs b/src/Cardano/Db/Migration/Haskell.hs
new file mode 100644
index 0000000..df45949
--- /dev/null
+++ b/src/Cardano/Db/Migration/Haskell.hs
@@ -0,0 +1,72 @@
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Cardano.Db.Migration.Haskell
+  ( runHaskellMigration
+  ) where
+
+import Cardano.Prelude
+
+import Control.Exception (SomeException, handle)
+import Control.Monad.Logger (MonadLogger)
+import Control.Monad.Trans.Reader (ReaderT)
+
+import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as Map
+
+import Database.Persist.Sql (SqlBackend)
+
+import Cardano.Db.Migration.Version
+import Cardano.Db.Run
+
+import System.Exit (exitFailure)
+import System.IO (Handle, hClose, hFlush, stdout)
+
+-- | Run a migration written in Haskell (eg one that cannot easily be done in SQL).
+-- The Haskell migration is paired with an SQL migration and uses the same MigrationVersion
+-- numbering system. For example when 'migration-2-0008-20190731.sql' is applied this
+-- function will be called and if a Haskell migration with that version number exists
+-- in the 'migrationMap' it will be run.
+--
+-- An example of how this may be used is:
+--   1. 'migration-2-0008-20190731.sql' adds a new NULL-able column.
+--   2. Haskell migration 'MigrationVersion 2 8 20190731' populates the new column from data
+--      already in the database.
+--   3. 'migration-2-0009-20190731.sql' makes the new column NOT NULL.
+
+runHaskellMigration :: Handle -> MigrationVersion -> IO ()
+runHaskellMigration logHandle mversion =
+    case Map.lookup mversion migrationMap of
+      Nothing -> pure ()
+      Just action -> do
+        let migrationVersion = toS $ renderMigrationVersion mversion
+        hPutStrLn logHandle $ "Running : migration-" ++ migrationVersion ++ ".hs"
+        putStr $ "    migration-" ++ migrationVersion ++ ".hs ... "
" + hFlush stdout + handle handler $ runDbHandleLogger logHandle action + putTextLn "ok" + where + handler :: SomeException -> IO a + handler e = do + putStrLn $ "runHaskellMigration: " ++ show e + hPutStrLn logHandle $ "runHaskellMigration: " ++ show e + hClose logHandle + exitFailure + +-------------------------------------------------------------------------------- + +migrationMap :: MonadLogger m => Map MigrationVersion (ReaderT SqlBackend m ()) +migrationMap = + Map.fromList + [ ( MigrationVersion 2 1 20190731, migration0001 ) + ] + +-------------------------------------------------------------------------------- + +migration0001 :: MonadLogger m => ReaderT SqlBackend m () +migration0001 = + -- Place holder. + pure () + +-------------------------------------------------------------------------------- + diff --git a/src/Cardano/Db/Migration/Version.hs b/src/Cardano/Db/Migration/Version.hs new file mode 100644 index 0000000..cedf59c --- /dev/null +++ b/src/Cardano/Db/Migration/Version.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Cardano.Db.Migration.Version + ( MigrationVersion (..) + , parseMigrationVersionFromFile + , nextMigrationVersion + , renderMigrationVersion + , renderMigrationVersionFile + ) where + +import Cardano.Prelude + +import qualified Data.List as List +import qualified Data.List.Extra as List +import qualified Data.Time.Calendar as Time +import qualified Data.Time.Clock as Time + +import Text.Printf (printf) + + +data MigrationVersion = MigrationVersion + { mvStage :: Int + , mvVersion :: Int + , mvDate :: Int + } deriving (Eq, Ord, Show) + + +parseMigrationVersionFromFile :: Text -> Maybe MigrationVersion +parseMigrationVersionFromFile str = + case List.splitOn "-" (List.takeWhile (/= '.') (toS str)) of + [_, stage, ver, date] -> + case (readMaybe stage, readMaybe ver, readMaybe date) of + (Just s, Just v, Just d) -> Just $ MigrationVersion s v d + _ -> Nothing + _ -> Nothing + +nextMigrationVersion :: MigrationVersion -> IO MigrationVersion +nextMigrationVersion (MigrationVersion _stage ver _date) = do + -- We can ignore the provided 'stage' and 'date' fields, but we do bump the version number. + -- All new versions have 'stage == 2' because the stage 2 migrations are the Presistent + -- generated ones. For the date we use today's date. + (y, m, d) <- Time.toGregorian . Time.utctDay <$> Time.getCurrentTime + pure $ MigrationVersion 2 (ver + 1) (fromIntegral y * 10000 + m * 100 + d) + +renderMigrationVersion :: MigrationVersion -> Text +renderMigrationVersion mv = + toS $ List.intercalate "-" + [ printf "%d" (mvStage mv) + , printf "%04d" (mvVersion mv) + , show (mvDate mv) + ] + +renderMigrationVersionFile :: MigrationVersion -> Text +renderMigrationVersionFile mv = + toS $ List.concat + [ "migration-" + , toS $ renderMigrationVersion mv + , ".sql" + ] + diff --git a/src/Cardano/Db/PGConfig.hs b/src/Cardano/Db/PGConfig.hs new file mode 100644 index 0000000..8381522 --- /dev/null +++ b/src/Cardano/Db/PGConfig.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Cardano.Db.PGConfig + ( PGConfig (..) + , PGPassFile (..) 
+ , readPGPassFileEnv + , readPGPassFile + , readPGPassFileExit + , toConnectionString + ) where + +import Cardano.Prelude + +import Control.Exception (IOException) +import qualified Control.Exception as Exception + +import Data.ByteString.Char8 (ByteString) +import qualified Data.ByteString.Char8 as BS + +import Database.Persist.Postgresql (ConnectionString) + +import System.Environment (lookupEnv, setEnv) +import System.Posix.User (getEffectiveUserName) + +-- | PGConfig as specified by https://www.postgresql.org/docs/11/libpq-pgpass.html +-- However, this module expects the config data to be on the first line. +data PGConfig = PGConfig + { pgcHost :: ByteString + , pgcPort :: ByteString + , pgcDbname :: ByteString + , pgcUser :: ByteString + , pgcPassword :: ByteString + } deriving Show + +newtype PGPassFile + = PGPassFile FilePath + +toConnectionString :: PGConfig -> ConnectionString +toConnectionString pgc = + BS.concat + [ "host=", pgcHost pgc, " " + , "port=", pgcPort pgc, " " + , "user=", pgcUser pgc, " " + , "dbname=", pgcDbname pgc, " " + , "password=", pgcPassword pgc + ] + +-- | Read the PostgreSQL configuration from the file at the location specified by the +-- '$PGPASSFILE' environment variable. +readPGPassFileEnv :: IO PGConfig +readPGPassFileEnv = do + mpath <- lookupEnv "PGPASSFILE" + case mpath of + Just fp -> readPGPassFileExit (PGPassFile fp) + Nothing -> panic $ "Environment variable 'PGPASSFILE' not set." + +-- | Read the PostgreSQL configuration from the specified file. +readPGPassFile :: PGPassFile -> IO (Maybe PGConfig) +readPGPassFile (PGPassFile fpath) = do + ebs <- Exception.try $ BS.readFile fpath + case ebs of + Left e -> pure $ handler e + Right bs -> extract bs + where + handler :: IOException -> Maybe a + handler = const Nothing + + extract :: ByteString -> IO (Maybe PGConfig) + extract bs = + case BS.lines bs of + (b:_) -> parseConfig b + _ -> pure Nothing + + parseConfig :: ByteString -> IO (Maybe PGConfig) + parseConfig bs = + case BS.split ':' bs of + [h, pt, d, u, pwd] -> Just <$> replaceUser (PGConfig h pt d u pwd) + _ -> pure Nothing + + replaceUser :: PGConfig -> IO PGConfig + replaceUser pgc + | pgcUser pgc /= "*" = pure pgc + | otherwise = do + euser <- Exception.try getEffectiveUserName + case euser of + Left (_ :: IOException) -> + panic "readPGPassFile: User in pgpass file was specified as '*' but getEffectiveUserName failed." + Right user -> + pure $ pgc { pgcUser = BS.pack user } + + +-- | Read 'PGPassFile' into 'PGConfig'. +-- If it fails it will raise an error. +-- If it succeeds, it will set the 'PGPASSFILE' environment variable. +readPGPassFileExit :: PGPassFile -> IO PGConfig +readPGPassFileExit pgpassfile@(PGPassFile fpath) = do + mc <- readPGPassFile pgpassfile + case mc of + Nothing -> panic $ toS $ "Not able to read PGPassFile at " ++ show fpath ++ "." + Just pgc -> do + setEnv "PGPASSFILE" fpath + pure pgc diff --git a/src/Cardano/Db/Query.hs b/src/Cardano/Db/Query.hs new file mode 100644 index 0000000..a043a71 --- /dev/null +++ b/src/Cardano/Db/Query.hs @@ -0,0 +1,53 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE NumericUnderscores #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Cardano.Db.Query + ( DBFail (..) 
+  , queryTxMetadata
+  ) where
+
+import Cardano.Prelude hiding (from, maybeToEither)
+
+import Control.Monad (join)
+import Control.Monad.Extra (mapMaybeM)
+import Control.Monad.IO.Class (MonadIO, liftIO)
+import Control.Monad.Trans.Except (ExceptT (..), runExceptT)
+import Control.Monad.Trans.Reader (ReaderT)
+
+import Data.ByteString.Char8 (ByteString)
+import Data.Fixed (Micro)
+import Data.Maybe (catMaybes, fromMaybe, listToMaybe)
+import Data.Ratio ((%), numerator)
+import Data.Text (Text)
+import Data.Time.Clock (UTCTime, addUTCTime, diffUTCTime, getCurrentTime)
+import Data.Time.Clock.POSIX (POSIXTime, utcTimeToPOSIXSeconds)
+import Data.Word (Word16, Word64)
+
+import Database.Esqueleto (Entity (..), From, InnerJoin (..), LeftOuterJoin (..),
+                           PersistField, SqlExpr, SqlQuery, Value (..), ValueList,
+                           (^.), (==.), (<=.), (&&.), (||.), (>.),
+                           count, countRows, desc, entityKey, entityVal, from, exists,
+                           in_, isNothing, just, limit, max_, min_, not_, notExists, on, orderBy,
+                           select, subList_select, sum_, unValue, unSqlBackendKey, val, where_)
+import Database.Persist.Sql (SqlBackend)
+
+import Cardano.Db.Error
+import Cardano.Db.Schema
+
+-- | Get the 'TxMetadata' associated with the given hash.
+queryTxMetadata :: MonadIO m => ByteString -> ReaderT SqlBackend m (Either DBFail TxMetadata)
+queryTxMetadata hash = do
+  res <- select . from $ \ blk -> do
+            where_ (blk ^. TxMetadataHash ==. val hash)
+            pure blk
+  pure $ maybeToEither (DbLookupTxMetadataHash hash) entityVal (listToMaybe res)
+
+------------------------------------------------------------------------------------
+
+maybeToEither :: e -> (a -> b) -> Maybe a -> Either e b
+maybeToEither e f =
+  maybe (Left e) (Right . f)
+
+
diff --git a/src/Cardano/Db/Run.hs b/src/Cardano/Db/Run.hs
new file mode 100644
index 0000000..7bef2e6
--- /dev/null
+++ b/src/Cardano/Db/Run.hs
@@ -0,0 +1,167 @@
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Cardano.Db.Run
+  ( getBackendGhci
+  , ghciDebugQuery
+  , runDbAction
+  , runDbHandleLogger
+  , runDbIohkLogging
+  , runDbNoLogging
+  , runDbStdoutLogging
+  ) where
+
+import Cardano.BM.Data.LogItem (LogObject (..), LOContent (..), PrivacyAnnotation (..), mkLOMeta)
+import Cardano.BM.Data.Severity (Severity (..))
+import Cardano.BM.Trace (Trace)
+
+import Control.Tracer (traceWith)
+
+import Cardano.Prelude
+
+import Control.Monad.Logger (LogLevel (..), LogSource, LoggingT, NoLoggingT,
+                             defaultLogStr, runLoggingT, runNoLoggingT, runStdoutLoggingT)
+import Control.Monad.Trans.Reader (ReaderT)
+import Control.Monad.IO.Class (liftIO)
+
+import qualified Data.ByteString.Char8 as BS
+import Data.Text (Text)
+import qualified Data.Text.Encoding as T
+import qualified Data.Text.Lazy.IO as LT
+import qualified Data.Text.Lazy.Builder as LT
+
+import Database.Persist.Postgresql (withPostgresqlConn, openSimpleConn)
+import Database.PostgreSQL.Simple (connectPostgreSQL)
+import Database.Persist.Sql (SqlBackend, IsolationLevel (..), runSqlConnWithIsolation)
+
+import Database.Esqueleto
+import Database.Esqueleto.Internal.Sql
+
+import Cardano.Db.PGConfig
+
+import Language.Haskell.TH.Syntax (Loc)
+
+import System.IO (Handle, stdout)
+import System.Log.FastLogger (LogStr, fromLogStr)
+
+-- | Run a DB action logging via the provided Handle.
+runDbHandleLogger :: Handle -> ReaderT SqlBackend (LoggingT IO) a -> IO a
+runDbHandleLogger logHandle dbAction = do
+    pgconf <- readPGPassFileEnv
+    runHandleLoggerT .
+ withPostgresqlConn (toConnectionString pgconf) $ \backend -> + -- The 'runSqlConnWithIsolation' function starts a transaction, runs the 'dbAction' + -- and then commits the transaction. + runSqlConnWithIsolation dbAction backend Serializable + where + runHandleLoggerT :: LoggingT m a -> m a + runHandleLoggerT action = + runLoggingT action logOut + + logOut :: Loc -> LogSource -> LogLevel -> LogStr -> IO () + logOut loc src level msg = + BS.hPutStrLn logHandle . fromLogStr $ defaultLogStr loc src level msg + +runDbAction :: Maybe (Trace IO Text) -> ReaderT SqlBackend (LoggingT IO) a -> IO a +runDbAction mLogging dbAction = do + pgconf <- readPGPassFileEnv + case mLogging of + Nothing -> + runSilentLoggingT . + withPostgresqlConn (toConnectionString pgconf) $ \backend -> + runSqlConnWithIsolation dbAction backend Serializable + Just tracer -> + runIohkLogging tracer . + withPostgresqlConn (toConnectionString pgconf) $ \backend -> + runSqlConnWithIsolation dbAction backend Serializable + where + runSilentLoggingT :: LoggingT m a -> m a + runSilentLoggingT action = runLoggingT action silentLog + + silentLog :: Monad m => Loc -> LogSource -> LogLevel -> LogStr -> m () + silentLog _loc _src _level _msg = pure () + +-- | Run a DB action logging via iohk-monitoring-framework. +runDbIohkLogging :: Trace IO Text -> ReaderT SqlBackend (LoggingT IO) b -> IO b +runDbIohkLogging tracer dbAction = do + pgconf <- readPGPassFileEnv + runIohkLogging tracer . + withPostgresqlConn (toConnectionString pgconf) $ \backend -> + runSqlConnWithIsolation dbAction backend Serializable + +runIohkLogging :: Trace IO Text -> LoggingT m a -> m a +runIohkLogging tracer action = + runLoggingT action toIohkLog + where + toIohkLog :: Loc -> LogSource -> LogLevel -> LogStr -> IO () + toIohkLog _loc _src level msg = do + meta <- mkLOMeta (toIohkSeverity level) Public + traceWith tracer $ + (name, LogObject name meta (LogMessage . T.decodeLatin1 $ fromLogStr msg)) + + name :: Text + name = "db-sync" + + toIohkSeverity :: LogLevel -> Severity + toIohkSeverity = + \case + LevelDebug -> Debug + LevelInfo -> Info + LevelWarn -> Warning + LevelError -> Error + LevelOther _ -> Error + +-- | Run a DB action without any logging. Mainly for tests. +runDbNoLogging :: ReaderT SqlBackend (NoLoggingT IO) a -> IO a +runDbNoLogging action = do + pgconfig <- readPGPassFileEnv + runNoLoggingT . + withPostgresqlConn (toConnectionString pgconfig) $ \backend -> + runSqlConnWithIsolation action backend Serializable + +-- | Run a DB action with stdout logging. Mainly for debugging. +runDbStdoutLogging :: ReaderT SqlBackend (LoggingT IO) b -> IO b +runDbStdoutLogging action = do + pgconfig <- readPGPassFileEnv + runStdoutLoggingT . 
+    withPostgresqlConn (toConnectionString pgconfig) $ \backend ->
+      runSqlConnWithIsolation action backend Serializable
+
+-- from Control.Monad.Logger, wasn't exported
+defaultOutput :: Handle
+              -> Loc
+              -> LogSource
+              -> LogLevel
+              -> LogStr
+              -> IO ()
+defaultOutput h loc src level msg =
+    BS.hPutStr h ls
+  where
+    ls = defaultLogStrBS loc src level msg
+
+defaultLogStrBS :: Loc
+                -> LogSource
+                -> LogLevel
+                -> LogStr
+                -> BS.ByteString
+defaultLogStrBS a b c d =
+    toBS $ defaultLogStr a b c d
+  where
+    toBS = fromLogStr
+
+getBackendGhci :: IO SqlBackend
+getBackendGhci = do
+  pgconfig <- readPGPassFileEnv
+  connection <- connectPostgreSQL (toConnectionString pgconfig)
+  openSimpleConn (\loc source level str -> defaultOutput stdout loc source level str) connection
+
+ghciDebugQuery :: SqlSelect a r => SqlQuery a -> IO ()
+ghciDebugQuery query = do
+  pgconfig <- readPGPassFileEnv
+  runStdoutLoggingT . withPostgresqlConn (toConnectionString pgconfig) $ \backend -> do
+    let
+      (sql,params) = toRawSql SELECT (backend, initialIdentState) query
+    liftIO $ do
+      LT.putStr $ LT.toLazyText sql
+      print params
diff --git a/src/Cardano/Db/Schema.hs b/src/Cardano/Db/Schema.hs
new file mode 100644
index 0000000..cce23d9
--- /dev/null
+++ b/src/Cardano/Db/Schema.hs
@@ -0,0 +1,63 @@
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GADTs #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE StandaloneDeriving #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE UndecidableInstances #-}
+
+module Cardano.Db.Schema where
+
+import Cardano.Prelude hiding (Meta)
+
+import Data.ByteString.Char8 (ByteString)
+import Data.Text (Text)
+import Data.Time.Clock (UTCTime)
+import Data.Word (Word16, Word64)
+
+-- Do not use explicit imports from this module as the imports can change
+-- from version to version due to changes to the TH code in Persistent.
+import Database.Persist.TH
+
+-- In the schema definition we need to match Haskell types with the
+-- custom types defined in PostgreSQL (via 'DOMAIN' statements). For the
+-- time being the Haskell types will be simple Haskell types like
+-- 'ByteString' and 'Word64'.
+
+-- We use camelCase here in the Haskell schema definition and 'persistLowerCase'
+-- specifies that all the table and column names are converted to lower snake case.
+
+share
+  [ mkPersist sqlSettings
+  , mkDeleteCascade sqlSettings
+  , mkMigrate "migrateCardanoDb"
+  ]
+  [persistLowerCase|
+
+  -- Schema versioning has three stages to best allow handling of schema migrations.
+  -- Stage 1: Set up PostgreSQL data types (using SQL 'DOMAIN' statements).
+  -- Stage 2: Persistent generated migrations.
+  -- Stage 3: Set up 'VIEW' tables (for use by other languages and applications).
+  -- This table should have a single row.
+  SchemaVersion
+    stageOne Int
+    stageTwo Int
+    stageThree Int
+
+  -- The table containing the metadata.
+
+  TxMetadata
+    hash                ByteString          sqltype=base16type
+    metadata            Text                sqltype=json
+    UniqueTxMetadata    hash
+
+  |]
+
diff --git a/src/DB.hs b/src/DB.hs
index bff3b3b..1e609e2 100644
--- a/src/DB.hs
+++ b/src/DB.hs
@@ -6,9 +6,10 @@
 {-# LANGUAGE TypeOperators #-}
 
 module DB
-  ( DataLayerError
+  ( module X
   , DataLayer (..)
, stubbedDataLayer + , postgresqlDataLayer -- * Examples , stubbedInitialDataMap , stubbedBlacklistedPools @@ -16,28 +17,47 @@ module DB import Cardano.Prelude +import Control.Monad.Trans.Except.Exit (orDie) +import Control.Monad.Trans.Except.Extra (newExceptT) + +import Data.Aeson (eitherDecode') import qualified Data.Map as Map import Data.IORef (IORef, readIORef, modifyIORef) +import qualified Data.Text as T +import qualified Data.ByteString.Lazy as BL + import Types --- | Errors, not exceptions. -data DataLayerError - = PoolHashNotFound !PoolHash - deriving (Eq, Show) +import Cardano.Db.Insert (insertTxMetadata) +import Cardano.Db.Query (DBFail (..), queryTxMetadata) + +import qualified Cardano.Crypto.Hash.Class as Crypto +import qualified Cardano.Crypto.Hash.Blake2b as Crypto + +import qualified Data.ByteString.Base16 as B16 + +import Cardano.Db.Migration as X +import Cardano.Db.Migration.Version as X +import Cardano.Db.PGConfig as X +import Cardano.Db.Run as X +import Cardano.Db.Schema as X +import Cardano.Db.Error as X -- | This is the data layer for the DB. -- The resulting operation has to be @IO@, it can be made more granular, -- but currently there is no complexity involved for that to be a sane choice. data DataLayer = DataLayer - { dlGetPoolMetadata :: PoolHash -> IO (Either DataLayerError PoolOfflineMetadata) - , dlAddPoolMetadata :: PoolHash -> PoolOfflineMetadata -> IO (Either DataLayerError PoolOfflineMetadata) - , dlGetBlacklistedPools :: IO (Either DataLayerError [PoolHash]) - , dlAddBlacklistedPool :: PoolHash -> IO (Either DataLayerError PoolHash) + { dlGetPoolMetadataSimple :: PoolHash -> IO (Either DBFail Text) + , dlGetPoolMetadata :: PoolHash -> IO (Either DBFail PoolOfflineMetadata) + , dlAddPoolMetadata :: PoolHash -> PoolOfflineMetadata -> IO (Either DBFail PoolOfflineMetadata) + , dlAddPoolMetadataSimple :: PoolHash -> Text -> IO (Either DBFail TxMetadataId) + , dlGetBlacklistedPools :: IO (Either DBFail [PoolHash]) + , dlAddBlacklistedPool :: PoolHash -> IO (Either DBFail PoolHash) } -- | Simple stubbed @DataLayer@ for an example. --- We do need state here. _This thing is thread safe._ +-- We do need state here. _This is thread safe._ -- __This is really our model here.__ stubbedDataLayer :: IORef (Map PoolHash PoolOfflineMetadata) @@ -48,13 +68,18 @@ stubbedDataLayer ioDataMap ioBlacklistedPool = DataLayer ioDataMap' <- readIORef ioDataMap case (Map.lookup poolHash ioDataMap') of Just poolOfflineMetadata' -> return $ Right poolOfflineMetadata' - Nothing -> return $ Left (PoolHashNotFound poolHash) + Nothing -> return $ Left (DbLookupTxMetadataHash (encodeUtf8 $ getPoolHash poolHash)) + + , dlGetPoolMetadataSimple = \poolHash -> panic "To implement!" , dlAddPoolMetadata = \poolHash poolMetadata -> do -- TODO(KS): What if the pool metadata already exists? _ <- modifyIORef ioDataMap (\dataMap -> Map.insert poolHash poolMetadata dataMap) return $ Right poolMetadata + -- TODO(KS): To speed up development. + , dlAddPoolMetadataSimple = panic "To implement!" + , dlGetBlacklistedPools = do blacklistedPool <- readIORef ioBlacklistedPool return $ Right blacklistedPool @@ -64,7 +89,6 @@ stubbedDataLayer ioDataMap ioBlacklistedPool = DataLayer -- TODO(KS): Do I even need to query this? blacklistedPool <- readIORef ioBlacklistedPool return $ Right poolHash - } -- The approximation for the table. 
@@ -77,3 +101,37 @@ stubbedInitialDataMap = Map.fromList
 
 stubbedBlacklistedPools :: [PoolHash]
 stubbedBlacklistedPools = []
+
+postgresqlDataLayer :: DataLayer
+postgresqlDataLayer = DataLayer
+    { dlGetPoolMetadata = \poolHash -> panic "To implement!"
+
+    , dlGetPoolMetadataSimple = \poolHash -> do
+        txMetadata <- runDbAction Nothing $ queryTxMetadata (encodeUtf8 $ getPoolHash poolHash)
+        return (txMetadataMetadata <$> txMetadata)
+
+    , dlAddPoolMetadata = \poolHash poolMetadata -> panic "To implement!"
+
+    , dlAddPoolMetadataSimple = \poolHash poolMetadata -> do
+
+        let poolOfflineMetadataByteString = BL.fromStrict . encodeUtf8 $ poolMetadata
+
+        -- Try to decode the contents as JSON.
+        let decodedPoolMetadataJSON :: Either DBFail PoolOfflineMetadata
+            decodedPoolMetadataJSON = case (eitherDecode' poolOfflineMetadataByteString) of
+                Left err -> Left $ UnableToEncodePoolMetadataToJSON $ toS err
+                Right result -> return result
+
+        -- If the metadata cannot be decoded as JSON, fail; the hash check below is sketched at the end of this patch.
+        _ <- orDie renderLookupFail (newExceptT $ pure decodedPoolMetadataJSON)
+
+        let poolHashBytestring = encodeUtf8 $ getPoolHash poolHash
+        let hashFromMetadata = B16.encode $ Crypto.digest (Proxy :: Proxy Crypto.Blake2b_256) (encodeUtf8 poolMetadata)
+
+        if hashFromMetadata /= poolHashBytestring
+            then return $ Left PoolMetadataHashMismatch
+            else fmap Right $ runDbAction Nothing $ insertTxMetadata $ TxMetadata poolHashBytestring poolMetadata
+
+    , dlGetBlacklistedPools = panic "To implement!"
+    , dlAddBlacklistedPool = \poolHash -> panic "To implement!"
+    }
+
diff --git a/src/Lib.hs b/src/Lib.hs
index fe923d5..81aa3db 100644
--- a/src/Lib.hs
+++ b/src/Lib.hs
@@ -7,8 +7,11 @@
 
 module Lib
     ( Configuration (..)
+    , DBFail (..) -- Expose DB errors clearly to callers
     , defaultConfiguration
     , runApp
+    , runAppStubbed
+    , runPoolInsertion
     ) where
 
 import Cardano.Prelude
 
@@ -16,7 +19,6 @@ import Cardano.Prelude
 import Data.IORef (newIORef)
 
 import Data.Swagger (Info (..), Swagger (..))
-
 import Network.Wai.Handler.Warp (defaultSettings, runSettings,
                   setBeforeMainLoop, setPort)
@@ -36,8 +38,11 @@ import Types
 
 -- The basic auth.
 type BasicAuthURL = BasicAuth "smash" User
 
+-- | Shortcut for common API result types.
+type ApiRes verb a = verb '[JSON] (ApiResult DBFail a)
+
 -- GET api/v1/metadata/{hash}
-type OfflineMetadataAPI = "api" :> "v1" :> "metadata" :> Capture "hash" PoolHash :> Get '[JSON] PoolOfflineMetadata
+type OfflineMetadataAPI = "api" :> "v1" :> "metadata" :> Capture "hash" PoolHash :> ApiRes Get PoolMetadataWrapped
 
 -- POST api/v1/blacklist |-> {"blacklistPool" : "pool"}
 type BlacklistPoolAPI = BasicAuthURL :> "api" :> "v1" :> "blacklist" :> ReqBody '[JSON] BlacklistPool :> Post '[JSON] PoolOfflineMetadata
@@ -86,8 +91,18 @@ runApp configuration = do
 
     runSettings settings =<< mkApp configuration
 
-mkApp :: Configuration -> IO Application
-mkApp configuration = do
+runAppStubbed :: Configuration -> IO ()
+runAppStubbed configuration = do
+    let port = cPortNumber configuration
+    let settings =
+          setPort port $
+          setBeforeMainLoop (hPutStrLn stderr ("listening on port " ++ show port)) $
+          defaultSettings
+
+    runSettings settings =<< mkAppStubbed configuration
+
+mkAppStubbed :: Configuration -> IO Application
+mkAppStubbed configuration = do
     ioDataMap <- newIORef stubbedInitialDataMap
     ioBlacklistedPools <- newIORef stubbedBlacklistedPools
 
@@ -100,6 +115,30 @@ mkApp configuration = do
         (basicAuthServerContext stubbedApplicationUsers)
         (server configuration dataLayer)
 
+mkApp :: Configuration -> IO Application
+mkApp configuration = do
+
+    let dataLayer :: DataLayer
+        dataLayer = postgresqlDataLayer
+
+    return $ serveWithContext
+        fullAPI
+        (basicAuthServerContext stubbedApplicationUsers)
+        (server configuration dataLayer)
+
+-- | Insert the pool metadata from the given JSON file, checking it against the supplied hash.
+runPoolInsertion :: FilePath -> Text -> IO (Either DBFail TxMetadataId)
+runPoolInsertion poolMetadataJsonPath poolHash = do
+    putTextLn $ "Inserting pool! " <> toS poolMetadataJsonPath <> " " <> poolHash
+
+    let dataLayer :: DataLayer
+        dataLayer = postgresqlDataLayer
+
+    -- Read the raw JSON; the data layer validates it against the hash on insert.
+    poolMetadataJson <- readFile poolMetadataJsonPath
+
+    dlAddPoolMetadataSimple dataLayer (PoolHash poolHash) poolMetadataJson
+
 -- | We need to supply our handlers with the right Context.
 basicAuthServerContext :: ApplicationUsers -> Context (BasicAuthCheck User ': '[])
 basicAuthServerContext applicationUsers = (authCheck applicationUsers) :. EmptyContext
@@ -131,7 +170,7 @@ convertIOToHandler = Handler . ExceptT . try
 server :: Configuration -> DataLayer -> Server API --Server SmashAPI
 server configuration dataLayer
     = return todoSwagger
-    :<|> getPoolOfflineMetadata
+    :<|> getPoolOfflineMetadata dataLayer
     :<|> postBlacklistPool
 
 postBlacklistPool :: User -> BlacklistPool -> Handler PoolOfflineMetadata
@@ -140,10 +179,11 @@ postBlacklistPool user blacklistPool = convertIOToHandler $ do
     return examplePoolOfflineMetadata
     -- throwError err404
 
-getPoolOfflineMetadata :: PoolHash -> Handler PoolOfflineMetadata
-getPoolOfflineMetadata poolHash = convertIOToHandler $ do
-    putTextLn $ show poolHash
-    return examplePoolOfflineMetadata
+getPoolOfflineMetadata :: DataLayer -> PoolHash -> Handler (ApiResult DBFail PoolMetadataWrapped)
+getPoolOfflineMetadata dataLayer poolHash = convertIOToHandler $ do
+    let getPoolMetadataSimple = dlGetPoolMetadataSimple dataLayer
+    poolMetadata <- getPoolMetadataSimple poolHash
+    return . ApiResult $ PoolMetadataWrapped <$> poolMetadata
 
 -- | Here for checking the validity of the data type.
--isValidPoolOfflineMetadata :: PoolOfflineMetadata -> Bool
diff --git a/src/Types.hs b/src/Types.hs
index af8aecd..8c314e4 100644
--- a/src/Types.hs
+++ b/src/Types.hs
@@ -1,4 +1,4 @@
-{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DeriveGeneric     #-}
 
 module Types
     ( ApplicationUser (..)
@@ -9,8 +9,10 @@ module Types
     , checkIfUserValid
     -- * Pool info
     , BlacklistPool
-    , PoolHash
+    , PoolHash (..)
     , createPoolHash
+    -- * Wrapper
+    , PoolMetadataWrapped (..)
     -- * Pool offline metadata
     , PoolName (..)
     , PoolDescription (..)
@@ -27,14 +29,22 @@ module Types
     -- * Configuration
     , Configuration (..)
     , defaultConfiguration
+    -- * API
+    , ApiResult (..)
     ) where
 
 import Cardano.Prelude
 
-import Data.Aeson
-import Data.Swagger (ToParamSchema (..), ToSchema (..))
+import Data.Aeson (FromJSON (..), ToJSON (..), object,
+                   withObject, (.:), (.=))
+import Data.Aeson.Encoding (unsafeToEncoding)
+import Data.Swagger (NamedSchema (..), ToParamSchema (..),
+                   ToSchema (..))
+import Data.Text.Encoding (encodeUtf8Builder)
 
-import Servant (FromHttpApiData (..))
+import Servant (FromHttpApiData (..))
+
+import Cardano.Db.Error
 
 -- | The basic @Configuration@.
 data Configuration = Configuration
@@ -153,10 +163,10 @@ instance ToSchema PoolHomepage
 
 -- | The bit of the pool data off the chain.
 data PoolOfflineMetadata = PoolOfflineMetadata
-    { pomName :: !PoolName
-    , pomDescription :: !PoolDescription
-    , pomTicker :: !PoolTicker
-    , pomHomepage :: !PoolHomepage
+    { pomName        :: !PoolName
+    , pomDescription :: !PoolDescription
+    , pomTicker      :: !PoolTicker
+    , pomHomepage    :: !PoolHomepage
     } deriving (Eq, Show, Ord, Generic)
 
 -- | Smart constructor, just adding one more layer of indirection.
@@ -179,8 +189,8 @@ newtype PoolPledgeAddress = PoolPledgeAddress
 -- | The bit of the pool data on the chain.
 -- This doesn't leave the internal database.
 data PoolOnlineData = PoolOnlineData
-    { podOwner :: !PoolOwner
-    , podPledgeAddress :: !PoolPledgeAddress
+    { podOwner         :: !PoolOwner
+    , podPledgeAddress :: !PoolPledgeAddress
     } deriving (Eq, Show, Ord, Generic)
 
 -- Required instances
@@ -210,3 +220,37 @@ instance ToJSON PoolOfflineMetadata where
 
 --instance ToParamSchema PoolOfflineMetadata
 instance ToSchema PoolOfflineMetadata
+
+newtype PoolMetadataWrapped = PoolMetadataWrapped Text
+    deriving (Eq, Ord, Show, Generic)
+
+-- Here we are using the unsafe encoding since we already have the JSON format
+-- from the database.
+instance ToJSON PoolMetadataWrapped where
+    toJSON (PoolMetadataWrapped metadata) = toJSON metadata
+    toEncoding (PoolMetadataWrapped metadata) = unsafeToEncoding $ encodeUtf8Builder metadata
+
+instance ToSchema PoolMetadataWrapped where
+  declareNamedSchema _ =
+    return $ NamedSchema (Just "PoolMetadataWrapped") $ mempty
+
+instance ToSchema DBFail where
+  declareNamedSchema _ =
+    return $ NamedSchema (Just "DBFail") $ mempty
+
+instance ToSchema (ApiResult err a) where
+  declareNamedSchema _ =
+    return $ NamedSchema (Just "ApiResult") $ mempty
+
+-- Result wrapper.
+newtype ApiResult err a = ApiResult (Either err a) + +instance (ToJSON err, ToJSON a) => ToJSON (ApiResult err a) where + + toJSON (ApiResult (Left dbFail)) = toJSON dbFail + toJSON (ApiResult (Right result)) = toJSON result + + toEncoding (ApiResult (Left result)) = toEncoding result + toEncoding (ApiResult (Right result)) = toEncoding result + + diff --git a/stack.yaml b/stack.yaml index bf5abe0..122b444 100644 --- a/stack.yaml +++ b/stack.yaml @@ -20,6 +20,8 @@ resolver: https://raw.githubusercontent.com/input-output-hk/cardano-prelude/fe76ec64f6b45259cc407a6d840dad79ee6063b6/snapshot.yaml compiler: ghc-8.6.5 +allow-newer: true + # User packages to be built. # Various formats can be used as shown in the example below. # @@ -41,12 +43,81 @@ packages: # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a # extra-deps: + + - th-lift-instances-0.1.16 + - th-lift-0.8.1 + - network-3.0.1.1 + - servant-0.16.2 + + - binary-0.8.7.0 + - bimap-0.4.0 + - brick-0.47.1 + - config-ini-0.2.4.0 + - containers-0.5.11.0 + - data-clist-0.1.2.3 + - ekg-prometheus-adapter-0.1.0.4 + - esqueleto-3.2.2 + - generic-monoid-0.1.0.0 + - libsystemd-journal-1.4.4 + - snap-core-1.0.4.1 + - snap-server-1.1.1.1 + - persistent-2.10.5.1 + - persistent-postgresql-2.10.1.2 + - persistent-template-2.8.2.3 + - prometheus-2.1.2 + - pvss-0.2.0 + - tasty-hedgehog-1.0.0.2 + - text-zipper-0.10.1 + - time-units-1.0.0 + - word-wrap-0.4.1 + - transformers-except-0.1.1 + - text-ansi-0.1.0 + - Diff-0.4.0 + - katip-0.8.3.0 + - moo-1.2 + - gray-code-0.3.1 + - Unique-0.4.7.6 + - statistics-linreg-0.3 + - socks-0.6.1 + - connection-0.3.1 + - http-api-data-0.4.1.1 + - time-compat-1.9.2.2 + - quiet-0.2 + - git: https://github.com/input-output-hk/cardano-prelude - commit: fe76ec64f6b45259cc407a6d840dad79ee6063b6 + commit: e0257be9d745a04f85ab8287a48a9c193acafec8 subdirs: - . 
- test + - git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + subdirs: + - contra-tracer + - iohk-monitoring + - plugins/backend-aggregation + - plugins/backend-ekg + - plugins/backend-monitoring + - plugins/backend-trace-forwarder + - plugins/scribe-systemd + - tracer-transformers + + - git: https://github.com/input-output-hk/ouroboros-network + commit: 16bca08140fb37746538edff9fe77220acf91d55 + subdirs: + - Win32-network + + - git: https://github.com/input-output-hk/cardano-crypto + commit: 2547ad1e80aeabca2899951601079408becbc92c + + - git: https://github.com/input-output-hk/cardano-base + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc + subdirs: + - binary + - binary/test + - cardano-crypto-class + - slotting + # Override default flag values for local packages and extra-deps # flags: {} diff --git a/stack.yaml.lock b/stack.yaml.lock index 770abd3..432fa28 100644 --- a/stack.yaml.lock +++ b/stack.yaml.lock @@ -6,9 +6,6 @@ packages: - completed: subdir: cborg - cabal-file: - size: 4786 - sha256: 1f5ea5fa9fdff393211c89a6e32553837955920f8f5a09af50a48db2317f231a name: cborg version: 0.2.2.1 git: https://github.com/well-typed/cborg @@ -55,38 +52,478 @@ packages: sha256: 18e3afbd09480a247cd39355929d638e0888ccd2c3873cb79c5a5bc2231fae4f original: hackage: canonical-json-0.6.0.0 +- completed: + hackage: th-lift-instances-0.1.16@sha256:82562e05dd6f08943972549af31a7748b9ab128f65143abb9f0890b142d63df3,2608 + pantry-tree: + size: 526 + sha256: bda6b1bec896bc0ec616cc53285dba55cf31d15241168e6738605eee7227fe25 + original: + hackage: th-lift-instances-0.1.16 +- completed: + hackage: th-lift-0.8.1@sha256:51ffb3c76b2175405f643d5461a07820fade6c5ac635698a87afe3aba35ae343,2544 + pantry-tree: + size: 462 + sha256: 79821d0901c430670dd1521ae5f0d6354707217cf9f173404862de534e8e9090 + original: + hackage: th-lift-0.8.1 +- completed: + hackage: network-3.0.1.1@sha256:1a251b790ea98b6f7433f677958f921950780ba6f143d61ba8c0e3f7a9879097,4074 + pantry-tree: + size: 3296 + sha256: 78e378780c998faaf4e32a16b98880220f502d8ef005f24d7ee0c99fb50636e6 + original: + hackage: network-3.0.1.1 +- completed: + hackage: servant-0.16.2@sha256:6c5d19d09a9dd2f2b2bf99193f944491f8374802678b79ec729e3f3ae51e216d,5867 + pantry-tree: + size: 2604 + sha256: bee4504840cf4836326e85f1cf02584bf898188b64d976dac6c6c6793d926794 + original: + hackage: servant-0.16.2 +- completed: + hackage: binary-0.8.7.0@sha256:ae3e6cca723ac55c54bbb3fa771bcf18142bc727afd57818e66d6ee6c8044f12,7705 + pantry-tree: + size: 1976 + sha256: 35e44b6d3ccf0d56fc5407dc3f0895e74696a66da189afbd65973c95743f5e25 + original: + hackage: binary-0.8.7.0 +- completed: + hackage: bimap-0.4.0@sha256:c59d587b56b575c299ba0c2fff44e630991a120a167de5a19cd7a81320f63c84,1717 + pantry-tree: + size: 414 + sha256: f88d0c994fdb5fe7780e4a1ed722a303520abebc12151451b9b0791551725d5d + original: + hackage: bimap-0.4.0 +- completed: + hackage: brick-0.47.1@sha256:de548e3c39cc393dafbc1ecfe788746103fe5233d5a69c2cca5da8ed819ba661,13868 + pantry-tree: + size: 3752 + sha256: bea3f31fbc18a56f0a78d3b27438eba5eb4a81d59c5ca120ddee4ca919183018 + original: + hackage: brick-0.47.1 +- completed: + hackage: config-ini-0.2.4.0@sha256:bfa283598b71db63e7ef431bf758bc267f68f2d5855725e987a986824ac03e97,3340 + pantry-tree: + size: 886 + sha256: 0b81e8d96be4ff42e570dc1b06b725c5fc56a2539acb36da6d96b126b44fd8a7 + original: + hackage: config-ini-0.2.4.0 +- completed: + hackage: 
containers-0.5.11.0@sha256:28ad7337057442f75bc689315ab4ec7bdf5e6b2c39668f306672cecd82c02798,16685 + pantry-tree: + size: 4849 + sha256: faa4e75922a28f7cfe9920c1d7ab3866b792cefcd29bf79f54cfe3b6b5f57cbf + original: + hackage: containers-0.5.11.0 +- completed: + hackage: data-clist-0.1.2.3@sha256:1e26251c8921821c8a1c7e168955449822f1bacf03d056cc59c84fd2863a0f8e,983 + pantry-tree: + size: 219 + sha256: d4b6685ed28130114f99aead9854656ea5a7006e8598ad948c953a20e786e43d + original: + hackage: data-clist-0.1.2.3 +- completed: + hackage: ekg-prometheus-adapter-0.1.0.4@sha256:d95487be7f282976cfb23c5b3826eaa1a279aff1e3821b29cf60ad3164e903ab,1494 + pantry-tree: + size: 300 + sha256: 7879229e738c82a340a9c5584a39771df432ef2e36c62430ee2b4160e2f562cd + original: + hackage: ekg-prometheus-adapter-0.1.0.4 +- completed: + hackage: esqueleto-3.2.2@sha256:11d0727b58a9473d7ecb3a2404b557b17a29e4b6874e803743f68a3a69c895f9,5483 + pantry-tree: + size: 1461 + sha256: 5a3d0287f757460581e18f16808acf054bd69ce1b4b8bb49b53dc6518b685530 + original: + hackage: esqueleto-3.2.2 +- completed: + hackage: generic-monoid-0.1.0.0@sha256:5af8a7d43a97fa9f148ef614f7fe5e8b01c9c487110732015e77834ee07efdc7,856 + pantry-tree: + size: 393 + sha256: d162e8289e15bbccfeb12c9e31ccc833f558dd353646672be2d67f7c0497f51a + original: + hackage: generic-monoid-0.1.0.0 +- completed: + hackage: libsystemd-journal-1.4.4@sha256:ad86652aad0c1755945a15c323ec77afc139d1f66d60dfe2c02c1caf6efd913e,1238 + pantry-tree: + size: 280 + sha256: 3e3ac394e103c996eb606acf2c31bf90ec97a7e758afa0165d4c1afc2cf21a4d + original: + hackage: libsystemd-journal-1.4.4 +- completed: + hackage: snap-core-1.0.4.1@sha256:8f9e5b2e8af35c2d620e284c027ff446b8b4fb3befeb0460f2409924f3d31b55,9669 + pantry-tree: + size: 3376 + sha256: fc9d9bee8a9cc4db7ff3cb7fd62855d4e222d3d926db6a739b9fb74602ea601b + original: + hackage: snap-core-1.0.4.1 +- completed: + hackage: snap-server-1.1.1.1@sha256:c5e1e4d89fbfb1d1f2cf4238a031c586a0461ca7e0ac85713a38f815f218aaf6,15089 + pantry-tree: + size: 3360 + sha256: 6491260abc6613fdfd333cf2a95d7078f055c5ed14a07588b1c049414a5b46aa + original: + hackage: snap-server-1.1.1.1 +- completed: + hackage: persistent-2.10.5.1@sha256:878a69d07888af8889195dca35b66b03d1f6fdb933f0af1107a9760541672d55,4740 + pantry-tree: + size: 2096 + sha256: 58316a510ecb23b99468d4f6aab8af468538f8927720bc7823425d8e441f2a39 + original: + hackage: persistent-2.10.5.1 +- completed: + hackage: persistent-postgresql-2.10.1.2@sha256:e9a16e1dd9be459bb7580fe8deea36ff16d089fddc31246d22c19e8e8fdfefe5,2873 + pantry-tree: + size: 740 + sha256: 3c62919b34f42839edcd711d36775aa98b8105132133038f33171e3d495b1839 + original: + hackage: persistent-postgresql-2.10.1.2 +- completed: + hackage: persistent-template-2.8.2.3@sha256:26f398b06fdc05c6a1a1a75afdc06f28dfd1f33626249db7e28aa3f117d52a0a,2774 + pantry-tree: + size: 561 + sha256: 3dee1f219ab93cab19c5b87e25a0942c5bae0b75b260d71d377fc8c04f0b88d9 + original: + hackage: persistent-template-2.8.2.3 +- completed: + hackage: prometheus-2.1.2@sha256:dbd20f6003bd0b4602433b67ced7696322ec486143efc12b1d452dd312152224,3990 + pantry-tree: + size: 1559 + sha256: 1365a10d00c6c0e14a0c13de2fd281409dbc10416dd7fa74f22bfe4bf7bbfc60 + original: + hackage: prometheus-2.1.2 +- completed: + hackage: pvss-0.2.0@sha256:8a35561a8620e299ec8b85705c2721eb0c5866c68c426ba395691560ee02c3e4,2382 + pantry-tree: + size: 685 + sha256: 32a629c92f858a287a543d530d2180012f4b26eaa0eb3e50a952a63f6a3d9607 + original: + hackage: pvss-0.2.0 +- completed: + hackage: 
tasty-hedgehog-1.0.0.2@sha256:874e810030bd982d7bc7fd23ae9db1c22938bb14cecf8d869971c632fbb892ec,1845 + pantry-tree: + size: 330 + sha256: 60b7210949aaf5597a524bcf77d65d0bf63646af6c3c995c18a6777d518395d7 + original: + hackage: tasty-hedgehog-1.0.0.2 +- completed: + hackage: text-zipper-0.10.1@sha256:8b73a97a3717a17df9b0a722b178950c476ff2268ca5c583e99d010c94af849e,1471 + pantry-tree: + size: 600 + sha256: ebd5f0e2fc8c59b1bde6706cef1fdb012d52bb26b3953a97a4fa6502f56cdd65 + original: + hackage: text-zipper-0.10.1 +- completed: + hackage: time-units-1.0.0@sha256:27cf54091c4a0ca73d504fc11d5c31ab4041d17404fe3499945e2055697746c1,928 + pantry-tree: + size: 212 + sha256: 9b516d4195fcea22cd1f4335cfe210e88deca397ba7dacc494d5a2feb69e1af8 + original: + hackage: time-units-1.0.0 +- completed: + hackage: word-wrap-0.4.1@sha256:f72233b383ef569c557bfd9812cbb8e306c415ce509082c0bd15ee51c0239ccc,1606 + pantry-tree: + size: 423 + sha256: dcf5071895ee477e60e3c9de1e30eb711e11e9a7335db160616f80baeb20ad71 + original: + hackage: word-wrap-0.4.1 +- completed: + hackage: transformers-except-0.1.1@sha256:6c12ef8e632a10440968cd541e75074bd6ef4b5ff4012677f8f8189d7b2d0df6,1387 + pantry-tree: + size: 322 + sha256: db2c54886fc6de6966bd00d437bb790053d621d8bb357a116040feb80845fd82 + original: + hackage: transformers-except-0.1.1 +- completed: + hackage: text-ansi-0.1.0@sha256:2112c437a4be5337a3e99b63aa05414bac03f1f6e6f5147048f9c9c5777a1d62,1389 + pantry-tree: + size: 317 + sha256: d2aa51b82fed1411d86a35e3a0c4bf19e043464017bdda63d6ff5c97c444d5ed + original: + hackage: text-ansi-0.1.0 +- completed: + hackage: Diff-0.4.0@sha256:b5cfbeed498f555a18774ffd549bbeff7a24bdfe5984154dcfc9f4328a3c2847,1275 + pantry-tree: + size: 415 + sha256: 01b215c454152a0fe10a5378a4013d92e89da2f0695ffffc466ead5f3343cf3a + original: + hackage: Diff-0.4.0 +- completed: + hackage: katip-0.8.3.0@sha256:8a67c0aec3ba1f0eabcfae443cb909e4cf9405e29bac99ccf1420f1f1bbda9c4,4097 + pantry-tree: + size: 1140 + sha256: cad8c67256ec85819309d77bdcbc15b67885940ef76f2b850c8be20c2efd0149 + original: + hackage: katip-0.8.3.0 +- completed: + hackage: moo-1.2@sha256:0c4be1a01548db785dcbbe6b8c98579dbf03c5b3b536e0420dce3ba6a61337cb,5951 + pantry-tree: + size: 2861 + sha256: a32c48fefc42e1f7775c868794a91604522a59a1c0d2b2accff12197329f2d17 + original: + hackage: moo-1.2 +- completed: + hackage: gray-code-0.3.1@sha256:2c8a4ed9c9ee37320305610604d6d93504e0813d7c9768949af418b53043185a,2388 + pantry-tree: + size: 506 + sha256: 7b133d19b93231ea84b286cfe1a2038d67f9b553826845b33fad6526464e1de7 + original: + hackage: gray-code-0.3.1 +- completed: + hackage: Unique-0.4.7.6@sha256:a1ff411f4d68c756e01e8d532fbe8e57f1ac77f2cc0ee8a999770be2bca185c5,2723 + pantry-tree: + size: 1366 + sha256: 587d279ff94e8d6f43da3710634ca3611fa4f6886b1e541a73c69303c00297b9 + original: + hackage: Unique-0.4.7.6 +- completed: + hackage: statistics-linreg-0.3@sha256:95c6efe6c7f6b26bc6e9ada90ab2d18216371cf59a6ef2b517b4a6fd35d9a76f,2544 + pantry-tree: + size: 233 + sha256: 8d1978a6497e4fd9c66b0c5f24ea659aa9714f5b7e8b1dcfce7fb7bb8d4dee0e + original: + hackage: statistics-linreg-0.3 +- completed: + hackage: socks-0.6.1@sha256:ac190808eea704672df18f702e8f2ad0b7a4d0af528e95ee55ea6ee0be672e2a,1258 + pantry-tree: + size: 692 + sha256: 53d29c06c42e737ae99660e077c7c4b286f1272b3394ad7f74163867a5b3b8fa + original: + hackage: socks-0.6.1 +- completed: + hackage: connection-0.3.1@sha256:65da1c055610095733bcd228d85dff80804b23a5d18fede994a0f9fcd1b0c121,1554 + pantry-tree: + size: 386 + sha256: 
13a2c21049e69068e4c3d725533d94729772f2d3e05f5ef611bef28363b08798 + original: + hackage: connection-0.3.1 +- completed: + hackage: http-api-data-0.4.1.1@sha256:998b3a5e4d2707dff19e2a877c2f9859ac5fdf491a94d31024d20212c2c250b7,3704 + pantry-tree: + size: 887 + sha256: 4e76b5bc6a9e45679e25665be7dd1ed8161840d4d781e3e125557e6f2b9d1fd7 + original: + hackage: http-api-data-0.4.1.1 +- completed: + hackage: time-compat-1.9.2.2@sha256:ccf268e6ec91a6d9a79392697634c670c095a34a60d1ccfa1be1c84f20bb24c5,4254 + pantry-tree: + size: 3602 + sha256: f16cc56a43fa6047ad46b23770d5a16b9c400fd8d019e9b010b766a13e8eb588 + original: + hackage: time-compat-1.9.2.2 +- completed: + hackage: quiet-0.2@sha256:60bb3cb8dd4a225557351b6d622147a4e541f1564847e05a81621365a155ad6c,1413 + pantry-tree: + size: 416 + sha256: c64a9ec9827f140768c01f1d9df1ed9ec9de2d70682a3822061bbe52cf85006c + original: + hackage: quiet-0.2 - completed: subdir: . - cabal-file: - size: 4901 - sha256: 03d3491158c85dd325540acb0b7da7565d91cbbff93b7906842bbaf01f127293 name: cardano-prelude version: 0.1.0.0 git: https://github.com/input-output-hk/cardano-prelude pantry-tree: - size: 4603 - sha256: 698b2e7075911dcb2b77955930cf3c4d34ef12cd10816d869f81e4448ff8b54f - commit: fe76ec64f6b45259cc407a6d840dad79ee6063b6 + size: 4461 + sha256: a4691e5d2f63bd812194a118d0407aa59a130e77a077abec26787d2036394c1f + commit: e0257be9d745a04f85ab8287a48a9c193acafec8 original: subdir: . git: https://github.com/input-output-hk/cardano-prelude - commit: fe76ec64f6b45259cc407a6d840dad79ee6063b6 + commit: e0257be9d745a04f85ab8287a48a9c193acafec8 - completed: subdir: test - cabal-file: - size: 2028 - sha256: 9edbf9c6d91b2271cfa43a5bc00511ac5d4912307423c5be0b372cc01e4680ea name: cardano-prelude-test version: 0.1.0.0 git: https://github.com/input-output-hk/cardano-prelude pantry-tree: - size: 1222 - sha256: 062b469b6751a510f441977a41bae03abefe9ae3022ba4510fcf1245cd73261f - commit: fe76ec64f6b45259cc407a6d840dad79ee6063b6 + size: 1153 + sha256: bd6687f172eb8150e5f0142a49349180f9ac611ade8cbb4bf6b778b9ffc5266f + commit: e0257be9d745a04f85ab8287a48a9c193acafec8 original: subdir: test git: https://github.com/input-output-hk/cardano-prelude - commit: fe76ec64f6b45259cc407a6d840dad79ee6063b6 + commit: e0257be9d745a04f85ab8287a48a9c193acafec8 +- completed: + subdir: contra-tracer + name: contra-tracer + version: 0.1.0.0 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 338 + sha256: 7902fbc68aba33d6f139d0815a6e71e704a62684bf2bddaca9959e569301f25d + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: contra-tracer + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: iohk-monitoring + name: iohk-monitoring + version: 0.1.10.1 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 4798 + sha256: 13490d359f66151f45d2defb294ec70ebcde768ed2f05cb14da2dc796552970b + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: iohk-monitoring + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: plugins/backend-aggregation + name: lobemo-backend-aggregation + version: 0.1.0.0 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 343 + sha256: fe1d61fe24e95753a3824d453143ef4e7a51ccba2331dd1160a58c8b2abedc95 + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: 
plugins/backend-aggregation + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: plugins/backend-ekg + name: lobemo-backend-ekg + version: 0.1.0.1 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 409 + sha256: 7f0145c55891efb584effa03427a07221a0bf28a5d37b5ffa1feae5db53c9b45 + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: plugins/backend-ekg + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: plugins/backend-monitoring + name: lobemo-backend-monitoring + version: 0.1.0.0 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 471 + sha256: 37c92978d1dbcc78c17e25ff9cb95dbc0770a5165fdca3184249ed05a8335d1e + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: plugins/backend-monitoring + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: plugins/backend-trace-forwarder + name: lobemo-backend-trace-forwarder + version: 0.1.0.0 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 350 + sha256: 36bf86eff7748183bca5272d966b4f1d65d42f78328313968b30c0f0c890b616 + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: plugins/backend-trace-forwarder + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: plugins/scribe-systemd + name: lobemo-scribe-systemd + version: 0.1.0.0 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 332 + sha256: f0481607ca5a9ea854202ba5a3bc91b85b14617e6d3ae471ae05bfad7cf1aaa1 + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: plugins/scribe-systemd + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: tracer-transformers + name: tracer-transformers + version: 0.1.0.1 + git: https://github.com/input-output-hk/iohk-monitoring-framework + pantry-tree: + size: 631 + sha256: ef26fb18daa504590d1b60a320d2dbadac77c68be750aa873eb33bfbae2a4020 + commit: 71df15f7b888d2671f619cc632080aaaaca48087 + original: + subdir: tracer-transformers + git: https://github.com/input-output-hk/iohk-monitoring-framework + commit: 71df15f7b888d2671f619cc632080aaaaca48087 +- completed: + subdir: Win32-network + name: Win32-network + version: 0.1.0.0 + git: https://github.com/input-output-hk/ouroboros-network + pantry-tree: + size: 1802 + sha256: 36c14669f770cd548a7ebd335930e5387132b76b912c78f4d728d5cbf7416091 + commit: 16bca08140fb37746538edff9fe77220acf91d55 + original: + subdir: Win32-network + git: https://github.com/input-output-hk/ouroboros-network + commit: 16bca08140fb37746538edff9fe77220acf91d55 +- completed: + name: cardano-crypto + version: 1.1.0 + git: https://github.com/input-output-hk/cardano-crypto + pantry-tree: + size: 5282 + sha256: dd0cccf3ea66557d90599f596467ca0c4a758b644393da913629572cb83c5613 + commit: 2547ad1e80aeabca2899951601079408becbc92c + original: + git: https://github.com/input-output-hk/cardano-crypto + commit: 2547ad1e80aeabca2899951601079408becbc92c +- completed: + subdir: binary + name: cardano-binary + version: 1.5.0 + git: https://github.com/input-output-hk/cardano-base + pantry-tree: + size: 1884 + 
sha256: b91847fafaf9798a563b6834ab9469fc2d573059662beee4ea8c07c17e485d94 + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc + original: + subdir: binary + git: https://github.com/input-output-hk/cardano-base + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc +- completed: + subdir: binary/test + name: cardano-binary-test + version: 1.3.0 + git: https://github.com/input-output-hk/cardano-base + pantry-tree: + size: 1007 + sha256: e86851cebba16ed9ab7cb76d4531a391ef37f19eb0fc24efdc7b3a7f77efef56 + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc + original: + subdir: binary/test + git: https://github.com/input-output-hk/cardano-base + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc +- completed: + subdir: cardano-crypto-class + name: cardano-crypto-class + version: 2.0.0 + git: https://github.com/input-output-hk/cardano-base + pantry-tree: + size: 2647 + sha256: 984376cef71f5ee813702a15e0187a1b609c3d593b9ec0137fad1a5988cbbf58 + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc + original: + subdir: cardano-crypto-class + git: https://github.com/input-output-hk/cardano-base + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc +- completed: + subdir: slotting + name: cardano-slotting + version: 0.1.0.0 + git: https://github.com/input-output-hk/cardano-base + pantry-tree: + size: 648 + sha256: f76432c3ccfb6dc635343e7a505239394df9339a10147377d6ada3d66c1537d9 + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc + original: + subdir: slotting + git: https://github.com/input-output-hk/cardano-base + commit: 4a457f44e68132ce2bd978ab45a3188e64327abc snapshots: - completed: size: 418 diff --git a/test_pool.json b/test_pool.json new file mode 100644 index 0000000..f3bb350 --- /dev/null +++ b/test_pool.json @@ -0,0 +1 @@ +{"name": "test", "description": "This is a test pool", "ticker": "testy", "homepage": "https://github.com/input-output-hk/cardano-db-sync/tree/master/cardano-db/src/Cardano/Db"}
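
The sketches below illustrate how the pieces added in this patch fit together. They are minimal, standalone examples under stated assumptions, not part of the patch itself.

First, the pgpass handling in `Cardano.Db.PGConfig`: the file is expected to hold a single `host:port:dbname:user:password` line, and a `*` user is replaced with the effective system user. A minimal sketch, assuming the `PGPassFile` constructor is exported alongside the readers:
```
import qualified Data.ByteString.Char8 as BS

-- PGPassFile (..) is assumed to be exported from the module above.
import Cardano.Db.PGConfig (PGPassFile (..), readPGPassFileExit, toConnectionString)

main :: IO ()
main = do
  -- Hypothetical file path; the repo default lives at config/pgpass.
  writeFile "/tmp/pgpass-example" "localhost:5432:cexplorer:*:secret"
  -- readPGPassFileExit also sets the PGPASSFILE environment variable on success.
  pgconf <- readPGPassFileExit (PGPassFile "/tmp/pgpass-example")
  -- Prints e.g.: host=localhost port=5432 user=<you> dbname=cexplorer password=secret
  BS.putStrLn (toConnectionString pgconf)
```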
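The lookup sketch referenced from `Cardano.Db.Query`: run `queryTxMetadata` through one of the runners and render a failure with `renderLookupFail` (assumed here to live in `Cardano.Db.Error`, which the patch imports but does not show). The hash is the hex-encoded Blake2b-256 digest, exactly as stored:
```
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text.IO as T

import Cardano.Db.Error (renderLookupFail)
import Cardano.Db.Query (queryTxMetadata)
import Cardano.Db.Run (runDbNoLogging)
import Cardano.Db.Schema

-- Look up stored pool metadata by its hex-encoded hash and print the JSON text.
-- Requires PGPASSFILE to point at a valid pgpass file.
lookupMetadata :: BS.ByteString -> IO ()
lookupMetadata hash = do
  result <- runDbNoLogging (queryTxMetadata hash)
  case result of
    Left err  -> T.putStrLn (renderLookupFail err)
    Right txm -> T.putStrLn (txMetadataMetadata txm)
```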
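`ghciDebugQuery` renders the SQL an esqueleto query would issue, along with its parameters, instead of running it (it still reads `PGPASSFILE` to open a connection). A sketch pointing it at the same metadata lookup:
```
import qualified Data.ByteString.Char8 as BS
import Database.Esqueleto

import Cardano.Db.Run (ghciDebugQuery)
import Cardano.Db.Schema

-- Print the SELECT that queryTxMetadata would run, without executing it.
debugMetadataQuery :: BS.ByteString -> IO ()
debugMetadataQuery hash =
  ghciDebugQuery $ from $ \ txm -> do
    where_ (txm ^. TxMetadataHash ==. val hash)
    pure txm
```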
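On the schema side, `persistLowerCase` turns the `TxMetadata` block into a `tx_metadata` table with `hash` and `metadata` columns, and generates prefixed record accessors in Haskell. A sketch of a row value (the hash is the example hash from the README):
```
{-# LANGUAGE OverloadedStrings #-}

import Cardano.Db.Schema

-- A row for the tx_metadata table: hash is hex (sqltype=base16type),
-- metadata is the raw JSON text (sqltype=json).
exampleRow :: TxMetadata
exampleRow = TxMetadata
  { txMetadataHash = "cbdfc4f21feb0a414b2b9471fa56b0ebd312825e63db776d68cc3fa0ca1f5a2f"
  , txMetadataMetadata = "{\"name\": \"test\"}"
  }
```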
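The hash check forward-referenced from `postgresqlDataLayer` is just: hex-encode the Blake2b-256 digest of the raw metadata bytes and compare it with the client-supplied hash. A standalone mirror of that logic, using the same imports the patch uses:
```
import qualified Cardano.Crypto.Hash.Blake2b as Crypto
import qualified Cardano.Crypto.Hash.Class as Crypto
import qualified Data.ByteString.Base16 as B16
import Data.ByteString.Char8 (ByteString)
import Data.Proxy (Proxy (..))
import Data.Text (Text)
import qualified Data.Text.Encoding as T

-- Hex-encoded Blake2b-256 digest of the raw metadata bytes, as stored in the DB.
metadataHash :: Text -> ByteString
metadataHash metadata =
  B16.encode (Crypto.digest (Proxy :: Proxy Crypto.Blake2b_256) (T.encodeUtf8 metadata))

-- The insert path only accepts metadata whose digest matches the supplied hash.
hashMatches :: Text -> Text -> Bool
hashMatches expectedHex metadata =
  metadataHash metadata == T.encodeUtf8 expectedHex
```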
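`DataLayer` is a plain record of functions, so the Servant handlers stay backend-agnostic: `mkAppStubbed` wires in the in-memory stub and `mkApp` the PostgreSQL layer. The same choice as a standalone helper (a hypothetical `mkDataLayer`, using only exports shown above):
```
import Data.IORef (newIORef)

import DB (DataLayer, postgresqlDataLayer, stubbedBlacklistedPools,
           stubbedDataLayer, stubbedInitialDataMap)

-- Pick a backend once at startup; everything downstream just takes the record.
mkDataLayer :: Bool -> IO DataLayer
mkDataLayer usePostgres
  | usePostgres = pure postgresqlDataLayer
  | otherwise = do
      ioDataMap <- newIORef stubbedInitialDataMap
      ioBlacklisted <- newIORef stubbedBlacklistedPools
      pure (stubbedDataLayer ioDataMap ioBlacklisted)
```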
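Finally, the `ApiResult` wrapper means a handler answers HTTP 200 with either the payload's JSON or the error's JSON, rather than a Servant error. A self-contained mirror of the instance (renamed `ApiResult'` so the sketch does not clash with `Types.hs`):
```
import Data.Aeson (ToJSON (..), encode)
import qualified Data.ByteString.Lazy.Char8 as BL

-- Mirror of the ApiResult wrapper from Types.hs: Left and Right both
-- serialise directly, with no extra envelope around them.
newtype ApiResult' err a = ApiResult' (Either err a)

instance (ToJSON err, ToJSON a) => ToJSON (ApiResult' err a) where
  toJSON (ApiResult' (Left err))     = toJSON err
  toJSON (ApiResult' (Right result)) = toJSON result

main :: IO ()
main = do
  BL.putStrLn (encode (ApiResult' (Left "not found") :: ApiResult' String Int)) -- "not found"
  BL.putStrLn (encode (ApiResult' (Right 42)         :: ApiResult' String Int)) -- 42
```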