Merge pull request #92 from input-output-hk/hkm/download-fix
Avoid multiple downloads in stack-to-nix
rvl committed Dec 3, 2020
2 parents 595ca11 + 4ba8f4d commit 9a47eb9
Showing 10 changed files with 246 additions and 68 deletions.
2 changes: 1 addition & 1 deletion .buildkite/build.sh
@@ -17,7 +17,7 @@ cabal new-configure

echo
echo "+++ Run stable version of plan-to-nix"
nix build '(let haskellNix = import (builtins.fetchTarball "https://github.com/input-output-hk/haskell.nix/archive/master.tar.gz") {}; in (import haskellNix.sources.nixpkgs-default haskellNix.nixpkgsArgs).haskell-nix.nix-tools)' -o nt
nix build '(let haskellNix = import (builtins.fetchTarball "https://github.com/input-output-hk/haskell.nix/archive/master.tar.gz") {}; in (import haskellNix.sources.nixpkgs haskellNix.nixpkgsArgs).haskell-nix.nix-tools.ghc883)' -o nt
./nt/bin/plan-to-nix --output .buildkite/nix1 --plan-json dist-newstyle/cache/plan.json

# Replace currently broken plan-to-nix output
2 changes: 1 addition & 1 deletion .buildkite/fixed.nix
@@ -1,5 +1,5 @@
{ haskellNix ? import (builtins.fetchTarball https://github.com/input-output-hk/haskell.nix/archive/master.tar.gz) {}
, nixpkgs ? haskellNix.sources.nixpkgs-default }:
, nixpkgs ? haskellNix.sources.nixpkgs }:

let
pkgs = import nixpkgs haskellNix.nixpkgsArgs;
2 changes: 1 addition & 1 deletion cabal2nix/Main.hs
@@ -44,7 +44,7 @@ main = getArgs >>= \case
[url,hash] | "http" `isPrefixOf` url ->
let subdir = "." in
fetch (\dir -> cabalFromPath url hash subdir $ dir </> subdir)
(Source url mempty UnknownHash subdir) >>= \case
(Source url mempty UnknownHash) >>= \case
(Just (DerivationSource{..}, genBindings)) -> genBindings derivHash
_ -> return ()
[path,file] -> doesDirectoryExist file >>= \case
32 changes: 9 additions & 23 deletions default.nix
@@ -1,35 +1,21 @@
{ haskellNixSrc ? builtins.fetchTarball {
url = "https://github.com/input-output-hk/haskell.nix/archive/61b1c8a06c74a83c0d2dc7d937d8daa6b32b2a2f.tar.gz";
sha256 = "1vi8is7h85sb8acymjcnkjm39fp5pal2wq9p7zdv5cmillzs2sza";
}
, nixpkgs ? (import haskellNixSrc {}).sources.nixpkgs-default
, pkgs ? import nixpkgs (import haskellNixSrc {}).nixpkgsArgs
, haskellCompiler ? "ghc883"
{ sourcesOverride ? {}
, sources ? (import ./nix/sources.nix {}) // sourcesOverride
, pkgs ? (import sources."haskell.nix" {}).pkgs
, compiler-nix-name ? "ghc883"
}:
let
project = pkgs.haskell-nix.cabalProject {
inherit compiler-nix-name;
src = pkgs.haskell-nix.haskellLib.cleanGit { src = ./.; name = "nix-tools"; };
ghc = pkgs.haskell-nix.compiler.${haskellCompiler};
modules = [{
nonReinstallablePkgs= [ "rts" "ghc-heap" "ghc-prim" "integer-gmp" "integer-simple" "base"
"deepseq" "array" "ghc-boot-th" "pretty" "template-haskell"
# ghcjs custom packages
"ghcjs-prim" "ghcjs-th"
"ghc-boot"
"ghc" "Win32" "array" "binary" "bytestring" "containers"
"directory" "filepath" "ghc-boot" "ghc-compact" "ghc-prim"
# "ghci" "haskeline"
"hpc"
"mtl" "parsec" "process" "text" "time" "transformers"
"unix" "xhtml"
# "stm" "terminfo"
];
}];
modules = [{ reinstallableLibGhc = true; }];
};
in
project // {
shell = project.shellFor {
tools = { cabal = "3.2.0.0"; };
buildInputs = [
pkgs.nix-prefetch-git
];
};
}
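
With the niv-style parameters above, the pins can be overridden without editing the file. A minimal sketch of a hypothetical invocation using the new sourcesOverride and compiler-nix-name arguments (the override URL is only an example):

import ./default.nix {
  # Hypothetical override: take haskell.nix from a different checkout instead
  # of the pin recorded in nix/sources.json.
  sourcesOverride = {
    "haskell.nix" = builtins.fetchTarball
      "https://github.com/input-output-hk/haskell.nix/archive/master.tar.gz";
  };
  compiler-nix-name = "ghc883";
}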

5 changes: 2 additions & 3 deletions lib/Distribution/Nixpkgs/Fetch.hs
@@ -31,7 +31,6 @@ data Source = Source
, sourceRevision :: String -- ^ Revision to use. For protocols where this doesn't make sense (such as HTTP), this
-- should be the empty string.
, sourceHash :: Hash -- ^ The expected hash of the source, if available.
, sourceCabalDir :: String -- ^ Directory where Cabal file is found.
} deriving (Show, Eq, Ord, Generic)

instance NFData Source
@@ -69,7 +68,7 @@ instance FromJSON DerivationSource where
parseJSON _ = error "invalid DerivationSource"

fromDerivationSource :: DerivationSource -> Source
fromDerivationSource DerivationSource{..} = Source derivUrl derivRevision (Certain derivHash) "."
fromDerivationSource DerivationSource{..} = Source derivUrl derivRevision (Certain derivHash)

-- | Fetch a source, trying any of the various nix-prefetch-* scripts.
fetch :: forall a. (String -> MaybeT IO a) -- ^ This function is passed the output path name as an argument.
@@ -105,7 +104,7 @@ fetch f = runMaybeT . fetchers where
localArchive :: FilePath -> MaybeT IO (DerivationSource, a)
localArchive path = do
absolutePath <- liftIO $ canonicalizePath path
unpacked <- snd <$> fetchWith (False, "url", ["--unpack"]) (Source ("file://" ++ absolutePath) "" UnknownHash ".")
unpacked <- snd <$> fetchWith (False, "url", ["--unpack"]) (Source ("file://" ++ absolutePath) "" UnknownHash)
process (DerivationSource "" absolutePath "" "", unpacked)

process :: (DerivationSource, FilePath) -> MaybeT IO (DerivationSource, a)
91 changes: 56 additions & 35 deletions lib/Stack2nix.hs
@@ -11,12 +11,12 @@ import Data.String (fromString)

import Control.Monad.Trans.Maybe
import Control.Monad.IO.Class (liftIO)
import Control.Monad (unless, forM)
import Control.Monad (unless, forM, forM_)
import Extra (unlessM)

import qualified Data.Map as M (fromListWith, toList)
import System.FilePath ((<.>), (</>), takeDirectory, dropFileName)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, doesFileExist, getCurrentDirectory)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, doesFileExist)
import System.IO (IOMode(..), openFile, hClose)
import Data.Yaml (decodeFileEither)

@@ -62,7 +62,7 @@ stackexpr args =
=<< resolveSnapshot (argStackYaml args) value

stack2nix :: Args -> Stack -> IO NExpr
stack2nix args stack@(Stack resolver compiler pkgs pkgFlags ghcOpts) =
stack2nix args (Stack resolver compiler pkgs pkgFlags ghcOpts) =
do let extraDeps = extraDeps2nix pkgs
flags = flags2nix pkgFlags
ghcOptions = ghcOptions2nix ghcOpts
@@ -147,7 +147,6 @@ writeDoc file doc =
-- makeRelativeToCurrentDirectory
packages2nix :: Args -> [Dependency] -> IO [(T.Text, Binding NExpr)]
packages2nix args pkgs =
do cwd <- getCurrentDirectory
fmap concat . forM pkgs $ \case
(LocalPath folder) ->
do cabalFiles <- findCabalFiles (argHpackUse args) (dropFileName (argStackYaml args) </> folder)
@@ -161,42 +160,64 @@ packages2nix args pkgs =
prettyNix <$> cabal2nix True (argDetailLevel args) src cabalFile
return (fromString pkg, fromString pkg $= mkPath False nix)
(DVCS (Git url rev) _ subdirs) ->
fmap concat . forM subdirs $ \subdir ->
do cacheHits <- liftIO $ cacheHits (argCacheFile args) url rev subdir
case cacheHits of
[] -> do
fetch (\dir -> cabalFromPath url rev subdir $ dir </> subdir)
(Source url rev UnknownHash subdir) >>= \case
(Just (DerivationSource{..}, genBindings)) -> genBindings derivHash
_ -> return []
hits ->
forM hits $ \( pkg, nix ) -> do
return (fromString pkg, fromString pkg $= mkPath False nix)
do hits <- forM subdirs $ \subdir -> liftIO $ cacheHits (argCacheFile args) url rev subdir
let generateBindings =
fetch (cabalFromPath url rev subdirs)
(Source url rev UnknownHash) >>= \case
(Just (DerivationSource{..}, genBindings)) -> genBindings derivHash
_ -> return []
if any null hits
then
-- If any of the subdirs were missing we need to fetch the files and
-- generate the bindings.
generateBindings
else do
let allHits = concat hits
(and <$> forM allHits (\( _, nix ) -> doesFileExist (argOutputDir args </> nix))) >>= \case
False ->
-- One or more of the generated binding files are missing
generateBindings
True ->
-- If the subdirs are all in the cache then the bindings should already be
-- generated too.
forM allHits $ \( pkg, nix ) ->
return (fromString pkg, fromString pkg $= mkPath False nix)
_ -> return []
where relPath = shortRelativePath (argOutputDir args) (dropFileName (argStackYaml args))
cabalFromPath
:: String -- URL
-> String -- Revision
-> FilePath -- Subdir
-> FilePath -- Local Directory
:: String -- URL
-> String -- Revision
-> [FilePath] -- Subdirs
-> FilePath -- Local Directory
-> MaybeT IO (String -> IO [(T.Text, Binding NExpr)])
cabalFromPath url rev subdir path = do
d <- liftIO $ doesDirectoryExist path
unless d $ fail ("not a directory: " ++ path)
cabalFiles <- liftIO $ findCabalFiles (argHpackUse args) path
return $ \sha256 ->
cabalFromPath url rev subdirs dir = do
-- Check that all the subdirs exist; if any is missing,
-- fail the MaybeT so that the next fetcher will be tried.
forM_ subdirs $ \subdir -> do
let path = dir </> subdir
d <- liftIO $ doesDirectoryExist path
unless d $ fail ("not a directory: " ++ path)
-- If we got this far we are confident we have downloaded
-- with the right fetcher. Return an action that will
-- be used to generate the bindings.
return $ \sha256 -> fmap concat . forM subdirs $ \subdir -> do
let path = dir </> subdir
cabalFiles <- liftIO $ findCabalFiles (argHpackUse args) path
forM cabalFiles $ \cabalFile -> do
let pkg = cabalFilePkgName cabalFile
nix = pkg <.> "nix"
nixFile = argOutputDir args </> nix
subdir' = if subdir == "." then Nothing
else Just subdir
src = Just $ C2N.Git url rev (Just sha256) subdir'
createDirectoryIfMissing True (takeDirectory nixFile)
writeDoc nixFile =<<
prettyNix <$> cabal2nix True (argDetailLevel args) src cabalFile
liftIO $ appendCache (argCacheFile args) url rev subdir sha256 pkg nix
return (fromString pkg, fromString pkg $= mkPath False nix)
let pkg = cabalFilePkgName cabalFile
nix = pkg <.> "nix"
nixFile = argOutputDir args </> nix
subdir' = if subdir == "." then Nothing
else Just subdir
src = Just $ C2N.Git url rev (Just sha256) subdir'
createDirectoryIfMissing True (takeDirectory nixFile)
writeDoc nixFile =<<
prettyNix <$> cabal2nix True (argDetailLevel args) src cabalFile
-- Only update the cache if there is not already a record
cacheHits (argCacheFile args) url rev subdir >>= \case
[hit] | hit == (pkg, nix) -> return ()
_ -> appendCache (argCacheFile args) url rev subdir sha256 pkg nix
return (fromString pkg, fromString pkg $= mkPath False nix)

defaultNixContents :: String
defaultNixContents = unlines
38 changes: 38 additions & 0 deletions nix/sources.json
@@ -0,0 +1,38 @@
{
"haskell.nix": {
"branch": "master",
"description": "Alternative Haskell Infrastructure for Nixpkgs",
"homepage": "https://input-output-hk.github.io/haskell.nix",
"owner": "input-output-hk",
"repo": "haskell.nix",
"rev": "aa85608fe978276c0a9dcde6c31e61bf1829c2f9",
"sha256": "1yhds5s73myvb1aw6df5b2ka62kz6hzkfqdx3wgq68j5159xr0j5",
"type": "tarball",
"url": "https://github.com/input-output-hk/haskell.nix/archive/aa85608fe978276c0a9dcde6c31e61bf1829c2f9.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "f73bf8d584148677b01859677a63191c31911eae",
"sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixos-19.09",
"description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
"homepage": "https://github.com/NixOS/nixpkgs",
"owner": "NixOS",
"repo": "nixpkgs-channels",
"rev": "289466dd6a11c65a7de4a954d6ebf66c1ad07652",
"sha256": "0r5ja052s86fr54fm1zlhld3fwawz2w1d1gd6vbvpjrpjfyajibn",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs-channels/archive/289466dd6a11c65a7de4a954d6ebf66c1ad07652.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
134 changes: 134 additions & 0 deletions nix/sources.nix
@@ -0,0 +1,134 @@
# This file has been generated by Niv.

let

#
# The fetchers. fetch_<type> fetches specs of type <type>.
#

fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };

fetch_tarball = pkgs: spec:
if spec.builtin or true then
builtins_fetchTarball { inherit (spec) url sha256; }
else
pkgs.fetchzip { inherit (spec) url sha256; };

fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };

fetch_builtin-tarball = spec:
builtins.trace
''
WARNING:
The niv type "builtin-tarball" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=tarball -a builtin=true
''
builtins_fetchTarball { inherit (spec) url sha256; };

fetch_builtin-url = spec:
builtins.trace
''
WARNING:
The niv type "builtin-url" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=file -a builtin=true
''
(builtins_fetchurl { inherit (spec) url sha256; });

#
# Various helpers
#

# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';

# The actual fetching function.
fetch = pkgs: name: spec:

if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
else if spec.type == "builtin-url" then fetch_builtin-url spec
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";

# Ports of functions for older nix versions

# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);

# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit url; }
else
fetchTarball attrs;

# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;

# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = fetch config.pkgs name spec; }
) config.sources;

# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? ./sources.json
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
, pkgs ? mkPkgs sources
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;

# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
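
The generated sources.nix can also be consumed on its own. A minimal sketch of a hypothetical consumer, reusing haskell.nix's nixpkgsArgs in the same way the .buildkite scripts above do:

let
  # Import the pins from nix/sources.json; the __functor at the end of
  # sources.nix also accepts settings, e.g. a different sourcesFile.
  sources = import ./nix/sources.nix {};
  haskellNix = import sources."haskell.nix" {};
  pkgs = import sources.nixpkgs haskellNix.nixpkgsArgs;
in pkgs.haskell-nix.nix-tools.ghc883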
