diff --git a/contrib/bash-completion/bob b/contrib/bash-completion/bob
index 11222065..1a6de593 100644
--- a/contrib/bash-completion/bob
+++ b/contrib/bash-completion/bob
@@ -29,6 +29,17 @@ __bob_complete_dir()
compgen -d -P "$2" -S / -- "$1" ) )
}
+# Complete file
+__bob_complete_file()
+{
+ local IFS=$'\n'
+ COMPREPLY=( $(for i in "${chroot[@]}" ; do eval ls "$i" || exit ; done
+ compgen -f -P "$2" -- "$1" ) )
+ for ((i=0; i < ${#COMPREPLY[@]}; i++)); do
+ [ -d "${COMPREPLY[$i]}" ] && COMPREPLY[$i]=${COMPREPLY[$i]}/
+ done
+}
+
__bob_commands="build dev clean graph help init jenkins ls project status \
query-scm query-recipe query-path query-meta show layers \
ls-recipes"
@@ -121,14 +132,18 @@ __bob_cook()
{
if [[ "$prev" = "--destination" ]] ; then
__bob_complete_dir "$cur"
+ elif [[ "$prev" == "--bundle" || "$prev" == "--unbundle" ]]; then
+ __bob_complete_file "$cur"
elif [[ "$prev" = "--download" ]] ; then
__bob_complete_words "yes no deps forced forced-deps forced-fallback"
elif [[ "$prev" = "--download-layer" ]] ; then
__bob_complete_words "yes= no= forced="
elif [[ "$prev" = "--always-checkout" ]] ; then
COMPREPLY=( )
+ elif [[ "$prev" = "--bundle-indeterministic" ]] ; then
+ __bob_complete_words "yes no fail"
else
- __bob_complete_path "--destination -j --jobs -k --keep-going -f --force -n --no-deps -p --with-provided --without-provided -A --no-audit --audit -b --build-only -B --checkout-only --normal --clean --incremental --always-checkout --resume -q --quiet -v --verbose --no-logfiles -D -c -e -E -M --upload --link-deps --no-link-deps --download --download-layer --shared --no-shared --install --no-install --sandbox --no-sandbox --slim-sandbox --dev-sandbox --strict-sandbox --clean-checkout --attic --no-attic"
+ __bob_complete_path "--destination -j --jobs -k --keep-going -f --force -n --no-deps -p --with-provided --without-provided -A --no-audit --audit -b --build-only -B --checkout-only --normal --clean --incremental --always-checkout --resume -q --quiet -v --verbose --no-logfiles -D -c -e -E -M --upload --link-deps --no-link-deps --download --download-layer --shared --no-shared --install --no-install --sandbox --no-sandbox --slim-sandbox --dev-sandbox --strict-sandbox --clean-checkout --attic --no-attic --bundle --bundle-exclude --bundle-indeterministic --bundle-vcs --unbundle"
fi
}
diff --git a/doc/manpages/bob-build-dev.rst b/doc/manpages/bob-build-dev.rst
index a57a6c6f..1a1d61dc 100644
--- a/doc/manpages/bob-build-dev.rst
+++ b/doc/manpages/bob-build-dev.rst
@@ -96,6 +96,23 @@ Options
This is the default unless the user changed it in ``default.yaml``.
+``--bundle BUNDLE``
+    Bundle all sources needed to build the package into the zip file BUNDLE.
+    This also enables ``--always-checkout`` and ``--clean-checkout`` and
+    cannot be used together with ``--build-only``.
+
+``--bundle-exclude BUNDLE_EXCLUDE``
+    Do not add sources of packages whose name matches the given regular
+    expression to the bundle. Can be specified multiple times.
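+
+    For example (the package and pattern names below are only illustrative)::
+
+        $ bob build root --bundle root-src.zip \
+            --bundle-exclude 'linux-.*' --bundle-exclude 'host-tools.*'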
+
+``--bundle-vcs``
+    Keep the files used by version control systems (e.g. ``.git`` or ``.svn``
+    directories) in the bundle. By default they are not added.
+
+``--unbundle BUNDLE``
+    Take the sources from BUNDLE instead of checking them out. BUNDLE must be
+    a zip file that was created with the ``--bundle`` option. Sources that
+    are not found in the bundle are checked out as usual.
+
``--clean``
Do clean builds by clearing the build directory before executing the build
commands. It will *not* clean all build results (e.g. like ``make clean``)
@@ -363,6 +380,9 @@ Options
``-q, --quiet``
Decrease verbosity (may be specified multiple times)
+``--unbundle BUNDLE``
+    Use the sources from BUNDLE (a zip file created with ``--bundle``) as
+    source input instead of checking them out.
+
``-v, --verbose``
Increase verbosity (may be specified multiple times)
diff --git a/doc/manpages/bob-build.rst b/doc/manpages/bob-build.rst
index aa88335a..10c386c7 100644
--- a/doc/manpages/bob-build.rst
+++ b/doc/manpages/bob-build.rst
@@ -26,6 +26,8 @@ Synopsis
[--install | --no-install]
[--sandbox | --slim-sandbox | --dev-sandbox | --strict-sandbox | --no-sandbox]
[--clean-checkout] [--attic | --no-attic]
+ [--bundle BUNDLE | --unbundle BUNDLE]
+ [--bundle-exclude BUNDLE_EXCLUDE] [--bundle-vcs]
PACKAGE [PACKAGE ...]
Description
diff --git a/doc/manpages/bob-dev.rst b/doc/manpages/bob-dev.rst
index 23fadc4c..1b7dd3c5 100644
--- a/doc/manpages/bob-dev.rst
+++ b/doc/manpages/bob-dev.rst
@@ -26,6 +26,8 @@ Synopsis
[--install | --no-install]
[--sandbox | --slim-sandbox | --dev-sandbox | --strict-sandbox | --no-sandbox]
[--clean-checkout] [--attic | --no-attic]
+ [--bundle BUNDLE | --unbundle BUNDLE]
+ [--bundle-exclude BUNDLE_EXCLUDE] [--bundle-vcs]
PACKAGE [PACKAGE ...]
Description
diff --git a/doc/tutorial/compile.rst b/doc/tutorial/compile.rst
index c101286c..cbb2960d 100644
--- a/doc/tutorial/compile.rst
+++ b/doc/tutorial/compile.rst
@@ -350,3 +350,30 @@ the zlib packages: ::
.. raw:: html
+
+Using source bundles
+====================
+
+A source code bundle is a zip file containing all the sources required to build
+a package. Such a bundle can be used to compile on an air-gapped system, to
+archive the build input or to hand the sources over to a reviewer.
+
+To create a bundle for a given package, build this package using
+:ref:`manpage-dev` or :ref:`manpage-build` with the ``--bundle`` option. It
+takes one argument: the name of the bundle file to create. Several other
+``--bundle-*`` options are available to control what goes into the bundle.
+Refer to the manpages for a detailed description of those.
+
+For example, to bundle the sources needed to build `my_package`, use: ::
+
+ $ bob build my_package --bundle my_package_bundle.zip
+
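+The resulting bundle is an ordinary zip archive, so any zip tool can be used to
+inspect what went into it, e.g.: ::
+
+    $ unzip -l my_package_bundle.zip
+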
+After that you can take ``my_package_bundle.zip`` to another system and use: ::
+
+ $ bob build my_package --unbundle my_package_bundle.zip
+
+to build `my_package` from the bundled sources.
+
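+The bundle contents can be tuned when creating the bundle. For instance, the
+version control metadata (e.g. the ``.git`` directories) is normally stripped
+from the bundle; to keep it, add ``--bundle-vcs``: ::
+
+    $ bob build my_package --bundle my_package_bundle.zip --bundle-vcs
+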
+.. note::
+    The recipes and `bob` itself are not part of the bundle and need to be
+    transferred separately. It is strongly recommended to use the same recipes
+    that were used to create the bundle.
diff --git a/pym/bob/archive.py b/pym/bob/archive.py
index 658290e0..309e01ef 100644
--- a/pym/bob/archive.py
+++ b/pym/bob/archive.py
@@ -25,15 +25,17 @@
SKIPPED, EXECUTED, WARNING, INFO, TRACE, ERROR, IMPORTANT
from .utils import asHexStr, removePath, isWindows, getBashPath, tarfileOpen, binStat, removePrefix
from .webdav import WebDav, HTTPException, HttpDownloadError, HttpUploadError, HttpNotFoundError, HttpAlreadyExistsError
-from tempfile import mkstemp, NamedTemporaryFile, TemporaryFile, gettempdir
+from tempfile import mkstemp, NamedTemporaryFile, TemporaryDirectory, TemporaryFile, gettempdir
import asyncio
import concurrent.futures
import concurrent.futures.process
import errno
import gzip
import io
+import re
import os
import os.path
+import pathlib
import shutil
import signal
import socket
@@ -42,6 +44,7 @@
import tarfile
import urllib.parse
import hashlib
+import zipfile
ARCHIVE_GENERATION = '-1'
ARTIFACT_SUFFIX = ".tgz"
@@ -95,9 +98,15 @@ def wantUploadJenkins(self, enable):
def canDownload(self):
return False
+ def canDownloadSrc(self):
+ return False
+
def canUpload(self):
return False
+ def canUploadSrc(self, step, freshCheckout=None):
+ return False
+
def canCache(self):
return False
@@ -123,6 +132,9 @@ async def downloadLocalFingerprint(self, step, key, executor=None):
def getArchiveName(self):
return "Dummy"
+ def finish(self, success):
+ return True
+
class ArtifactNotFoundError(Exception):
pass
@@ -194,13 +206,13 @@ def _extractAudit(self, filename=None, fileobj=None):
return Audit.fromByteStream(auditJson, filename)
- def _pack(self, name, fileobj, audit, content):
+ def _pack(self, name, fileobj, audit, content, filter):
pax = { 'bob-archive-vsn' : "1" }
with gzip.open(name or fileobj, 'wb', 6) as gzf:
with tarfileOpen(name, "w", fileobj=gzf,
format=tarfile.PAX_FORMAT, pax_headers=pax) as tar:
tar.add(audit, "meta/" + os.path.basename(audit))
- tar.add(content, arcname="content")
+ tar.add(content, arcname="content", filter=filter)
class JenkinsArchive(TarHelper):
@@ -224,9 +236,15 @@ def wantUploadJenkins(self, enable):
def canDownload(self):
return True
+ def canDownloadSrc(self):
+ return False
+
def canUpload(self):
return True
+ def canUploadSrc(self, step, freshCheckout=None):
+ return False
+
def canCache(self):
return True
@@ -263,7 +281,7 @@ def _uploadPackage(self, name, buildId, audit, content):
# Needed to gracefully handle ctrl+c.
signal.signal(signal.SIGINT, signal.default_int_handler)
try:
- self._pack(name, None, audit, content)
+ self._pack(name, None, audit, content, None)
except (tarfile.TarError, OSError) as e:
raise BuildError("Cannot pack artifact: " + str(e))
finally:
@@ -375,6 +393,9 @@ def __init__(self, spec):
self.__wantDownloadJenkins = False
self.__wantUploadLocal = False
self.__wantUploadJenkins = False
+ self.__srcUpload = "src-upload" in flags
+ self.__srcDownload = "src-download" in flags
+ self.__srcUploadVCS = spec.get("src-upload-vcs", False)
@property
def ignoreUploadErrors(self):
@@ -396,16 +417,31 @@ def canDownload(self):
return self.__useDownload and ((self.__wantDownloadLocal and self.__useLocal) or
(self.__wantDownloadJenkins and self.__useJenkins))
+ def canDownloadSrc(self):
+ return self.__srcDownload and ((self.__wantDownloadLocal and self.__useLocal) or
+ (self.__wantDownloadJenkins and self.__useJenkins))
+
def canUpload(self):
return self.__useUpload and ((self.__wantUploadLocal and self.__useLocal) or
(self.__wantUploadJenkins and self.__useJenkins))
+ def canUploadSrc(self, step, freshCheckout=None):
+ return (self.__srcUpload and (True if freshCheckout is None else freshCheckout) and
+ ((self.__wantUploadLocal and self.__useLocal) or
+ (self.__wantUploadJenkins and self.__useJenkins)))
+
def canCache(self):
return self.__useCache
def _openDownloadFile(self, buildId, suffix):
raise ArtifactNotFoundError()
+    # Strip version control metadata directories (.git/.svn) from source
+    # uploads unless keeping them was explicitly requested.
+    @staticmethod
+    def _srcUploadVcsFilter(tarinfo):
+        if tarinfo.isdir() and \
+                os.path.basename(tarinfo.name) in (".git", ".svn"):
+            return None
+        return tarinfo
+
def canManage(self):
return self.__managed and self._canManage()
@@ -421,7 +457,8 @@ def _namedErrorString(self, err):
async def downloadPackage(self, step, buildId, audit, content, caches=[],
executor=None):
- if not self.canDownload():
+ if not ((self.canDownload() and not step.isCheckoutStep()) or (self.canDownloadSrc()
+ and step.isCheckoutStep())):
return False
loop = asyncio.get_event_loop()
@@ -536,8 +573,13 @@ def _openUploadFile(self, buildId, suffix, overwrite):
raise ArtifactUploadError("not implemented")
async def uploadPackage(self, step, buildId, audit, content, executor=None):
- if not self.canUpload():
+        if (step.isPackageStep() and not self.canUpload()) or \
+           (step.isCheckoutStep() and not self.canUploadSrc(step)):
+            return
+
+        # Never upload the sources of indeterministic checkouts.
+        if step.isCheckoutStep() and not step.isDeterministic():
+            return
+
if not audit:
stepMessage(step, "UPLOAD", "skipped (no audit trail)", SKIPPED,
IMPORTANT)
@@ -549,19 +591,20 @@ async def uploadPackage(self, step, buildId, audit, content, executor=None):
with stepAction(step, "UPLOAD", content, details=details) as a:
try:
msg, kind = await loop.run_in_executor(executor, BaseArchive._uploadPackage,
- self, buildId, suffix, audit, content)
+ self, buildId, suffix, audit, content,
+ (BaseArchive._srcUploadVcsFilter if step.isCheckoutStep() and not self.__srcUploadVCS else None))
a.setResult(msg, kind)
except (concurrent.futures.CancelledError, concurrent.futures.process.BrokenProcessPool):
raise BuildError(self._namedErrorString("Upload of package interrupted."))
- def _uploadPackage(self, buildId, suffix, audit, content):
+ def _uploadPackage(self, buildId, suffix, audit, content, filter):
# Set default signal handler so that KeyboardInterrupt is raised.
# Needed to gracefully handle ctrl+c.
signal.signal(signal.SIGINT, signal.default_int_handler)
try:
with self._openUploadFile(buildId, suffix, False) as (name, fileobj):
- self._pack(name, fileobj, audit, content)
+ self._pack(name, fileobj, audit, content, filter)
except (ArtifactExistsError, HttpAlreadyExistsError):
return (self._namedErrorString("skipped ({} exists in archive)".format(content)), SKIPPED)
except (ArtifactUploadError, HttpUploadError, tarfile.TarError, OSError) as e:
@@ -697,6 +740,8 @@ def _getArchiveUri(self):
def getArchiveName(self):
return self.__name
+ def finish(self, success):
+ return True
class Tee:
def __init__(self, fileName, fileObj, buildId, caches, workspace):
@@ -902,6 +947,88 @@ def __exit__(self, exc_type, exc_value, traceback):
os.unlink(self.tmp.name)
return False
+class BundleArchiveDownloader:
+ def __init__(self, bundle, name):
+ self.__bundle = bundle
+ self.__name = name
+
+    def __enter__(self):
+        try:
+            self._zip = zipfile.ZipFile(self.__bundle, mode='r')
+            self.fd = self._zip.open(self.__name)
+        except KeyError:
+            self._zip.close()
+            raise ArtifactDownloadError(f"{self.__name} not found in {self.__bundle}.")
+        except OSError as e:
+            raise ArtifactDownloadError(str(e))
+        return (None, self.fd)
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.fd.close()
+        self._zip.close()
+        return False
+
+class BundleArchive(LocalArchive):
+ def __init__(self, spec):
+ self.__file = spec.get("path")
+ self.__mode = spec.get("mode")
+ self.__bundle = self.__mode == "bundle"
+ self.__exclude = spec.get("exclude")
+ spec["flags"] = ["src-upload" if self.__bundle else "src-download"]
+ if self.__bundle:
+ self.__tempdir = spec.get("tempdir")
+ spec["path"] = spec.get("tempdir")
+ super().__init__(spec)
+
+ def canDownload(self):
+ return False
+
+ def canUpload(self):
+ return False
+
+ def canDownloadSrc(self):
+ return not self.__bundle
+
+ def _canUploadSrc(self, step):
+ if self.__exclude is not None:
+ for p in self.__exclude:
+ if re.match(p, step.getPackage().getName()):
+ return False
+ return self.__bundle
+
+ def canUploadSrc(self, step, freshCheckout=None):
+ return self._canUploadSrc(step)
+
+ def _getZipPath(self, buildId, suffix):
+ packageResultId = buildIdToName(buildId)
+ return "/".join([packageResultId[0:2],
+ packageResultId[2:4],
+ packageResultId[4:]]) + suffix
+
+    def _openDownloadFile(self, buildId, suffix):
+        packageResultFile = self._getZipPath(buildId, suffix)
+        return BundleArchiveDownloader(self.__file, packageResultFile)
+
+    def finish(self, success):
+        if not self.__bundle:
+            return True
+        # Merge everything that was uploaded into the temporary directory into
+        # the final bundle zip file.
+        try:
+            if success:
+                print(f"Finalizing bundle: {self.__file}")
+                bundleDir = pathlib.Path(self.__tempdir)
+                with zipfile.ZipFile(self.__file, mode="a") as bundleZip:
+                    names = bundleZip.namelist()
+                    for file_path in bundleDir.rglob("*"):
+                        rpath = str(file_path.relative_to(bundleDir))
+                        if os.path.isdir(file_path):
+                            rpath += os.sep
+                        if rpath in names:
+                            if not os.path.isdir(file_path):
+                                print(f"Not adding {rpath}. Exists in bundle!")
+                            continue
+                        bundleZip.write(file_path, arcname=rpath)
+        except OSError as e:
+            raise BuildError("Unable to create bundle zip file: " + str(e))
+        return True
class HttpArchive(BaseArchive):
def __init__(self, spec):
@@ -1272,17 +1399,23 @@ def wantUploadJenkins(self, enable):
def canDownload(self):
return any(i.canDownload() for i in self.__archives)
+ def canDownloadSrc(self):
+ return any(i.canDownloadSrc() for i in self.__archives)
+
def canUpload(self):
return any(i.canUpload() for i in self.__archives)
+ def canUploadSrc(self, step, freshCheckout=None):
+ return any(i.canUploadSrc(step, freshCheckout) for i in self.__archives)
+
async def uploadPackage(self, step, buildId, audit, content, executor=None):
for i in self.__archives:
- if not i.canUpload(): continue
+ if not (i.canUpload() or i.canUploadSrc(step)): continue
await i.uploadPackage(step, buildId, audit, content, executor=executor)
async def downloadPackage(self, step, buildId, audit, content, executor=None):
for i in self.__archives:
- if not i.canDownload(): continue
+ if not (i.canDownload() or i.canDownloadSrc()): continue
caches = [ a for a in self.__archives if (a is not i) and a.canCache() ]
if await i.downloadPackage(step, buildId, audit, content, caches, executor):
return True
@@ -1314,6 +1447,9 @@ async def downloadLocalFingerprint(self, step, key, executor=None):
if ret is not None: break
return ret
+ def finish(self, success):
+ for i in self.__archives:
+ i.finish(success)
def getSingleArchiver(recipes, archiveSpec):
archiveBackend = archiveSpec.get("backend", "none")
@@ -1329,10 +1465,12 @@ def getSingleArchiver(recipes, archiveSpec):
return DummyArchive()
elif archiveBackend == "__jenkins":
return JenkinsArchive(archiveSpec)
+ elif archiveBackend == "__bundle":
+ return BundleArchive(archiveSpec)
else:
raise BuildError("Invalid archive backend: "+archiveBackend)
-def getArchiver(recipes, jenkins=None):
+def getArchiver(recipes, jenkins=None, bundle=None):
archiveSpec = recipes.archiveSpec()
if jenkins is not None:
jenkins = jenkins.copy()
@@ -1342,6 +1480,14 @@ def getArchiver(recipes, jenkins=None):
else:
archiveSpec = [jenkins, archiveSpec]
+ if bundle is not None and bundle.get("path") is not None:
+ bundle = bundle.copy()
+ bundle["backend"] = "__bundle"
+ if isinstance(archiveSpec, list):
+ archiveSpec = [bundle] + archiveSpec
+ else:
+ archiveSpec = [bundle, archiveSpec]
+
if isinstance(archiveSpec, list):
if len(archiveSpec) == 0:
return DummyArchive()
diff --git a/pym/bob/builder.py b/pym/bob/builder.py
index 4b1925eb..e1992cce 100644
--- a/pym/bob/builder.py
+++ b/pym/bob/builder.py
@@ -68,9 +68,11 @@ def invalidate(self):
CHECKOUT_STATE_VARIANT_ID = None # Key in checkout directory state for step variant-id
CHECKOUT_STATE_BUILD_ONLY = 1 # Key for checkout state of build-only builds
+CHECKOUT_STATE_BUNDLE = 2 # Store whether the checkout origin was a bundle or not
# Keys in checkout getDirectoryState that are not directories
-CHECKOUT_NON_DIR_KEYS = {CHECKOUT_STATE_VARIANT_ID, CHECKOUT_STATE_BUILD_ONLY}
+CHECKOUT_NON_DIR_KEYS = {CHECKOUT_STATE_VARIANT_ID, CHECKOUT_STATE_BUILD_ONLY,
+ CHECKOUT_STATE_BUNDLE}
def compareDirectoryState(left, right):
"""Compare two directory states while ignoring the SCM specs.
@@ -88,6 +90,10 @@ def compareDirectoryState(left, right):
right = { d : v[0] for d, v in right.items() if d != CHECKOUT_STATE_BUILD_ONLY }
return left == right
+def compareBundleState(left, right):
+    # Compare only the bundle flag itself. Old states that do not have the
+    # entry at all are treated like a regular, non-bundle checkout.
+    _r = right.get(CHECKOUT_STATE_BUNDLE, (False, None))
+    return left[CHECKOUT_STATE_BUNDLE][0] == _r[0]
+
def checkoutsFromState(state):
"""Return only the tuples related to SCMs from the checkout state.
@@ -356,7 +362,7 @@ def fmt(step, props):
return fmt
def __init__(self, verbose, force, skipDeps, buildOnly, preserveEnv,
- envWhiteList, bobRoot, cleanBuild, noLogFile):
+ envWhiteList, bobRoot, cleanBuild, noLogFile, unbundle):
self.__wasRun= {}
self.__wasSkipped = {}
self.__wasDownloadTried = {}
@@ -397,6 +403,7 @@ def __init__(self, verbose, force, skipDeps, buildOnly, preserveEnv,
self.__executor = None
self.__attic = True
self.__slimSandbox = False
+ self.__unbundle = unbundle
def setExecutor(self, executor):
self.__executor = executor
@@ -1102,6 +1109,7 @@ async def _cookCheckoutStep(self, checkoutStep, depth):
checkoutState = checkoutStep.getScmDirectories().copy()
checkoutState[CHECKOUT_STATE_VARIANT_ID] = (checkoutDigest, None)
checkoutState[CHECKOUT_STATE_BUILD_ONLY] = checkoutBuildOnlyState(checkoutStep, checkoutInputHashes)
+ checkoutState[CHECKOUT_STATE_BUNDLE] = (self.__unbundle, None)
currentResultHash = HashOnce(checkoutStep)
if self.__buildOnly and (BobState().getResultHash(prettySrcPath) is not None):
inputChanged = checkoutBuildOnlyStateChanged(checkoutState, oldCheckoutState)
@@ -1147,7 +1155,10 @@ async def _cookCheckoutStep(self, checkoutStep, depth):
elif not checkoutStep.isDeterministic():
checkoutReason = "indeterministic"
elif not compareDirectoryState(checkoutState, oldCheckoutState):
- checkoutReason = "recipe changed"
+ if not compareBundleState(checkoutState, oldCheckoutState):
+ checkoutReason = "bundle mode changed"
+ else:
+ checkoutReason = "recipe changed"
elif (checkoutInputHashes != BobState().getInputHashes(prettySrcPath)):
checkoutReason = "dependency changed"
elif (checkoutStep.getMainScript() or checkoutStep.getPostRunCmds()) \
@@ -1171,8 +1182,10 @@ async def _cookCheckoutStep(self, checkoutStep, depth):
BobState().setAtticDirectoryState(atticPath, scmSpec)
del oldCheckoutState[scmDir]
BobState().setDirectoryState(prettySrcPath, oldCheckoutState)
- elif scmDigest != checkoutState.get(scmDir, (None, None))[0]:
+ elif (scmDigest != checkoutState.get(scmDir, (None, None))[0]) or \
+ not compareBundleState(checkoutState, oldCheckoutState):
canSwitch = (scmDir in scmMap) and scmDigest and \
+ compareBundleState(checkoutState, oldCheckoutState) and \
scmSpec is not None and \
scmMap[scmDir].canSwitch(getScm(scmSpec)) and \
os.path.exists(scmPath)
@@ -1233,11 +1246,26 @@ async def _cookCheckoutStep(self, checkoutStep, depth):
oldCheckoutHash = datetime.datetime.now()
BobState().setResultHash(prettySrcPath, oldCheckoutHash)
- with stepExec(checkoutStep, "CHECKOUT",
- "{} ({}) {}".format(prettySrcPath, checkoutReason, overridesString)) as a:
- await self._runShell(checkoutStep, "checkout", a, created)
- self.__statistic.checkouts += 1
- checkoutExecuted = True
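+ # For freshly created workspaces, try to fetch the sources from a source
+ # archive (e.g. a bundle given via --unbundle) first and fall back to a
+ # regular checkout if nothing was downloaded.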
+ wasDownloaded = False
+ if self.__archive.canDownloadSrc() and created:
+ audit= os.path.join(os.path.dirname(checkoutStep.getWorkspacePath()),
+ "audit.json.gz")
+ wasDownloaded = await self.__archive.downloadPackage(checkoutStep,
+ checkoutDigest, audit, prettySrcPath, executor=self.__executor)
+
+ if wasDownloaded:
+ if not os.path.exists(audit):
+ raise BuildError("Downloaded artifact misses its audit trail!")
+ checkoutHash = hashWorkspace(checkoutStep)
+ if Audit.fromFile(audit).getArtifact().getResultHash() != checkoutHash:
+ raise BuildError("Corrupt downloaded artifact! Extracted content hash does not match audit trail.")
+
+ if not wasDownloaded:
+ with stepExec(checkoutStep, "CHECKOUT",
+ "{} ({}) {}".format(prettySrcPath, checkoutReason, overridesString)) as a:
+ await self._runShell(checkoutStep, "checkout", a, created)
+ self.__statistic.checkouts += 1
+ checkoutExecuted = True
currentResultHash.invalidate() # force recalculation
# reflect new checkout state
BobState().setDirectoryState(prettySrcPath, checkoutState)
@@ -1280,6 +1308,13 @@ async def _cookCheckoutStep(self, checkoutStep, depth):
assert predicted, "Non-predicted incorrect Build-Id found!"
self.__handleChangedBuildId(checkoutStep, checkoutHash)
+ if self.__archive.canUploadSrc(checkoutStep, isFreshCheckout):
+ auditPath = os.path.join(os.path.dirname(checkoutStep.getWorkspacePath()),
+ "audit.json.gz")
+ await self.__archive.uploadPackage(checkoutStep, checkoutDigest,
+ auditPath,
+ checkoutStep.getStoragePath(), executor=self.__executor)
+
async def _cookBuildStep(self, buildStep, depth, buildBuildId):
# Add the execution path of the build step to the buildDigest to
# detect changes between sandbox and non-sandbox builds. This is
diff --git a/pym/bob/cmds/build/build.py b/pym/bob/cmds/build/build.py
index afc9607b..fe45fa64 100644
--- a/pym/bob/cmds/build/build.py
+++ b/pym/bob/cmds/build/build.py
@@ -22,6 +22,7 @@
import stat
import sys
import time
+import tempfile
from .state import DevelopDirOracle
@@ -224,6 +225,16 @@ def _downloadLayerArgument(arg):
help="Move scm to attic if inline switch is not possible (default).")
group.add_argument('--no-attic', action='store_false', default=None, dest='attic',
help="Do not move to attic, instead fail the build.")
+
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--bundle', metavar='BUNDLE', default=None,
+ help="Bundle sources to BUNDLE")
+ group.add_argument('--unbundle', metavar='BUNDLE', default=None,
+ help="Prefer sources from BUNDLE.")
+ parser.add_argument('--bundle-exclude', action='append', default=[],
+ help="Do not add packages matching the regular expression to the bundle. Can be given multiple times.")
+ parser.add_argument('--bundle-vcs', default=False, action='store_true',
+ help="Do not strip version control system information from the bundle.")
args = parser.parse_args(argv)
defines = processDefines(args.defines)
@@ -315,15 +326,38 @@ def _downloadLayerArgument(arg):
sandboxMode.stablePaths)
if develop: developPersister.prime(packages)
+ if args.bundle and args.build_mode == 'build-only':
+ parser.error("--bundle can't be used with --build-only")
+
+ bundleSpec = None
+ bundleTemp = None
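+ # --bundle: force checkout of all packages (--always-checkout/--clean-checkout)
+ # and let the BundleArchive backend collect the sources in a temporary
+ # directory that is zipped up by BundleArchive.finish() after the build.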
+ if args.bundle is not None:
+ bundleTemp = tempfile.TemporaryDirectory(dir=os.getcwd(),
+ prefix=".bundle")
+ bundleSpec = {"path" : args.bundle,
+ "mode" : "bundle",
+ "flags" : ["src-upload"],
+ "src-upload-vcs" : args.bundle_vcs,
+ "exclude" : args.bundle_exclude,
+ "tempdir" : bundleTemp.name}
+ args.always_checkout += ['.*']
+ args.clean_checkout = True
+ if args.unbundle is not None:
+ bundleSpec = {"path" : args.unbundle,
+ "flags" : ["src-download"],
+ "mode" : "unbundle"}
+
+
+ archivers = getArchiver(recipes, bundle=bundleSpec)
verbosity = cfg.get('verbosity', 0) + args.verbose - args.quiet
setVerbosity(verbosity)
builder = LocalBuilder(verbosity, args.force,
args.no_deps, True if args.build_mode == 'build-only' else False,
args.preserve_env, envWhiteList, bobRoot, args.clean,
- args.no_logfiles)
+ args.no_logfiles, args.unbundle is not None)
builder.setExecutor(executor)
- builder.setArchiveHandler(getArchiver(recipes))
+ builder.setArchiveHandler(archivers)
builder.setLocalUploadMode(args.upload)
builder.setLocalDownloadMode(args.download)
builder.setLocalDownloadLayerMode(args.download_layer)
@@ -339,6 +373,7 @@ def _downloadLayerArgument(arg):
builder.setShareMode(args.shared, args.install)
builder.setAtticEnable(args.attic)
builder.setSlimSandbox(sandboxMode.slimSandbox)
+
if args.resume: builder.loadBuildState()
backlog = []
@@ -380,10 +415,14 @@ def _downloadLayerArgument(arg):
finally:
if args.jobs > 1: setTui(1)
builder.saveBuildState()
+ archivers.finish(success)
+ if bundleTemp is not None:
+ bundleTemp.cleanup()
runHook(recipes, 'postBuildHook', ["success" if success else "fail"] + results)
# tell the user
if results:
if len(results) == 1:
print("Build result is in", results[0])
else:
diff --git a/pym/bob/cmds/jenkins/exec.py b/pym/bob/cmds/jenkins/exec.py
index b07f42f4..4109bbec 100644
--- a/pym/bob/cmds/jenkins/exec.py
+++ b/pym/bob/cmds/jenkins/exec.py
@@ -223,7 +223,7 @@ def doJenkinsExecuteRun(argv, bobRoot):
with EventLoopWrapper() as (loop, executor):
setVerbosity(TRACE)
builder = LocalBuilder(TRACE, False, False, False, False, envWhiteList,
- bobRoot, False, True)
+ bobRoot, False, True, False)
builder.setBuildDistBuildIds(dependencyBuildIds)
builder.setExecutor(executor)
builder.setArchiveHandler(getArchiver(
diff --git a/pym/bob/input.py b/pym/bob/input.py
index e57396f4..4b1f4e45 100644
--- a/pym/bob/input.py
+++ b/pym/bob/input.py
@@ -3084,7 +3084,8 @@ def __init__(self):
'backend' : str,
schema.Optional('name') : str,
schema.Optional('flags') : schema.Schema(["download", "upload", "managed",
- "nofail", "nolocal", "nojenkins", "cache", "strictdownload"])
+ "nofail", "nolocal", "nojenkins", "cache", "strictdownload",
+ "src-upload", "src-download"]),
}
fileArchive = baseArchive.copy()
fileArchive["path"] = str
diff --git a/test/black-box/bundle/.gitignore b/test/black-box/bundle/.gitignore
new file mode 100644
index 00000000..75f416e7
--- /dev/null
+++ b/test/black-box/bundle/.gitignore
@@ -0,0 +1 @@
+bundle.zip
diff --git a/test/black-box/bundle/bundle.zip b/test/black-box/bundle/bundle.zip
new file mode 100644
index 00000000..24604583
Binary files /dev/null and b/test/black-box/bundle/bundle.zip differ
diff --git a/test/black-box/bundle/config.yaml b/test/black-box/bundle/config.yaml
new file mode 100644
index 00000000..9ad56157
--- /dev/null
+++ b/test/black-box/bundle/config.yaml
@@ -0,0 +1 @@
+bobMinimumVersion: "1.0.0"
diff --git a/test/black-box/bundle/indeterministic.yaml b/test/black-box/bundle/indeterministic.yaml
new file mode 100644
index 00000000..0e23fd0a
--- /dev/null
+++ b/test/black-box/bundle/indeterministic.yaml
@@ -0,0 +1,4 @@
+scmOverrides:
+ - match:
+ url: "*"
+ del: [digestSHA1, commit]
diff --git a/test/black-box/bundle/recipes/git.yaml b/test/black-box/bundle/recipes/git.yaml
new file mode 100644
index 00000000..049965d9
--- /dev/null
+++ b/test/black-box/bundle/recipes/git.yaml
@@ -0,0 +1,9 @@
+checkoutSCM:
+ scm: git
+ url: ${GIT_URL}
+ commit: ${GIT_COMMIT}
+
+buildScript: |
+ cp -r $1/* .
+packageScript: |
+ cp -r $1/* .
diff --git a/test/black-box/bundle/recipes/root.yaml b/test/black-box/bundle/recipes/root.yaml
new file mode 100644
index 00000000..e80db7cd
--- /dev/null
+++ b/test/black-box/bundle/recipes/root.yaml
@@ -0,0 +1,8 @@
+root: True
+
+depends:
+ - git
+ - tar
+
+buildScript: "true"
+packageScript: "true"
diff --git a/test/black-box/bundle/recipes/tar.yaml b/test/black-box/bundle/recipes/tar.yaml
new file mode 100644
index 00000000..a98e8fb5
--- /dev/null
+++ b/test/black-box/bundle/recipes/tar.yaml
@@ -0,0 +1,7 @@
+checkoutSCM:
+ scm: url
+ url: ${TAR_URL}
+ digestSHA1: ${TAR_SHA1}
+
+buildScript: "true"
+packageScript: "true"
diff --git a/test/black-box/bundle/run.sh b/test/black-box/bundle/run.sh
new file mode 100755
index 00000000..d3509a92
--- /dev/null
+++ b/test/black-box/bundle/run.sh
@@ -0,0 +1,145 @@
+#!/bin/bash -e
+. ../../test-lib.sh 2>/dev/null || { echo "Must run in script directory!" ; exit 1 ; }
+
+cleanup
+rm -rf default.yaml
+
+trap 'rm -rf "${archiveDir}" "${srcDir}" "${srcDirTemp}" default.yaml bundle.zip' EXIT
+archiveDir=$(mktemp -d)
+srcDir=$(mktemp -d)
+srcDirTemp=$(mktemp -d)
+
+rm -rf $srcDir/*
+# setup sources for checkouts
+pushd $srcDir
+mkdir -p git_scm
+pushd git_scm
+git init -b master .
+git config user.email "bob@bob.bob"
+git config user.name test
+echo "Hello World!" > hello.txt
+git add hello.txt
+git commit -m "hello"
+echo "foo" > foo.txt
+git add foo.txt
+git commit -m "foo"
+
+GIT_URL=$(pwd)
+GIT_COMMIT=$(git rev-parse HEAD)
+popd #git_scm
+
+mkdir -p tar
+pushd tar
+dd if=/dev/zero of=test.dat bs=1K count=1
+tar cvf test.tar test.dat
+TAR_URL=$(pwd)/test.tar
+TAR_SHA1=$(sha1sum test.tar | cut -d ' ' -f1)
+popd #tar
+popd # srcDir
+
+function run_src_upload_tests () {
+ # cleanup
+ rm -rf work dev $archiveDir/*
+
+ cat > default.yaml < content/foo.dat
+ tar --pax-option bob-archive-vsn=1 -zcf "$(basename $A)" meta content
+ rm content meta -rf
+ popd
+ popd
+
+ expect_fail run_bob dev root -DTAR_URL=${TAR_URL} -DTAR_SHA1=${TAR_SHA1} -DGIT_URL=${GIT_URL} -DGIT_COMMIT=${GIT_COMMIT} --download yes
+
+ rm default.yaml
+}
+
+function _run_bob() {
+ run_bob dev root -DTAR_URL=${TAR_URL} -DTAR_SHA1=${TAR_SHA1} \
+ -DGIT_URL=${GIT_URL} -DGIT_COMMIT=${GIT_COMMIT} \
+ -v \
+ "$@"
+}
+
+function _run_bundle () {
+ _run_bob --bundle bundle.zip "$@"
+}
+
+function _run_unbundle () {
+ cleanup
+ run_bob dev root -DTAR_SHA1=${TAR_SHA1} \
+ -DGIT_COMMIT=${GIT_COMMIT} \
+ --unbundle bundle.zip "$@"
+}
+
+function run_bundle_tests () {
+ cleanup
+ _run_bundle
+ _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+ expect_not_exist dev/src/git/1/workspace/.git
+
+ # editing code + build should work as usual
+ echo "hello" > dev/src/git/1/workspace/hello.txt
+ _run_bob -b
+ expect_exist dev/dist/git/1/workspace/hello.txt
+
+ rm dev/dist/git/1/workspace/hello.txt
+ _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+ expect_exist dev/dist/git/1/workspace/hello.txt
+
+ # switching from bundle mode to normal mode should move to attic
+ touch dev/src/git/1/workspace/canary.txt
+ _run_bob
+ expect_not_exist dev/src/git/1/workspace/canary.txt
+
+ # switching from normal mode to bundle mode should move to attic
+ touch dev/src/git/1/workspace/canary.txt
+ _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+ expect_not_exist dev/src/git/1/workspace/canary.txt
+
+ # we always bundle clean sources
+ cleanup
+ _run_bob
+ echo "hello" > dev/src/git/1/workspace/new.txt
+ # XXX: clean checkout does not clean url scms :/
+ # echo "hello" > dev/src/tar/1/workspace/new.txt
+ _run_bundle
+ expect_not_exist dev/src/git/1/workspace/new.txt
+ # expect_not_exist dev/src/tar/1/workspace/new.txt
+ _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+
+ # test bundle-vcs option
+ cleanup bundle.zip
+ _run_bundle --bundle-vcs
+ _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+ expect_exist dev/src/git/1/workspace/.git
+
+ # test exclude
+ cleanup bundle.zip
+ _run_bundle --bundle-exclude "ta*"
+ expect_fail _run_unbundle -DTAR_URL="/nonexisting/test.tar" -DGIT_URL="/nonexisting/test.git"
+ expect_not_exist dev/src/tar/1/workspace/test.dat
+}
+
+run_src_upload_tests
+run_bundle_tests
diff --git a/test/test-lib.sh b/test/test-lib.sh
index b2f26991..5667d7f3 100644
--- a/test/test-lib.sh
+++ b/test/test-lib.sh
@@ -144,14 +144,12 @@ exec_blackbox_test()
expect_fail()
{
- "$@" 2>&1 || if [[ $? -ne 1 ]] ; then
- echo "Unexpected return code: $*" >&2
+ "$@" 2>&1 || if [[ $? -eq 0 ]] ; then
+ echo "Expected command to fail: $*" >&2
return 1
else
return 0
fi
- echo "Expected command to fail: $*" >&2
- return 1
}
expect_output()
diff --git a/test/unit/test_archive.py b/test/unit/test_archive.py
index 5823da05..80f50311 100644
--- a/test/unit/test_archive.py
+++ b/test/unit/test_archive.py
@@ -61,6 +61,12 @@ def getPackage(self):
return DummyPackage()
def getWorkspacePath(self):
return "unused"
+ def isCheckoutStep(self):
+ return False
+ def isDeterministic(self):
+ return False
+ def isPackageStep(self):
+ return True
def run(coro):
with patch('bob.archive.signal.signal'):