diff --git a/.gbp.conf b/.gbp.conf new file mode 100644 index 00000000..e39adade --- /dev/null +++ b/.gbp.conf @@ -0,0 +1,25 @@ +[DEFAULT] +# Do not merge to packaging (after update) +merge=False +# Vendor/Distro name +vendor=Tizen +# Package is from Debian +upstream-tag=debian/%(version)s +# Don't use pristine-tar +pristine-tar=False +# Branch naming +packaging-branch=master +upstream-branch=upstream +# Tag format for releases +packaging-tag = tizen/%(upstreamversion)s-%(nowtime)s +# Subdir for RPM packaging data +packaging-dir=packaging +# Auto-generate patches against upstream +patch-export = True +patch-numbers = True +patch-export-ignore-path = (.gbp.conf|packaging/.*|debian/.*) +# Don't fail if the current branch does not match the main packaging branch +ignore-branch = True + +[git-rpm-ch] +changelog-revision=%(tagname)s diff --git a/README b/README index 9242c07e..45b971cb 100644 --- a/README +++ b/README @@ -12,9 +12,6 @@ The API documentation of the gbp module can be found at: http://honk.sigxcpu.org/projects/git-buildpackage/apidocs/ -The mailing list is at: +The documentation available for the RPM support can be found at - http://lists.sigxcpu.org/mailman/listinfo/git-buildpackage - git-buildpackage at lists.sigxcpu.org - -See the HACKING document for details on contributing to gbp development. + http://marquiz.github.com/git-buildpackage-rpm/ diff --git a/TODO b/TODO index baaa055c..295398a5 100644 --- a/TODO +++ b/TODO @@ -1 +1,18 @@ See https://honk.sigxcpu.org/piki/projects/git-buildpackage/ + +RPM-related things: +- unit tests: + - add unit tests for the rpm command line tools +- manpages for rpm tools + - git-buildpackage-rpm + - git-import-srpm + - git-import-orig-rpm + - gbp-pq-rpm +- better html-documentation for rpm support +- write a tool for automatic changelog creation/updating +- bare-repo support + - buildpackage-rpm + - import-orig + - import-srpm + - pristine-tar + - gbp-pq (very limited, if any) diff --git a/debian/changelog b/debian/changelog index 66a7a10c..1db1f293 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +git-buildpackage (0.7.0-tizen20151027) UNRELEASED; urgency=low + + * Rebase on top of upstream version 0.7.0 + + -- Markus Lehtonen Tue, 27 Oct 2015 10:05:38 +0200 + git-buildpackage (0.7.0) unstable; urgency=medium [ Guido Günther ] @@ -385,6 +391,69 @@ git-buildpackage (0.6.23) experimental; urgency=medium -- Guido Günther Sun, 18 Jan 2015 15:29:57 +0100 +git-buildpackage (0.6.22-tizen20150217) unstable; urgency=low + + * log: don't crash getLogger if getting the root logger + + -- Markus Lehtonen Tue, 17 Feb 2015 12:54:14 +0200 + +git-buildpackage (0.6.22-tizen20150206) unstable; urgency=low + + * Rebase on top of upstream version 0.6.22 + * Experimental support for BitBake + - Introduce import-bb tool. + This is the first tool in an effort of enabling gbp in the BitBake build + environment. Gbp-import-bb is a tool for importing packages from a + BitBake-based "combined" distro repository into individual per-package + Git repositories. + - Introduce pq-bb tool. + This is a tool for managing patch-queues for packages maintained in the + BitBake packaging format (.bb recipes). + - Introduce buildpackage-bb tool. + Initial version of the tool for building BitBake packages from Git. + NOTE: The buildpackage-bb tool itself is able to operate even without an + initialized BitBake build environment although the build likely fails in + this case. However, this makes it possible to export the packaging meta + data, for example. 
+ - Introduce clone-bb tool. + This is a new tool for helping to clone remote per-package Git + repositories when working in a BitBake-based "full distro" build + environment. This is useful in the case that individual packages are + actually maintained in per-package Git repositories (like Tizen). That + is, the full distro repository that the developer operates in is + composed of the packaging meta data from the individual per-package + repositories. When wanting to contribute to a package, the developer + would use clone-bb to clone the correct per-package repository and make + his changes there. + NOTE: clone-bb uses the GBP_PACKAGING_REPO variable to determine the remote + repository URI. This variable should be defined in the package recipes + in order to make clone-bb usable. + - Introduce submit-bb tool. + This is a Tizen-specific tool for creating and pushing special submit + tags. + * pq-rpm: better error message + * buildpackage-rpm: look for remote upstream branches. + Consider remote branches, too, while looking for the upstream branch + when trying to guess if a package is native or not. + * rpm packaging changes + - enable CentOS 7. + - include python egg-info in -common. + * rpm: suppress stderr when unpacking src.rpm + * UpstreamSource: suppress stderr from tar in _determine_prefix() + * patchseries: strip numbering when guessing subject from filename + * Changes in logging + - don't propagate to ancestor loggers + - don't initialize handlers in GbpLogger init. + Better compatibility with 3rd party modules that have their own logging + initializations. + * Features/fixes in GitRepository + - GitRepository/__git_inout: correctly handle input error + - GitRepository.create_tag: add 'annotate' argument. + For forcing the creation of annotated tags. Causes an editor to be + spawned if no message is given. + + -- Markus Lehtonen Fri, 06 Feb 2015 10:36:26 +0200 + git-buildpackage (0.6.22) unstable; urgency=medium * [3d8939d] git.vfs: fix close method. Preventing a infinite recursion @@ -470,6 +539,57 @@ git-buildpackage (0.6.16) experimental; urgency=medium -- Guido Günther Mon, 28 Jul 2014 15:57:49 +0200 +git-buildpackage (0.6.15-tizen20140828) unstable; urgency=low + + [ Markus Lehtonen ] + * Rebase on top of upstream version 0.6.15 + * Changes in buildpackage-rpm + - new 'commitish' keyword for spec vcs tag + - notify: catch RuntimeError when importing pynotify + - fix exit code in case of GitRepositoryError + - always create tarball for native packages + - check submodules from the exported treeish + - fix crash when running prebuild hook + - always run cleaner hook + - add --git-no-build option + - don't crash on invalid vcs format strings + - add '--git-native' option + - remove some outdated/unnecessary exception handling + - get rid of prepare_export_dir() + - handle invalid keys in orig prefix + - internal refactoring and fixes + * Changes in rpm-ch + - implement --commit option + - implement --commit-msg option + - rpm-ch: implement --meta-bts option + - rpm.policy: don't allow leading whitespace in bts meta tags + * docs: manpages for the rpm tools + * Misc.
fixes and features + - buildpackage: refactor tarball compression + - gbp-dch: fix handling of the '--meta' option + - import-orig-rpm: download tarball into parent directory + - GitRepository/has_submodules: add treeish argument + - tristate: implement __nonzero__() method + * Logging: + - don't automatically setup gbp-specific logger + - fix auto colorizing for custom streams + * Tests + - multiple fixes and additions in ComponentTestBase class + - testutils: add directories arg to ls_* functions + - add unit tests for buildpackage-rpm + * Packaging + - rpm-packaging: recommend/require unzip + - recommend/require zipmerge + - depend/recommend rpmbuild + + [ Junchun Guan ] + * Add PKGBUILD to support arch linux + + [ Li Jinjing ] + * rpm packaging: use macro for python binary + + -- Markus Lehtonen Thu, 28 Aug 2014 11:40:05 +0300 + git-buildpackage (0.6.15) unstable; urgency=medium * [5cde49a] Revert "Determine build_dir upfront" @@ -519,6 +639,53 @@ git-buildpackage (0.6.13) unstable; urgency=medium -- Guido Günther Thu, 03 Apr 2014 21:31:38 +0200 +git-buildpackage (0.6.12-tizen20140521) unstable; urgency=low + + [ Markus Lehtonen ] + * Rebase on top of upstream version 0.6.12 + * Changes in rpm-ch + - add --message cmdline option + For giving the text for new changelog entry/entries, skipping git commit + messages entirely. + - implement '--all' option + If defined, git-rpm-ch uses all commits in the Git history. Also, + '--since' option is omitted. + - implement --color-scheme option + - use name (string) as the entry author + - internal refactoring + - fix crash in guessing the start commit + * Changes in pq-rpm + - implement --retain-history option + With this option defined gbp tries to preserve the history when + converting. That is, for each commit in the old branch create one + corresponding commit in the new orphan packaging branch. This works by + dumping packaging files and updating patches for each commit. However, + empty commits are not generated - these are caused e.g. by changes in + files that are ignored by patch-generation. + NOTE! Only valid for the 'convert' action. + - convert: don't try to commit empty set of patches + - make sure we always get a sane author + - slight unification of log messages + - decrease (default) verbosity a bit + - don't create pq branch when switching + * Changes in buildpackage-rpm + - buildpackage-rpm: fix desktop notifications + - buildpackage-rpm: remove --purge option + - guess/parse spec as early as possible + * Documentation + - Include man pages in RPM packages + - Add -doc subpackage to RPM packaging + - Build deb and rpm html docs in one docbook + - A multitude of small cosmetic fixes + * Other misc. internal changes + - GitRepository: implement create_tree() method + - rpm: suppress some verbosity when updating patches in spec file + + [ Li Jinjing ] + * Fix missing return value in get_current_branch method + + -- Markus Lehtonen Wed, 21 May 2014 16:32:46 +0300 + git-buildpackage (0.6.12) unstable; urgency=medium * [89f3005] Use a much simpler version to fix the command name in --help @@ -610,6 +777,107 @@ git-buildpackage (0.6.9) unstable; urgency=medium -- Guido Günther Sat, 15 Feb 2014 11:45:00 +0100 +git-buildpackage (0.6.8-tizen20140306) unstable; urgency=low + + [ Markus Lehtonen ] + * Rebase on top of upstream version 0.6.8 + * Introduce git-rpm-ch tool. + Initial version of the git-rpm-ch tool which is intended for maintaining + RPM changelogs. Supports both spec files and separate "OBS style" + changelog files. 
+ Implement '--tag' command line option (and other related options for + signing) for creating and tagging a release. These correspond to the + tagging options in git-buildpackage-rpm. + The git-buildpackage-rpm tool does not commit anything to git. However, + in rpm-ch the '--tag' option causes the changelog modifications (and, + all other staged changes) to be committed to git before creating the + tag. This makes it possible to create a release and document the + packaging/release tag name in the rpm changelog. + * Changes to gbp-pq-rpm: + - implement 'convert' subcommand. + The new subcommand is intended to be a one-time-callable command for + converting a package to use the "orphan-packaging" development model and + git layout (where packaging files are maintained in a separate branch + and code development in another branch). + - implement '--import-files' command line option. + For defining the packaging file(s) that will be imported into the + development/patch-queue branch. + By default, the local gbp conf files are imported (so that gbp sees the + same settings on the development/pq branch as on the packaging branch). + Files defined with this option will appear as one monolithic commit in + the development/patch-queue branch. + - implement --new-packaging-dir cmdline option. + Used for setting the packaging directory to be used in the new orphan + packaging branch. Defaults to --packaging-dir so that the gbp.conf files + are more likely to work without modification. + - fail gracefully on invalid config files + - support '%(upstreamversion)s' as a replaceable string in the pq branch + name. + * Changes to git-buildpackage-rpm + - patch-export from development branch. + Adds git-buildpackage-rpm --patch-export support for the 'orphan + packaging' development model (where packaging files and development sources are kept in separate + branches). + New functionality: + 1. If patch-export is enabled and gbp detects that the current branch + has a development/patch-queue branch it exports the patches from there, + instead of the tip of the packaging branch. + 2. If gbp detects that the current (or exported) branch is a + development/patch-queue branch it automatically enables patch-export + and exports packaging files from the base branch (instead of the + development/patch-queue branch). + Also, add a new '--git-patch-export-rev' command line option with which + the user can explicitly set the treeish from which patches are generated + (i.e. HEAD..) + - fix spec file vcs update when doing git-tag-only + - change default export directory. + So that the git working directory is not dirtied, by default. + * Changes to git-import-orig-rpm: + - disable --merge by default. + Merging upstream to packaging does not usually make sense with rpm + packages. Rather, it can create confusion and cause inapplicable patches to + be generated. + - implement --create-missing-branches option. + - implement automatic downloading. + Automatically try to download the archive if the archive filename points + to a remote location. + - get archive from spec file. + Try to get archive path/filename from spec file if no file name is given + on the command line. This should make version bumps more + straightforward: just update version number in the spec file and run + 'git-import-orig-rpm'. + * git-import-orig: implement --create-missing-branches option. + * Fixes and new features in GitRepository class + - GitRepository/get_commit_info: correctly handle file renames/copies.
+ - GitRepository.create_branch: add 'force' option + - GitRepository.archive: add 'paths' option + - GitRepository/git_command: strip stderr output in case of error. + - GitRepository/add_remote_repo: fix the 'tags' argument. + - GitRepository: allow interactive commands. + - GitRepository: fix the 'edit' mode in commit methods. + - GitRepository.commit_files: add committer_info and edit arguments. + - GitRepository.commit_all: add committer_info argument. + * Fixes and new features in internal rpm/spec support + - rpm.SpecFile: add 'specpath' attribute + - rpm: correctly handle 'NoSource' and 'NoPatch' tags in spec. + - rpm: Implement spec_from_repo() helper function + - rpm: track all "section" directives in spec file. + - rpm: add gbp.rpm.changelog module. + This new module contains basic containers and functionality for parsing + and updating rpm changelogs. It is coupled with the rpm packaging policy + class which now has definitions for rpm changelog formatting. + - rpm: support for getting/setting changelog in spec. + SpecFile objects now support reading and writing of the %changelog + section. + - rpm: store full path of spec file sources + + [ Łukasz Stelmach ] + * Fix detection of prefix directory in tarballs + gbs import failed to import tarball properly if paths in the archive + were prefixed with "./". + + -- Markus Lehtonen Thu, 27 Feb 2014 11:30:59 +0200 + git-buildpackage (0.6.8) unstable; urgency=medium * [f5718b8] No need to document --verbose. Properly document --help @@ -636,6 +904,25 @@ git-buildpackage (0.6.7) unstable; urgency=low -- Guido Günther Fri, 29 Nov 2013 20:52:03 +0100 +git-buildpackage (0.6.6-tizen20131202) unstable; urgency=low + + * Rebase on top of upstream version 0.6.6 + * UpstreamSource: fix archive prefix guessing + * pq-rpm: create diff up to the last merge commit + Before, merge commits broke patch generation, or, (most probably) caused + the generated patches to be unapplicable (similar to how + git-format-patch behaves). + Now, changes up to the last merge commit are squashed into one diff if + merge commits are found in the revision list from which patches are to + be generated. Individual patches (one per commit) are generated from the + last merge commit up to the exported revision. + * pq-rpm: magic word HEAD for patch-squashing + When given as the squash-point, 'HEAD' translates to the end-commit. + This allows one to configure gbp to always squash all commits into one + monolithic diff. 
+ + -- Markus Lehtonen Mon, 02 Dec 2013 12:55:14 +0200 + git-buildpackage (0.6.6) unstable; urgency=low [ Guan Junchun ] @@ -772,6 +1059,75 @@ git-buildpackage (0.6.4) unstable; urgency=low -- Guido Günther Sun, 06 Oct 2013 17:35:14 +0200 +git-buildpackage (0.6.3-tizen20131017) unstable; urgency=low + + [ Lingchaox Xin ] + * Fix `fatal: fetch --all does not take a repository argument` + + [ Markus Lehtonen ] + * UpstreamSource.guess_version: don't check for directories + * UpstreamSource: move version guessing logic to PkgPolicy + * rpm: UpstreamSource: move version guessing logic to PkgPolicy + * UpstreamSource: store the absolute path + * UpstreamSource: check that the sources exist + * UpstreamSource: implement prefix guessing + * rpm: UpstreamSource: implement prefix guessing + * UpstreamSource.pack: reuse unpacked dir if no filter is used + * UpstreamSource.unpack: return UpstreamSource instance + * import orig: import readline in the common module + * rpm: import orig: import readline in the common module + * buildpackage: fix handling of empty tarball prefix + * import-orig: new function for filtering/re-packing sources + * import-orig: rework sourcecode preparation and filtering + * tests.testutils: add ls_zip() helper + * rpm tests: test --filter option of git-import-srpm + * ComponentTestBase: more verbose output in case of branch mismatch + * import-orig-rpm: don't crash on invalid gbp.conf + * import-orig-rpm: remove unneeded try-except + * config: restore mangled env in doctests + * ComponentTestBase: fix env restore in teardown + * ComponentTestBase: keep tmpdir if GBP_TESTS_NOCLEAN is in env + * tests: add tests for git-import-orig-rpm + * ComponentTestBase: use eq_() ok_() from nose.tools + * rpm tests: slight modificatin of import-srpm tests + * Version bump, rebase on top of 0.6.3 + * docs: minor sgml syntax fix + * Disable cover-min-percentage setting of nose + * debian packaging: change to 1.0 source package format + * git: new class and method for remote repositories + * GitRepository.has_remote_repo: use get_remotes method + * GitRepository: deprecate the get_remote_repos method + * pq-rpm: drop unused 'tries' option + * common/pq: support more flexible pq-branch naming + * GitRepository.list_tree: add 'paths' option + * rpm: change guess_spec() to return SpecFile object + * rpm: minor docstring fixes + * rpm.SpecFile: support parsing spec as raw text data + * rpm: support guessing spec file from git treeish + * pq: don't overwrite patches in the same series + * pq-rpm: don't overwrite patches in the same series + * tests: use sanitize some submodule tests + * tests: use eq_ and ok_ from nose tools in submodule tests + * tests: add some docstrings to git submodule tests + * tests: upper case global variables in submodule tests + * tests: remove unused import in submodule tests + * buildpackage: refactor prefix handling + * common/pq: fix length of patch filenames + * pq: fix generation of series file + * pq.write_patch_file: drop unused argument + * pq: pass commit_info to format_patch() + * SpecFile.set_tag: change the default place for new tags + * buildpackage: support 'commit' variable for VCS tags + + [ Zhang Qiang ] + * remove 'repo' param to adapt to changes of write_patch_file + + [ Guan Junchun ] + * Fix gbs clone failure if repo url is like host:repo.git + * Clone a repository to a specific directory + + -- Markus Lehtonen Thu, 17 Oct 2013 13:45:10 +0300 + git-buildpackage (0.6.3) unstable; urgency=low * [60ffe95] --git-hooks doesn't take an argument @@ 
-908,6 +1264,27 @@ git-buildpackage (0.6.0) unstable; urgency=low -- Guido Günther Wed, 26 Jun 2013 16:36:41 +0200 +git-buildpackage (0.6.0git20130530-tizen20130822) unstable; urgency=low + + [ Markus Lehtonen ] + * GitRepository/add_files: consistently update all tracked files + * rpm: enhance spec parsing error messages + * rpm: simplify SpecFile._parse_filtered_spec() + * rpm: implement a wrapper module for rpm lib + * rpm: print a warning if importing gbp-specific librpm fails + * rpm: Improve logging in case of rpmlib error + + [ Lingchaox Xin ] + * GitRepository: Add force option to push method + * GitRepository: Add all option to pull method + * GitRepository: Add refspec option to fetch method + * Fix pylint warnings, follow Markus' sugesstion + * Add tags option in git push command + * Add all_remotes option in git fetch command + * Add clean method to GitRepository class + + -- Markus Lehtonen Thu, 22 Aug 2013 09:47:51 +0300 + git-buildpackage (0.6.0~git20130530) unstable; urgency=low [ Guido Günther ] diff --git a/debian/control b/debian/control index 94710914..6e07e8ad 100644 --- a/debian/control +++ b/debian/control @@ -12,47 +12,45 @@ Build-Depends: perl, pychecker, python (>> 2.6.6-3~), - python-coverage, - python-dateutil, python-epydoc, python-mock, python-nose, - python-nosexcover, python-pkg-resources, python-rpm, python-six, python-setuptools, sgml2x, -# For the testsuite - bzip2, - cpio, - devscripts (>= 2.13.8~), - git (>= 1:1.7.9.1-1~), - pristine-tar, - rpm, - unzip, - zipmerge Standards-Version: 3.9.6 Vcs-Git: git://honk.sigxcpu.org/git/git-buildpackage.git Vcs-Browser: https://git.sigxcpu.org/cgit/git-buildpackage/ Homepage: https://honk.sigxcpu.org/piki/projects/git-buildpackage/ X-Python-Version: >= 2.6 -Package: git-buildpackage + +Package: git-buildpackage-common Architecture: all Depends: ${python:Depends}, ${shlibs:Depends}, ${misc:Depends}, - devscripts (>= 2.13.5~), git (>= 1:1.7.9.1-1~), man-db, - python-dateutil, python-pkg-resources, python-six, Recommends: pristine-tar (>= 0.5), - cowbuilder | pbuilder | sbuild, python-requests -Suggests: python-notify, unzip, sudo +Suggests: python-notify, unzip +Description: Suite to help with packaging in Git repositories + This package contains the common API and scripts for Debian and rpm packaging + +Package: git-buildpackage +Architecture: all +Depends: ${python:Depends}, + ${shlibs:Depends}, + ${misc:Depends}, + devscripts, + git-buildpackage-common (= ${binary:Version}) +Recommends: cowbuilder | pbuilder | sbuild, +Suggests: sudo Description: Suite to help with Debian packages in Git repositories This package contains the following tools: * gbp import-{dsc,dscs}: import existing Debian source packages into a git @@ -70,11 +68,10 @@ Architecture: all Depends: ${python:Depends}, ${misc:Depends}, cpio, - git-buildpackage (= ${binary:Version}), + git-buildpackage-common (= ${binary:Version}), python-rpm, rpm, -Recommends: pristine-tar (>= 0.5) -Suggests: python-notify, unzip, zipmerge, mock +Suggests: zipmerge, mock Description: Suite to help with RPM packages in Git repositories This package contains the following tools: * gbp buildpackage-rpm: build a package out of a git repository, check for @@ -82,6 +79,20 @@ Description: Suite to help with RPM packages in Git repositories * gbp import-srpm: import existing RPM source packages into a git repository * gbp pq-rpm: manage patches easily + * git-import-orig-rpm: import a new upstream version into the git repository . 
These tools are currently in an experimental state. Branch names and repository layouts might change without lots of prior warning. + +Package: git-buildpackage-bb +Architecture: all +Depends: ${python:Depends}, + ${misc:Depends}, + git-buildpackage-common (= ${binary:Version}), + git-buildpackage-rpm (= ${binary:Version}), +Recommends: bitbake +Description: Suite to help with BitBake builds from Git repositories + This package contains the following tools: + * gbp import-bb: import sources from distribution repository + * gbp buildpackage-bb: build a package out of a Git repository + * gbp pq-bb: manage separate development and packaging branches diff --git a/debian/git-buildpackage-bb.install b/debian/git-buildpackage-bb.install new file mode 100644 index 00000000..39b39afa --- /dev/null +++ b/debian/git-buildpackage-bb.install @@ -0,0 +1,2 @@ +usr/lib/python2.?/dist-packages/gbp/bb/ +usr/lib/python2.?/dist-packages/gbp/scripts/*bb*.py* diff --git a/debian/doc-base b/debian/git-buildpackage-common.doc-base similarity index 69% rename from debian/doc-base rename to debian/git-buildpackage-common.doc-base index a7d60d01..b377b272 100644 --- a/debian/doc-base +++ b/debian/git-buildpackage-common.doc-base @@ -7,5 +7,5 @@ Abstract: git-buildpackage is a suite to help with Debian packages in Git Section: Programming Format: HTML -Index: /usr/share/doc/git-buildpackage/manual-html/index.html -Files: /usr/share/doc/git-buildpackage/manual-html/*.html +Index: /usr/share/doc/git-buildpackage-common/manual-html/index.html +Files: /usr/share/doc/git-buildpackage-common/manual-html/*.html diff --git a/debian/docs b/debian/git-buildpackage-common.docs similarity index 100% rename from debian/docs rename to debian/git-buildpackage-common.docs diff --git a/debian/examples b/debian/git-buildpackage-common.examples similarity index 100% rename from debian/examples rename to debian/git-buildpackage-common.examples diff --git a/debian/git-buildpackage-common.install b/debian/git-buildpackage-common.install new file mode 100644 index 00000000..b2d6bf65 --- /dev/null +++ b/debian/git-buildpackage-common.install @@ -0,0 +1,22 @@ +usr/bin/gbp +usr/lib/python2.?/dist-packages/gbp-* +usr/lib/python2.?/dist-packages/gbp/command_wrappers.py +usr/lib/python2.?/dist-packages/gbp/config.py +usr/lib/python2.?/dist-packages/gbp/errors.py +usr/lib/python2.?/dist-packages/gbp/format.py +usr/lib/python2.?/dist-packages/gbp/git/ +usr/lib/python2.?/dist-packages/gbp/__init__.py +usr/lib/python2.?/dist-packages/gbp/log.py +usr/lib/python2.?/dist-packages/gbp/notifications.py +usr/lib/python2.?/dist-packages/gbp/patch_series.py +usr/lib/python2.?/dist-packages/gbp/pkg/ +usr/lib/python2.?/dist-packages/gbp/scripts/clone.py +usr/lib/python2.?/dist-packages/gbp/scripts/common/ +usr/lib/python2.?/dist-packages/gbp/scripts/config.py +usr/lib/python2.?/dist-packages/gbp/scripts/__init__.py +usr/lib/python2.?/dist-packages/gbp/scripts/pull.py +usr/lib/python2.?/dist-packages/gbp/scripts/supercommand.py +usr/lib/python2.?/dist-packages/gbp/tmpfile.py +usr/lib/python2.?/dist-packages/gbp/tristate.py +usr/lib/python2.?/dist-packages/gbp/version.py +etc/git-buildpackage/gbp.conf diff --git a/debian/git-buildpackage-common.links b/debian/git-buildpackage-common.links new file mode 100644 index 00000000..e74ac0b9 --- /dev/null +++ b/debian/git-buildpackage-common.links @@ -0,0 +1 @@ +/usr/share/doc/git-buildpackage-common/manual-html/gbp.html /usr/share/doc/git-buildpackage-common/manual-html/index.html diff --git 
a/debian/git-buildpackage-common.manpages b/debian/git-buildpackage-common.manpages new file mode 100644 index 00000000..91ebab67 --- /dev/null +++ b/debian/git-buildpackage-common.manpages @@ -0,0 +1,5 @@ +docs/gbp.1 +docs/gbp-clone.1 +docs/gbp.conf.5 +docs/gbp-config.1 +docs/gbp-pull.1 diff --git a/debian/git-buildpackage-rpm.install b/debian/git-buildpackage-rpm.install index 54d7f958..5c9d5392 100644 --- a/debian/git-buildpackage-rpm.install +++ b/debian/git-buildpackage-rpm.install @@ -1,5 +1,3 @@ usr/bin/gbp-builder-mock /usr/share/git-buildpackage/ usr/lib/python2.?/dist-packages/gbp/rpm/ -usr/lib/python2.7/dist-packages/gbp/scripts/import_srpm.py -usr/lib/python2.7/dist-packages/gbp/scripts/pq_rpm.py -usr/lib/python2.7/dist-packages/gbp/scripts/buildpackage_rpm.py +usr/lib/python2.?/dist-packages/gbp/scripts/*rpm*.py* diff --git a/debian/git-buildpackage.install b/debian/git-buildpackage.install index 1cf7c4ca..73c25944 100644 --- a/debian/git-buildpackage.install +++ b/debian/git-buildpackage.install @@ -1,32 +1,10 @@ -usr/bin/gbp usr/bin/git-pbuilder -usr/lib/python2.?/dist-packages/gbp-* -usr/lib/python2.?/dist-packages/gbp/command_wrappers.py -usr/lib/python2.?/dist-packages/gbp/config.py usr/lib/python2.?/dist-packages/gbp/dch.py usr/lib/python2.?/dist-packages/gbp/deb/ -usr/lib/python2.?/dist-packages/gbp/errors.py -usr/lib/python2.?/dist-packages/gbp/format.py -usr/lib/python2.?/dist-packages/gbp/git/ -usr/lib/python2.?/dist-packages/gbp/__init__.py -usr/lib/python2.?/dist-packages/gbp/log.py -usr/lib/python2.?/dist-packages/gbp/notifications.py -usr/lib/python2.?/dist-packages/gbp/patch_series.py -usr/lib/python2.?/dist-packages/gbp/pkg/ usr/lib/python2.?/dist-packages/gbp/scripts/buildpackage.py -usr/lib/python2.?/dist-packages/gbp/scripts/clone.py -usr/lib/python2.?/dist-packages/gbp/scripts/common/ -usr/lib/python2.?/dist-packages/gbp/scripts/config.py usr/lib/python2.?/dist-packages/gbp/scripts/create_remote_repo.py usr/lib/python2.?/dist-packages/gbp/scripts/dch.py usr/lib/python2.?/dist-packages/gbp/scripts/import_dsc.py usr/lib/python2.?/dist-packages/gbp/scripts/import_dscs.py usr/lib/python2.?/dist-packages/gbp/scripts/import_orig.py -usr/lib/python2.?/dist-packages/gbp/scripts/__init__.py usr/lib/python2.?/dist-packages/gbp/scripts/pq.py -usr/lib/python2.?/dist-packages/gbp/scripts/pull.py -usr/lib/python2.?/dist-packages/gbp/scripts/supercommand.py -usr/lib/python2.?/dist-packages/gbp/tmpfile.py -usr/lib/python2.?/dist-packages/gbp/tristate.py -usr/lib/python2.?/dist-packages/gbp/version.py -etc/git-buildpackage/gbp.conf diff --git a/debian/git-buildpackage.manpages b/debian/git-buildpackage.manpages index 9dd9cc36..fc937453 100644 --- a/debian/git-buildpackage.manpages +++ b/debian/git-buildpackage.manpages @@ -1,13 +1,8 @@ -docs/gbp.1 docs/gbp-buildpackage.1 -docs/gbp-clone.1 -docs/gbp.conf.5 -docs/gbp-config.1 docs/gbp-create-remote-repo.1 docs/gbp-dch.1 docs/gbp-import-dsc.1 docs/gbp-import-dscs.1 docs/gbp-import-orig.1 docs/gbp-pq.1 -docs/gbp-pull.1 docs/git-pbuilder.1 diff --git a/debian/manpages b/debian/manpages deleted file mode 100644 index 09c93b1d..00000000 --- a/debian/manpages +++ /dev/null @@ -1,2 +0,0 @@ -docs/*.1 -docs/*.5 diff --git a/debian/rules b/debian/rules index 405bb88d..f668f9aa 100755 --- a/debian/rules +++ b/debian/rules @@ -1,5 +1,9 @@ #!/usr/bin/make -f - + +# HACK: disable checks, i.e. 
unit tests +DEB_BUILD_OPTIONS += nocheck +export WITHOUT_NOSETESTS := 1 + EXAMPLE_SCRIPTS=\ gbp-add-patch \ gbp-cowbuilder-sid \ @@ -46,6 +50,11 @@ override_dh_auto_install: override_dh_auto_clean: dh_auto_clean + # Remove renamed files - renames are not correctly expressed in debian.diff + rm -f debian/doc-base + rm -f debian/docs + rm -f debian/examples + rm -f debian/manpages rm -rf build/ make -C docs/ clean -rm gbp/version.py diff --git a/debian/source/format b/debian/source/format index 89ae9db8..d3827e75 100644 --- a/debian/source/format +++ b/debian/source/format @@ -1 +1 @@ -3.0 (native) +1.0 diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 00000000..6862544b --- /dev/null +++ b/debian/source/options @@ -0,0 +1,2 @@ +extend-diff-ignore = "^(tests/data/rpm/.*|tests/component/rpm/data/.*|tests/component/deb/data/.*)" +diff-ignore diff --git a/docs/Makefile b/docs/Makefile index e6a349f1..76a64fe6 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -15,6 +15,8 @@ MAN1S = \ gbp-buildpackage-rpm \ gbp-import-srpm \ gbp-pq-rpm \ + gbp-rpm-ch \ + gbp-import-orig-rpm \ $(NULL) MAN5S = gbp.conf @@ -34,9 +36,11 @@ GTK_DOC_CATALOG_FILE ?= /usr/share/sgml/gtk-doc/gtk-doc.cat all: $(MANUAL) $(MANPAGES) -$(MANUAL): manual.sgml chapters/*.sgml manpages/*.sgml +$(MANUAL): main.sgml manual.sgml manual-rpm.sgml chapters/*.sgml manpages/*.sgml ifeq ($(HAVE_SGML2X),1) docbook-2-html -s local $< + rm -rf $(MANUAL) + mv main-html $(MANUAL) else docbook2html -d local-nosgml2x.dsl -c $(GTK_DOC_CATALOG_FILE) -o $(MANUAL) $< endif @@ -58,7 +62,7 @@ manpage.refs: $(BUILD_MAN_XREF_PAGES) git-pbuilder.1: ../bin/git-pbuilder pod2man $< $@ -manual.sgml: $(VERSION_ENT) +main.sgml: $(VERSION_ENT) $(VERSION_ENT): $(GBP_VERSION) echo '' > $(VERSION_ENT) diff --git a/docs/chapters/building-rpm.sgml b/docs/chapters/building-rpm.sgml new file mode 100644 index 00000000..866cd3f9 --- /dev/null +++ b/docs/chapters/building-rpm.sgml @@ -0,0 +1,132 @@ + + Building RPM packages from the &git; repository + + The &gbp-buildpackage-rpm; tool is used for building. + The tool creates the source tarball and copies it and the packaging files + to a separate build directory and builds the package there. By default, + &rpmbuild; is used as the builder command. You can define a different + builder command with the option. + + + + To build the (non-native) package when on the packaging branch, + using pristine-tar to create the upstream tarball: + + $ &gbp-buildpackage-rpm; --pristine-tar + + During development, if you have an unclean git tree (untracked files and/or + uncommitted changes) you may use: + + $ &gbp-buildpackage-rpm; --git-ignore-untracked + + or: + + $ &gbp-buildpackage-rpm; --git-ignore-new + + + + + Git-buildpackage-rpm always builds in a separate build directory + (./rpmbuild/ by default). You may change that and also + build a different revision than your current branch HEAD. The revision can + be any git "commit-ish", i.e. a branch or tag name or a commit sha1. + Git-buildpackage also supports some "special revisions": + INDEX refers to the current index, + WC or WC.IGNORED refer to the current working + copy with all (even untracked and ignored) files, + WC.TRACKED refers to the current working copy of the files + tracked by git, and + WC.UNTRACKED refers to the current working copy of all files + excluding ignored files.
+ Some examples: + + $ &gbp-buildpackage-rpm; --git-export-dir=/home/user/rpmbuild + $ &gbp-buildpackage-rpm; --git-export-dir=/home/user/rpmbuild --git-export=v1.2.3 + $ &gbp-buildpackage-rpm; --git-export=WC.UNTRACKED + $ &gbp-buildpackage-rpm; --git-export=INDEX + $ &gbp-buildpackage-rpm; --git-export=feature/cool-new-thing + $ &gbp-buildpackage-rpm; --git-export=8d55173610f + + + + + Automatic patch generation + + When developing a non-native package with packaging and sources + in the same branch + (see ) + you usually want &gbp-buildpackage-rpm; to automatically generate + patches. In this mode, &gbp-buildpackage-rpm; generates the upstream + tarball and copies packaging files to the build dir. After that it + generates patches from commits between + + + + Working with a separate development branch + + When developing a non-native package with packaging data and source + code in separate branches + (see ) + you use the &gbp-pq-rpm; tool to handle the patches. You work on the + source code on the development branch and then export + the patches to the packaging branch when building the RPM package. + + + + Create a development (or patch-queue) branch + by applying the patches in the current packaging branch on top of the + upstream version. This will create a new branch, e.g. + development/master assuming your current branch is + master. Simply: + + $ &gbp-pq-rpm; import + + Now you can develop normally on the development branch (add, remove, + rebase, amend commits). Just make sure you stay based on the correct + upstream version, if doing git-rebase. After you're happy with your + changes and you're ready to build an RPM package, you have to + export the patches with &gbp-pq-rpm;. This will change + back to your packaging branch, generate patches from commits + between upstream and the HEAD of the development branch + and update the spec file with the new patches: + + $ &gbp-pq-rpm; export + + Commit the changes to the packaging branch, and build. For example: + + $ git add *patch *spec + $ git commit -a + $ &gbp-buildpackage-rpm; + + Of course you can build even without committing by using the + + + + Moving to a new upstream version is basically simple. Assuming you + have imported/pulled the new upstream version to your git-tree, just: + + $ git checkout master + # Edit the spec file and change the 'Version:' tag to the new upstream version + $ vim *spec + $ git commit *spec + $ &gbp-pq-rpm; rebase + + However, if the patches do not apply cleanly, you have to manually + apply and resolve the patches. + + + + diff --git a/docs/chapters/chapters-rpm.ent b/docs/chapters/chapters-rpm.ent new file mode 100644 index 00000000..165e0c02 --- /dev/null +++ b/docs/chapters/chapters-rpm.ent @@ -0,0 +1,3 @@ + + + diff --git a/docs/chapters/development-rpm.sgml b/docs/chapters/development-rpm.sgml new file mode 100644 index 00000000..1ae51251 --- /dev/null +++ b/docs/chapters/development-rpm.sgml @@ -0,0 +1,146 @@ + + Development flow + + + Development models + + The &gbp-buildpackage-rpm; toolset basically supports three different + models of package maintenance. + + + + Native package + + This means that you are the upstream; there is no separate + upstream with which you have to sync. Basically, only + + + + + Upstream package, alternative 1: packaging and sources in + the same branch + + This roughly corresponds to Debian-style package maintenance.
+ All changes (packaging and source code) are done to the same + branch, i.e., the + + + + Upstream package, alternative 2: packaging and sources in + separate branches + + In this model packaging files (spec and patches) are held in + + + + + + Starting from scratch with a non-native package + + In this case, you most probably want to package software not yet + found in your distro. First, create an empty repository: + + $ mkdir mypackage + $ cd mypackage + $ git init + + Then, import the upstream sources, create the packaging/development + branch and add the rpm packaging files. You have two choices: + + + + + packaging files and development sources in the same branch + + $ git-import-orig-rpm ../mypackage.tar.gz + # Optionally (recommended): add gbp.conf + $ vim .gbp.conf && git add .gbp.conf && git commit -m"Add gbp.conf" + # Add packaging files to source tree under subdir 'packaging' + $ mkdir packaging && cd packaging + $ vim mypackage.spec + $ git add . + $ git commit -m"Add packaging files" + + + + + development sources and packaging files in separate branches + + $ git-import-orig-rpm --no-merge ../mypackage.tar.gz + # Optionally (recommended): add gbp.conf + $ vim .gbp.conf && git add .gbp.conf && git commit -m"Add gbp.conf" + # Add packaging files (to root of master branch) + $ vim mypackage.spec + $ git add . + $ git commit -m"Add packaging files" + + + + + + + Converting an existing git repository of a non-native package</> + <para> + In this case, you already have a git repository containing the + upstream source, but it was created neither with &gbp-clone; nor + &gbp-import-srpm;. + You need to have a separate branch for upstream sources. + If you already have that, you can simply rename that branch to the + default upstream-branch: + <screen> + $ <command>git branch</> -m my-old-upstream-branch upstream + </screen> + OR just add the name of your upstream branch to gbp.conf. + Then, you just create a packaging/development branch(es) with git and + add packaging files to the packaging branch. If you want to maintain + sources and packaging in the same branch + (<xref linkend="gbp.rpm.development.models.nonnative1">) + do something like: + <screen> + $ <command>git checkout</> -b master upstream + # Optionally (recommended): add gbp.conf + $ <command>vim</> .gbp.conf && <command>git add</> .gbp.conf && <command>git commit</> -m"Add gbp.conf" + # Add packaging files to source tree, add and commit the packaging files + # ... 
+ </screen> + If you want to maintain development sources and packaging in separate + branches + (<xref linkend="gbp.rpm.development.models.nonnative2">): + <screen> + $ <command>git checkout</> --orphan master + $ <command>rm</> .git/index + $ <command>git commit</> --allow-empty -m"Create packaging branch" + # Optionally (recommended): add gbp.conf + $ <command>vim</> .gbp.conf && <command>git add</> .gbp.conf && <command>git commit</> -m"Add gbp.conf" + # Next, add and commit the packaging files (.spec etc) + $ <command>vim</> mypackage.spec && <command>git add</> mypackage.spec && <command>git commit</> -m"Add packaging files" + # Now, you can create the development branch (and import possible patches) + $ &gbp-pq-rpm; import + </screen> + </para> + </sect1> +</chapter> + diff --git a/docs/chapters/intro-rpm.sgml b/docs/chapters/intro-rpm.sgml new file mode 100644 index 00000000..8cc04719 --- /dev/null +++ b/docs/chapters/intro-rpm.sgml @@ -0,0 +1,104 @@ +<chapter id="gbp.rpm.intro"> + <title>Introduction + + + Git-buildpackage is a Debian + toolset for maintaining and building packages in/from git repositories. + The still experimental RPM variants of the tools extend the support + from Debian-only to building and maintaining RPM packages, too. + The documentation of git-buildpackage-rpm here reflects the + Debian git-buildpackage documentation + + + + + The RPM versions of the tools can in their current state + do basically all the same tasks as the Debian versions, except for changelog generation. + However, the philosophy is somewhat different in some parts. The RPM tools + read the .spec file instead of the changelog when determining packaging + information (version number, name etc). Another clear difference is + that &gbp-buildpackage-rpm; will always build in a separate build + directory whereas &gbp-buildpackage; (the Debian tool) builds in the + git working dir by default. + A third, conceptual difference (for non-native packages) is that you may + have packaging files in an orphan branch, without development sources: + i.e. you develop code in a 'patch-queue' branch that doesn't contain any + packaging files, and do 'pq-rpm export' to a 'packaging' branch that only + contains packaging files (.spec file, patches etc.) but no sources. + The Debian/RPM tool equivalence is: + + &gbp-buildpackage; -> RPM: &gbp-buildpackage-rpm; + &gbp-import-dsc; -> RPM: &gbp-import-srpm; + &gbp-import-orig; -> RPM: &gbp-import-orig-rpm; + &gbp-pq; -> RPM: &gbp-pq-rpm; + &gbp-clone; -> RPM: &gbp-clone; (the same tool) + &gbp-pull; -> RPM: &gbp-pull; (the same tool) + &gbp-dch; -> RPM: not available + &gbp-import-dscs; -> RPM: not available + + + + + RPM Repository Layout + + + The required repository layout is similar to Debian: basically the only + requirement is that non-native packages must have clean upstream sources + in a separate branch. Other branches are: + + + + + + + + + + + + + + + + + + + RPM Workflow + + + The basic workflow is very similar to Debian: + + + + + Import a package via &gbp-import-srpm; OR clone from the + distro git with &gbp-clone; if the package is already maintained with + &gbp-buildpackage-rpm;. + + + + Develop, test, commit changes. + Once satisfied, you can build the final package with + &gbp-buildpackage-rpm; (optionally with --git-tag to create a tag in + git) and push it to the git server.
+ + + + + diff --git a/docs/common.ent b/docs/common.ent index 6a25178e..34c4b720 100644 --- a/docs/common.ent +++ b/docs/common.ent @@ -33,6 +33,8 @@ gbp buildpackage-rpm"> gbp import-srpm"> gbp pq-rpm"> + gbp import-orig-rpm"> + gbp rpm-ch"> rpmbuild"> gbp-builder-mock"> wget"> diff --git a/docs/main.sgml b/docs/main.sgml new file mode 100644 index 00000000..bd1d97d7 --- /dev/null +++ b/docs/main.sgml @@ -0,0 +1,27 @@ + + %COMMON; + + %VERSION; + + %MANPAGES; + + %CHAPTERS; + + %CHAPTERS.RPM; + + + +]> + + + + Building Packages with git-buildpackage suite + Version: &gbp-version; + + + &book.debian; + &book.rpm; + + diff --git a/docs/man.gbp-import-orig-rpm.sgml b/docs/man.gbp-import-orig-rpm.sgml new file mode 100644 index 00000000..621b11da --- /dev/null +++ b/docs/man.gbp-import-orig-rpm.sgml @@ -0,0 +1,11 @@ + + %COMMON; + + %MANPAGES; +]> + + +git-buildpackage-rpm Manual +&man.gbp.import.orig.rpm; + diff --git a/docs/man.gbp-rpm-ch.sgml b/docs/man.gbp-rpm-ch.sgml new file mode 100644 index 00000000..63fed8d0 --- /dev/null +++ b/docs/man.gbp-rpm-ch.sgml @@ -0,0 +1,11 @@ + + %COMMON; + + %MANPAGES; +]> + + +git-buildpackage-rpm Manual +&man.gbp.rpm.ch; + diff --git a/docs/manpages/gbp-buildpackage-rpm.sgml b/docs/manpages/gbp-buildpackage-rpm.sgml index b7138c8e..de3bfee1 100644 --- a/docs/manpages/gbp-buildpackage-rpm.sgml +++ b/docs/manpages/gbp-buildpackage-rpm.sgml @@ -19,6 +19,7 @@ &gbp-buildpackage-rpm; + [auto|on|off] @@ -29,6 +30,7 @@ =[auto|on|off] TREEISH BRANCH_NAME + BRANCH_NAME BUILD_CMD @@ -52,6 +54,7 @@ TREEISH DIRECTORY FILEPATH + PREFIX =DIRECTORY =DIRECTORY @@ -63,6 +66,13 @@ =ARCHITECTURE =OPTIONS =ROOT + + TREEISH + + THRESHOLD + REGEX + COMMITISH + =TAG_FORMAT @@ -142,6 +152,15 @@ + + + + + + Don't abort if there are untracked files in the source tree. + + + @@ -266,6 +285,28 @@ + + =BRANCH_NAME + + + + Name (format string) of the patch-queue/development branch. This + makes building easier when working with separate packaging and + development branches. + + + If is enabled and + &gbp-buildpackage-rpm; detects that the current branch has a + patch-queue/development branch it exports the patches from there + instead of the tip of the current branch (unless + is defined, of course). + Similarly, if the current branch is a patch-queue/development branch + &gbp-buildpackage-rpm; will automatically enable patch-export and + export packaging files from the packaging branch instead of the + current branch (unless ) is defined. + + + @@ -457,8 +498,12 @@ Instead of exporting the current branch head, export the treeish object TREEISH. The special name INDEX exports the current index, - WC) exports all files in the - current working directory. + WC.TRACKED exports all files tracked by + Git in the current working copy as is, + WC.UNTRACKED exports all untracked files + too whereas WC (or + WC.IGNORED) exports all files in the + current working directory, even ignored files. @@ -522,6 +567,17 @@ + + PREFIX + + + + Prefix (directory) to be used when generating tarballs. Special value + auto causes &gbp-buildpackage-rpm; to + guess the prefix. + + + @@ -610,6 +666,97 @@ + + + + + + Create patches from the commits between the upstream version and + export-treeish. That is, after exporting packaging files (from the + pacakging directory) &gbp-buildpackage-rpm; creates one patch per + commit (similar to git-format-patch) and updates the spec file in the + export dir. You use to + specify the tip commit of the patch series. 
+ + + + + TREEISH + + + + Use TREEISH as the tip commit of the patch + series instead of the default - i.e. treeish from which the packaging + files are exported (which is defined with + . + + + + + + + + + Whether the patch files should start with a number or not. + + + + + THRESHOLD + + + + Compress (auto-generated) patches larger than given + THRESHOLD bytes. Special value 0 disabled + patch compression. + + + + + REGEX + + + + Exclude changes to path(s) matching REGEX + in patch generation. + + + + + COMMITISH + + + + Squash commits up to the given COMMITISH + into one monolitic diff. Could be used if one wants to squash commits + from an upstream release up to a stable update into a single diff + (commits on top of the stable would generate one patch per commit as + usual). The format is '<commit_ish>[:<filename_base>]', + i.e. commitish optionally followed by a colon and the desired + filename base for the diff (suffix '.diff' is automatically added by + &gbp-buildpackage-rpm;). Magic word 'HEAD' translates to the + patch-export-treeish when given as the squash-point. This allows one + to configure gbp to always squash all commits into one monolithic + diff. + + + + + =TAG_FORMAT + + + + &gbp-buildpackage-rpm; always automatically sets/updates the 'VCS:' + tag in the spec file after exporting. This option defines the format + string for the 'VCS:' tag. An empty value causes no 'VCS:' tag to be + inserted and possible old 'VCS:' tag to be removed. Otherwise, the + old 'VCS:' tag is updated or a new 'VCS:' tag is added if one does + not exist. In the format string '%(tagname)s' expands to the long tag + name (from git-describe) and '%(commit)s' expans to the sha1 of the + exported commit. + + + @@ -625,7 +772,7 @@ changes included. - &gbp-buildpackage-rpm; --git-ignore-branch --git-export=WC + &gbp-buildpackage-rpm; --git-ignore-branch --git-export=WC.UNTRACKED @@ -639,6 +786,8 @@ , , + , + , rpmbuild 8 diff --git a/docs/manpages/gbp-buildpackage.sgml b/docs/manpages/gbp-buildpackage.sgml index 29c97d01..b1dac697 100644 --- a/docs/manpages/gbp-buildpackage.sgml +++ b/docs/manpages/gbp-buildpackage.sgml @@ -19,6 +19,7 @@ &gbp-buildpackage; + [auto|on|off] @@ -42,6 +43,7 @@ COMMAND COMMAND COMMAND + tag-format tag-format @@ -150,6 +152,16 @@ + + + + + + Don't abort if there are untracked files in the source tree. + Uncommitted changes to tracked files cause an error normally. + + + @@ -419,6 +431,16 @@ + + + + + + Enable builder. Note: causes the + postbuild hook to be disabled, too. + + + @@ -508,9 +530,13 @@ Instead of exporting the current branch head, export the treeish object TREEISH. The special name - INDEX exports the current index whereas - the special name WC exports the current - working copy as is. + INDEX exports the current index, + WC.TRACKED exports all files tracked by + Git in the current working copy as is, + WC.UNTRACKED exports all untracked files + too whereas WC (or + WC.IGNORED) exports all files in the + current working directory, even ignored files. diff --git a/docs/manpages/gbp-clone.sgml b/docs/manpages/gbp-clone.sgml index 8c3cbdb6..1efbca5c 100644 --- a/docs/manpages/gbp-clone.sgml +++ b/docs/manpages/gbp-clone.sgml @@ -24,7 +24,10 @@ &man.common.options.synopsis; - branch_name + + branch_name + branch_name + branch_name depth repository @@ -62,6 +65,14 @@ developed on, default is master. + + =branch_name + + + The branch the packaging is being maintained on. + Alternative to the --debian-branch option. 
+ + =branch_name diff --git a/docs/manpages/gbp-import-orig-rpm.sgml b/docs/manpages/gbp-import-orig-rpm.sgml new file mode 100644 index 00000000..8e2635be --- /dev/null +++ b/docs/manpages/gbp-import-orig-rpm.sgml @@ -0,0 +1,324 @@ + + +
+ &rpm-email; +
+ + &rpm-firstname; + &rpm-surname; + +
+ + gbp-import-orig-rpm + &rpm-mansection; + + + git-import-orig-rpm + gbp-import-orig-rpm + Import an upstream source into a git repository. + + + + &gbp-import-orig-rpm; + + &man.common.options.synopsis; + =VENDOR + VERSION + + BRANCH-NAME + BRANCH-NAME + DIRECTORY + + TAG-NAME + + GPG-KEYID + TAG-FORMAT + PATTERN + + + + + + + + UPSTREAM-SOURCE + + + + + DESCRIPTION + + &gbp-import-orig-rpm; is an basically identical to the &gbp-import-orig; + tool, with only some rpm-specific functionality added and some + Debian-specific functionality removed. + + + &gbp-import-orig-rpm; imports UPSTREAM-SOURCE + into the &git; repository. UPSTREAM-SOURCE can + either be a gzip, bzip2, lzma or xz compressed tar archive, a zip archive + or an already unpacked source tree. If it is already of the form + package-name-version.tar.gz, the version + information is read from the tarball's filename otherwise it can be given + on the command line via . If the source + package name or version can't be determined &gbp-import-orig-rpm; will + prompt for it unless is given. + + + &gbp-import-orig-rpm; tries to download the archive from a remote server if + a remote URL is given. In addition, if no + UPSTREAM-SOURCE is given &gbp-import-orig-rpm; + takes the archive URI from the spec file - this makes it possible to import + a new upstream version just by bumping the version number in the spec file + and running &gbp-import-orig-rpm; (assuming that the spec file contains + a full URL for the archive and its filename automatically follows the + package version e.g. by using the %{version} macro, of course). + + + The sources are placed on the upstream branch (default: + upstream) and tagged. + + + + OPTIONS + + &man.common.options.description; + + + =VENDOR + + + + Distribution vendor name. + + + + + =VERSION + VERSION + + + The upstream version number. + + + + + + + + Merge the upstream branch to the packaging branch after import. + + + + + =BRANCH-NAME + + + + The branch in the Git repository the upstream sources are put + onto. Default is upstream. + + + + + =BRANCH-NAME + + + + The branch in the Git repository the package is being developed on, + default is master. After importing the new + sources on the upstream branch, &gbp-import-orig-rpm; will try to + merge the new version onto this branch. + + + + + DIRECTORY + + + + Subdirectory that contains the RPM packaging files. + &gbp-import-orig-rpm; uses this to try to find a spec file which, in + turn, is used to get the upstream source archive URI if one is not + specified on the command line. + + + + + + + + + Create missing upstream branch if it does not exist. + + + + + =TAG-NAME + + + + Add TAG-NAME as additional parent to the + commit of the upstream tarball. Useful when upstream uses git and you + want to link to it's revision history. + + + + + + + + + GPG sign all created tags. + + + + + GPG-KEYID + + + + Use this keyid for gpg signing tags. + + + + + TAG-FORMAT + + + + Use this tag format when tagging upstream versions, + default is upstream/%(version)s. + + + + + MSG-FORMAT + + + + Use this format string for the commit message when importing upstream + versions, default is + Imported Upstream version %(version)s. + + + + + PATTERN + + + + Filter out files glob-matching pattern. Can be given multiple times. + + + + + + + + + Generate pristine-tar delta file. + + + + + + + + + If using a filter also filter the files out of the tarball + passed to pristine-tar. + + + + + FILENAME + + + + Filename to record to pristine-tar. 
This does not alter the tarball + content, just the filename with which the tarball can be checked out + with pristine-tar. + + + + + PREFIX + + + + Prefix (directory) to be used when importing sources into + pristine-tar. Only takes effect when + is used. Special value auto causes &gbp-import-orig-rpm; to guess + the prefix when importing unpacked sources, or, not to change the + prefix when importing source archives. + + + + Using this option will alter the source archive that is imported to + pristine-tar! That is, pristine-tar does not produce an identical + copy of the original tarball (but the mangled tarball, instead). + + + + + + + + + Run CMD after the import. + + + + + + + + Run the command interactively, i.e. ask for package name and version if + needed. + + + + + + + EXAMPLES + + Download and import a new upstream version using the information from the + spec file + + + &gbp-import-orig-rpm; + + + After manually downloading an upstream tarball, import it + + + &gbp-import-orig-rpm; ../upstream-tarball-0.1.tar.gz + + + Import unpacked sources + + + &gbp-import-orig-rpm; --orig-prefix=upstream-0.1 ../upstream/ + + + + &man.gbp.config-files; + + + SEE ALSO + + , + , + , + , + &man.seealso.common; + + + + AUTHOR + + &rpm-username; &rpm-email; + + +
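The spec-driven import described above can be illustrated with a short, hypothetical session (a sketch only; the package name, version and packaging directory are made up, and it assumes the spec's Source tag carries a full download URL whose filename follows the package version):

  # Bump the version in the spec file (any editor works; sed is used here for brevity)
  $ sed -i 's/^Version:.*$/Version: 0.2/' packaging/mypackage.spec
  # With no arguments, gbp import-orig-rpm takes the archive URI from the spec,
  # downloads mypackage-0.2.tar.gz and imports it onto the upstream branch
  $ gbp import-orig-rpm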
diff --git a/docs/manpages/gbp-import-orig.sgml b/docs/manpages/gbp-import-orig.sgml index 5049a9ed..4ac355ad 100644 --- a/docs/manpages/gbp-import-orig.sgml +++ b/docs/manpages/gbp-import-orig.sgml @@ -26,6 +26,7 @@ [merge|replace] branch_name branch_name + tag-format gpg-keyid @@ -131,6 +132,15 @@
+ + + + + + Create missing upstream branch if it does not exist. + + + =tag-format diff --git a/docs/manpages/gbp-import-srpm.sgml b/docs/manpages/gbp-import-srpm.sgml index daf2d75f..7677e210 100644 --- a/docs/manpages/gbp-import-srpm.sgml +++ b/docs/manpages/gbp-import-srpm.sgml @@ -27,6 +27,7 @@ TAG-FORMAT DIRECTORY + PATTERN GPG-KEYID @@ -139,6 +140,20 @@ + + + + + + Import patches to the packaging branch. That is, apply and commit all + patches (that are not marked for manual maintenance) into the + packaging branch after importing other packaging files. The patch + files are automatically removed from the packaging directory and the + spec file if all patches are successufully applied. This option is + ignored if is used. + + + PATTERN @@ -232,6 +247,8 @@ , , + , + , , &man.seealso.common; diff --git a/docs/manpages/gbp-pq-rpm.sgml b/docs/manpages/gbp-pq-rpm.sgml index 42c1e685..8d9f60ac 100644 --- a/docs/manpages/gbp-pq-rpm.sgml +++ b/docs/manpages/gbp-pq-rpm.sgml @@ -20,18 +20,27 @@ &gbp-pq-rpm; &man.common.options.synopsis; + BRANCH-NAME DIRECTORY FILEPATH TAG-FORMAT + FILES + TREEISH + THRESHOLD + REGEX + COMMITISH + DIRECTORY + - + + @@ -120,12 +129,41 @@ + + + + + + Convert a package from the "joint-packaging" maintenance model and + git-layout to the "orphan-packaging" model. It takes the content of + the packaging directory, auto-generates patches and puts these into a + new orphan packaging branch. You can use the + to try to preserve as much of the + git history as possible. Converting is a one-time action - conversion + back to the "joint-packaging" model is not supported (yet). + + +
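As a rough illustration of the convert action described above (a sketch only, assuming the current branch is the packaging branch of a "joint-packaging" tree; the option name is the one documented below):

  # One-time switch to the "orphan-packaging" model: patches are auto-generated
  # and, together with the packaging files, committed to a new orphan packaging branch
  $ gbp pq-rpm convert --retain-history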
OPTIONS &man.common.options.description; + + =BRANCH_NAME + + + + Name (format string) of the development (patch-queue) branch. The + following string fields are accepted: "%(branch)s" (the base branch, + i.e. the packaging branch that the development branch is associated + to), "%(upstreamversion)s" (the upstream version), "%(release)s" (the + rpm patchlevel, i.e. Release), "%(version)s" (full rpm package + version). + + + DIRECTORY @@ -166,6 +204,68 @@ + + FILES + + + + Comma-separated list of additional file(s) to import from packaging + branch. These will appear as one monolithic patch in the development + (patch-queue) branch. By default, the local gbp conf files are + imported in order to try to ensure that gbp sees the same settings on + the development (pq) branch as on the packaging branch. + + + + + TREEISH + + + + Export patches from TREEISH instead of the + default which is HEAD of the development (patch-queue) branch. + + + + + THRESHOLD + + + + Compress patches larger than given + THRESHOLD bytes. Special value 0 disabled + patch compression. + + + + + REGEX + + + + Exclude changes to path(s) matching REGEX + in patch generation. + + + + + COMMITISH + + + + Squash commits up to the given COMMITISH + into one monolitic diff. Could be used if one wants to squash commits + from an upstream release up to a stable update into a single diff + (commits on top of the stable would generate one patch per commit as + usual). The format is '<commit_ish>[:<filename_base>]', + i.e. commitish optionally followed by a colon and the desired + filename base for the diff (suffix '.diff' is automatically added by + &gbp;). Magic word 'HEAD' translates to the patch-export-treeish when + given as the squash-point. This allows one to configure gbp to + always squash all commits into one monolithic diff. + + + @@ -175,6 +275,31 @@ + + DIRECTORY + + + + Directory where packaging files are put in the new orphan packaging + branch after convert. If is not + defined, packaging-dir is used. + + + + + + + + + Try to preserve as much history as possible when converting. That is, + for each commit in the old branch create one corresponding commit in + the new orphan packaging branch. However, commits that will not + generate any changes are skipped (i.e. no empty commits are + generated) - these are caused e.g. by changes in files that are + ignored by patch-generation. + + + diff --git a/docs/manpages/gbp-pull.sgml b/docs/manpages/gbp-pull.sgml index 185a0d0b..1867eb39 100644 --- a/docs/manpages/gbp-pull.sgml +++ b/docs/manpages/gbp-pull.sgml @@ -22,11 +22,15 @@ &gbp-pull; &man.common.options.synopsis; - + [merge|clean] + - branch_name + + branch_name + branch_name + branch_name depth @@ -47,11 +51,21 @@ &man.common.options.description; - + [merge|clean] - force a branch update even if this results in a non fast - forward update. Forcing a branch update - makes you lose your modifications. + Force a branch update even if this results in a non fast + forward update. + merge does a git-merge. + clean checks out a clean copy from upstream. + using clean + makes you lose your modifications. + + + + + + Update all remote-tracking branches that have identical name in the + remote repository. @@ -76,6 +90,14 @@ developed on, default is master. + + =branch_name + + + The branch the packaging is being maintained on. + Alternative to the --debian-branch option. 
+ + =branch_name diff --git a/docs/manpages/gbp-rpm-ch.sgml b/docs/manpages/gbp-rpm-ch.sgml new file mode 100644 index 00000000..fbd28967 --- /dev/null +++ b/docs/manpages/gbp-rpm-ch.sgml @@ -0,0 +1,446 @@ + + +
+ &rpm-email; +
+ + &rpm-firstname; + &rpm-surname; + +
+ + gbp-rpm-ch + &rpm-mansection; + + + git-rpm-ch; + gbp-rpm-ch; + Generate the RPM changelog from git commit messages + + + + &gbp-rpm-ch; + &man.common.options.synopsis; + =VENDOR + BRANCH-NAME + TAG-FORMAT + + DIRECTORY + FILEPATH + FILEPATH + + + MESSAGE + COMMITISH + + META_TAGS + + + + NUMBER + REV-FORMAT + GIT-LOG-OPTIONS + + EDITOR + + + + + GPG-KEYID + CUSTOMIZATION-FILE + [PATH1 PATH2] + + + + DESCRIPTION + + &gbp-rpm-ch; reads git commit messages up to the current tip of the current + branch and updates the RPM changelog from them. + + + By default, &gbp-rpm-ch; tries to guess the last &git; commit documented in + the changelog. Alternatively, can be used to + tell &gbp-rpm-ch; at which point it should start in the &git; history, or, + to use all commits from the &git; history. + + + The additional path arguments can be used to restrict the repository paths + &gbp-rpm-ch; looks at. For even more detailed control, you can use + to restrict the generated changelog entries + further. E.g. by using + "--author=Foo Bar". + + + + OPTIONS + + &man.common.options.description; + + + =VENDOR + + + + Distribution vendor name. + + + + + =BRANCH-NAME + + + + The branch in the Git repository the package is being developed on, + default is master. + + + + + + + + + Don't check if the current branch matches + PACKAGING-BRANCH. + + + + + TAG-FORMAT + + + + Tag format used, when tagging releases, + default is %(vendor)s/%(version)s + + + + + DIRECTORY + + + + Subdirectory that contains the RPM packaging files. + + + + + FILEPATH + + + + Relative path to the changelog file to use. Special value + auto causes &gbp; to guess, + SPEC uses the spec file, + CHANGES uses a separate changelog file + (name derived spec file name with .spec suffix replaced by .changes). + Guessing logic is simple: use separate changelog file if it is found, + otherwise use the spec file. + + + + + FILEPATH + + + + Relative path to the spec file to use. Special value + auto causes &gbp; to search and guess. + Other values cause the option to be + ignored: the directory of the spec file is used, instead. + + + + + + + + + Use all commits from the Git history, overrides + . + + + + + COMMITTISH + + + + Start reading commit messages at + COMMITTISH. + + + + + META_TAGS + + + + Meta tags in the commit messages that are interpreted as bug tracking + system related references. The recognized bts references are added in + the generated changelog entries. See the META TAGS section below for + more information. The bts meta tag tracking feature can be disabled + by defining an empty string. + + + + + + + + + Do not create a new changelog section, just update the last + changelog section. + + + + + + + + + Include the full commit message in the changelog output. + + + + + GIT-LOG-OPTIONS + + + + Options passed on verbatim to git-log(1). + + + + + N + + + + Include N digits of the commit id in the + changelog entry. Default is to not include any commit ids at all. + + + + + REV-FORMAT + + + + Format string to use for revision field in the changelog header. The + following string fields are accepted: + %(upstreamversion)s the upstream version; + %(release)s the rpm patchlevel, i.e. + Release; %(version)s full rpm package + version; %(tagname)s tag/commit, i.e. + basically what git-describe would give. + If empty or not defined the default from packaging policy is used. + + + + + REGEX + + + + Ignore commit lines matching REGEX + when generating the changelog. 
+ + + + + + + + Use user.name and user.email from + git-config(1) for the changelog header. + + + + + + + + Whether to spawn an editor: always, when doing a release or never. + + + + + + + + The editor to use for editing the changelog. + + + + + + + + Text to use for new changelog entries. Git history and the commit + messages, including and + options are ignored in this case. + + + + + + + + Commit changes to git after modifying changelog. Importantly, in + addition to the changelog modifications all other staged changes are + committed, too, making it possible to update other files in the same + commit. + + + + + MSG-FORMAT + + + + Format string for the commit message when committing changes + (when is given). + + + + + + + + + Commit the changes and create a packaging (release) tag. Similarly to + , all staged changes are committed to git + before creating the tag. This option makes it possible to create a + release and correctly document the tag name in the rpm changelog + (by using %(tagname)s in the + string). + + + + + + + + + Don't fail tag operations if a tag with the same version already + exists, but overwrite the existing tag instead. + + + + + + + + + GPG sign all created tags. + + + + + GPG-KEYID + + + + Use this keyid for gpg signing tags. + + + + + CUSTOMIZATION-FILE + + + + Load Python code from CUSTOMIZATION-FILE. + At the moment, the only useful thing the code can do is define a + custom ChangelogEntryFormatter class. + + + + + + META TAGS + + In addition to the above options, the formatting of the new changelog entries + (one-per-commit) in the changelog can be modified by special tags (called + Meta Tags) given in the git commit message. The tags must start at the + first column of a commit message but can appear on any line. They are of + the form : VALUE. Valid + Meta Tags are: + + + + : ACTION + + + + Supported actions are: Ignore which will + ignore this commit when generating new changelog entries. + Short which will only use the description + (the first line) of the commit message when generating the changelog + entry (useful when is given) and + Full which will use the full commit + message when generating the changelog entry (useful when + is not given). + + + + + : BUGNUMBER + + + + Indicate in the changelog entry that bug + BUGNUMBER was addressed in this commit. + The bts meta tags recognized by &gbp-rpm-ch; are actually defined by + the option. + + + + + + The following git commit message: + + + Document meta tags + + so one doesn't have to consult the manual + + Git-Rpm-Ch: Short + Closes: #636088 + + + Results in this changelog entry: + + + - Document meta tags (Closes: #636088) + + + + &man.gbp.config-files; + + + SEE ALSO + + , + , + , + , + &man.seealso.common; + + Cl2vcs, + + + + AUTHOR + + &rpm-username; &rpm-email; + + +
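Editor's aside (not part of the patch): the meta-tag handling documented in the gbp-rpm-ch manpage above is easy to picture with a small standalone sketch. The code below is hypothetical and only mirrors the documented rules (tags start at the first column, the default bts pattern is '(Close|Closes|Fixes|Fix)'); it is not the actual gbp-rpm-ch implementation.

import re

META_BTS = r'(Close|Closes|Fixes|Fix)'

def parse_meta_tags(commit_msg, meta_bts=META_BTS):
    """Collect the Git-Rpm-Ch action and bts references from a commit message."""
    action = None
    bugs = []
    # Tags must start at the first column but may appear on any line
    action_re = re.compile(r'^Git-Rpm-Ch:\s*(?P<action>Ignore|Short|Full)')
    bts_re = re.compile(r'^%s:\s*(?P<bug>\S+)' % meta_bts)
    for line in commit_msg.splitlines():
        match = action_re.match(line)
        if match:
            action = match.group('action')
        match = bts_re.match(line)
        if match:
            bugs.append(match.group('bug'))
    return action, bugs

# The example commit message from the manpage would yield ('Short', ['#636088'])
msg = ("Document meta tags\n\n"
       "so one doesn't have to consult the manual\n\n"
       "Git-Rpm-Ch: Short\n"
       "Closes: #636088\n")
assert parse_meta_tags(msg) == ('Short', ['#636088'])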
diff --git a/docs/manpages/manpages.ent b/docs/manpages/manpages.ent index c212150f..378caae1 100644 --- a/docs/manpages/manpages.ent +++ b/docs/manpages/manpages.ent @@ -15,5 +15,7 @@ + + %COMMON.OPTIONS; diff --git a/docs/manual-rpm.sgml b/docs/manual-rpm.sgml new file mode 100644 index 00000000..a748fad2 --- /dev/null +++ b/docs/manual-rpm.sgml @@ -0,0 +1,26 @@ + + + Building RPM Packages with git-buildpackage-rpm + Markus Lehtonen +
markus.lehtonen@linux.intel.com
+ Version: &gbp-version; +
+ + &ch.intro-rpm; + &ch.development-rpm; + &ch.building-rpm; + + + Command Reference + &man.gbp.buildpackage.rpm; + &man.gbp.import.srpm; + &man.gbp.pq.rpm; + &man.gbp.rpm.ch; + &man.gbp.import.orig.rpm; + + + Copyright + &gbp.copyright; + +
+ diff --git a/docs/manual.sgml b/docs/manual.sgml index af5e12fd..c79af07b 100644 --- a/docs/manual.sgml +++ b/docs/manual.sgml @@ -1,15 +1,3 @@ - - %COMMON; - - %VERSION; - - %MANPAGES; - - %CHAPTERS; - -]> - Building Debian Packages with git-buildpackage diff --git a/gbp-rpm.conf b/gbp-rpm.conf new file mode 100644 index 00000000..c9d07beb --- /dev/null +++ b/gbp-rpm.conf @@ -0,0 +1,143 @@ +# Configuration file for git-buildpackage rpm tools + +[DEFAULT] +# Default build command +#builder = rpmbuild -ba +# Default clean command: +#cleaner = git clean -fd +# Default branch for upstream sources +#upstream-branch = upstream +# Default branch for the packaging files +#packaging-branch = master +# Name of the distribution vendor +#vendor=myvendor +# Default tag formats to be used +#upstream-tag = upstream/%(version)s +#packaging-tag = packaging/%(version)s +# Use pristine-tar +#pristine-tar = True +# Don't check if packaging-branch == current branch +#ignore-branch = True +# Use color when on a terminal, alternatives: on/true, off/false or auto +#color = auto +# Directory containing rpm packaging files +#packaging-dir=rpm +# Spec file to be used +#spec-file = gbp.spec +# Compress auto-generated patches +#patch-compress=100k +# Squash commits until certain tree-ish into one diff +#patch-squash = stable-updates:stable +# Export patches with numbering in filenames +#patch-numbers = False + +### +### Options only affecting git-buildpackage-rpm +### +[git-buildpackage-rpm] +# Look for a tag matching the upstream version when creating a tarball +#upstream-tree = tag +# Uncomment this to automatically GPG sign tags +#sign-tags = True +# Keyid to GPG sign tags with +#keyid = 0xdeadbeef +# Push to a remote repository after a successful tag +#posttag = git-push git.example.com +# Run rpmlint after a successful build (for all rpm's found under build dir) +#postbuild = find $GBP_BUILD_DIR -name '*rpm' -exec rpmlint -i {} \; +# Run a script before build +#prebuild = GIT_DIR=$GBP_GIT_DIR my_prebuild.sh +# Build/export in a non-default directory +#export-dir = ../build-area/ +# Special directory to look for pre-built orig source archives +#tarball-dir = ../tarballs/ +# Build despite of unclean repository, i.e. untracked files are present +#ignore-untracked = True +# Ignore all local changes (i.e. 
build despite of modified files) +#ignore-new = True +# Commit-ish to build +#export = HEAD +# Use best compression +#compression-level = best +# Don't send notifications, alternatives: on/true, off/false or auto +#notify = off +# Transparently handle submodules +#submodules = True +# Rpmbuild related options +#rpmbuild-builddir=BUILD_DIR +#rpmbuild-rpmdir=RPM_DIR +#rpmbuild-sourcedir=SOURCE_DIR +#rpmbuild-specdir=SPEC_DIR +#rpmbuild-srpmdir=SRPM_DIR +#rpmbuild-buildrootdir=BUILDROOT_DIR +# Generate patches against upstream +#patch-export = True + +### +### Options only affecting git-import-orig-rpm +### +[git-import-orig-rpm] +# Set a different upstream branch to import to: +#upstream-branch = newupstream +# Set a different branch to merge to: +#packaging-branch = pkgclean +# Don't merge new upstream to packaging branch by default: +#merge = False +# Filter out files when importing +#filter = .svn +# Filter out files from tarball passed to pristine tar: +#filter-pristine-tar = True +# Name used in storing tarballs in pristine-tar branch +#pristine-tarball-name = %(name)s_%(version)s%(filename_ext)s +# Run hook after the import +#postimport = my_postimport.sh +# Commit message for new upstream version +#import-msg = New upstream version %(version)s + +### +### Options only affecting git-import-srpm +### +[git-import-srpm] +# Set a different upstream branch +#upstream-branch = svn-upstream +# Filter out files when importing +#filter = [ 'CVS', '.cvsignore' ] +# Force committer of upstream source / packaging to be the same as author +#author-is-committer = True + +### +### Options only affecting gbp-pq-rpm +### +[gbp-pq-rpm] +# Name of the patch-queue / development branch +#pq-branch = %(branch)s-devel + +### +### Options only affecting gbp-clone +### +[gbp-clone] +# Track pristine-tar branch +#pristine-tar = True + +### +### Options only affecting gbp-pull +### +[gbp-pull] +# Pull pristine-tar branch +#pristine-tar = True + +### +### Options only affecting gbp-create-remote-repo +### +[gbp-create-remote-repo] +# Disable remote branch tracking +#track = False + +# Options only affecting git-rpm-changelog +[git-rpm-changelog] +# Changelog filename, relative to the git topdir +#changelog-file = git-buildpackage.changelog +# Format string for the revision part of the changelog header +#changelog-revision = %(tagname)s +# Preferred editor +#editor-cmd = vim diff --git a/gbp/bb/__init__.py b/gbp/bb/__init__.py new file mode 100644 index 00000000..fea8fc1e --- /dev/null +++ b/gbp/bb/__init__.py @@ -0,0 +1,501 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see <http://www.gnu.org/licenses/> + +"""Bitbake helper functionality""" + +import os +import re +import shutil +import string +import subprocess +import sys +import tempfile +from collections import defaultdict + +import gbp.log +from gbp.errors import GbpError +from gbp.git.repository import GitRepository, GitRepositoryError +from gbp.scripts.common.buildpackage import dump_tree + +bb = None + +# pylint: disable=bad-continuation + + +def import_bb(): + """Import bitbake lib""" + bb_bin = subprocess.Popen(['which', 'bitbake'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate()[0] + if bb_bin: + bb_lib_path = os.path.dirname(bb_bin) + '/../lib' + sys.path.insert(0, bb_lib_path) + try: + return __import__('bb') + except ImportError: + print "ERROR: Unable to find bitbake/lib, try initializing build " \ + "environment with the 'oe-init-build-env' script\n" + # Return None instead of raising (ImportError) so that building of + # this package succeeds in Debian. Otherwise dpkg-buildpackage fails + # because of an import error in epydoc. + return None + +def init_tinfoil(config_only=False, tracking=False): + """Initialize the Bitbake tinfoil module""" + import bb.tinfoil + try: + tinfoil = bb.tinfoil.Tinfoil(tracking=tracking) + except (SystemExit, bb.BBHandledException): + raise GbpError("Failed to initialize tinfoil") + tinfoil.prepare(config_only=config_only) + return tinfoil + + +def pkg_version(data): + """Get package version as a dict""" + return {'upstreamversion': data.getVar('PV', True), + 'release': data.getVar('PR', True), + 'version': data.getVar('PV', True) + '-' + data.getVar('PR', True)} + + +class BBFile(object): + """Class representing .bb meta data""" + var_ops = r'\+=|=\+|\?=|\?\?=|:=|=' + vardef_re = re.compile( + r'(^(?P<name>\w+)\s*(?P<op>%s)\s*)(?P<value>\S.*)' % var_ops) + + + def __init__(self, path, cfg_data=None): + self.bb_file = os.path.basename(path) + self.bb_dir = os.path.abspath(os.path.dirname(path)) + + self._pkg_data = None + self._variables = {} + self.includes = [] + self.localfiles = [] + + if cfg_data is not None: + self.parse_bb(path, cfg_data) + else: + self.naive_parse_bb(path) + + @property + def version(self): + """Get version information as a dict""" + return {'upstreamversion': self.getVar('PV', True), + 'release': self.getVar('PR', True)} + + @property + def bb_path(self): + """Full path of the bb file""" + return os.path.join(self.bb_dir, self.bb_file) + + def parse_bb(self, path, cfg_data): + """Parse bb meta file""" + self._pkg_data = bb.cache.Cache.loadDataFull(path, [], cfg_data) + + # Determine local packaging files + uris = (self.getVar('SRC_URI', True) or "").split() + fetcher = bb.fetch2.Fetch(uris, self._pkg_data) + bb_dir = os.path.dirname(self.getVar('FILE')) + # Also check for file existence as fetcher incorrectly returns some + # non-existent .bbclass files under the recipe directory + self.includes = [path for path in self.getVar('BBINCLUDED').split() if + path.startswith(bb_dir) and os.path.exists(path)] + self.localfiles = [path for path in fetcher.localpaths() if + path.startswith(bb_dir)] + + def naive_parse_bb(self, path): + """Naive parsing of standalone recipes""" + # Some variable defaults + # e.g.
take package name and version directly from recipe file name + self._variables['FILE'] = os.path.abspath(path) + fn_base, _fn_ext = os.path.splitext(os.path.basename(path)) + split_base = fn_base.rsplit('_', 1) + if len(split_base) == 2: + self._variables['PN'] = split_base[0] + self._variables['PV'] = split_base[1] + else: + self._variables['PN'] = fn_base + self._variables['PV'] = '1.0' + self._variables['PR'] = 'r0' + + def var_parse_cb(lines): + """Callback function for parsing variables""" + unwrapped = self.unwrap_lines(lines) + match = self.vardef_re.match(unwrapped) + if match: + var = match.groupdict() + value = self.unquote_val(var['value']) + + if (var['name'] not in self._variables or + var['op'] in ('=', ':=')): + self._variables[var['name']] = value + elif var['op'] in ('+=', '=+'): + self._variables[var['name']] += ' ' + value + else: + splitted = unwrapped.split(None, 1) + if (len(splitted) > 1 and + splitted[0] in ('include', 'require')): + inc_fname = splitted[1].strip() + inc_path = os.path.join(os.path.dirname(path), + inc_fname) + self.includes.append(os.path.abspath(inc_path)) + return lines + self.parse_file(inc_path, var_parse_cb) + return lines + + # Parse variables from file + self.parse_file(path, var_parse_cb) + + # Find local files + filedirs = [self.getVar('PN') + '-' + self.getVar('PV'), + self.getVar('PN'), 'files'] + uris = (self.getVar('SRC_URI') or "").split() + for uri_str in uris: + uri = bb.fetch2.URI(uri_str) + if uri.scheme == 'file': + found = False + for path in [os.path.join(self.bb_dir, dirn, uri.path) for dirn + in filedirs]: + if os.path.exists(path): + self.localfiles.append(path) + found = True + break + if not found: + gbp.log.warn("Seemingly local file '%s' not found under " + "'%s'" % (uri_str, self.bb_dir)) + + def _expand_single(self, match): + """Expand single occurrence of a variable reference""" + if match.group(1) in self._variables: + return self._variables[match.group(1)] + return match.group(0) + + def expand_val(self, val, rec=0): + """Expand variable""" + expanded = re.sub(r'\${(\w+)}', self._expand_single, val) + if expanded == val: + return expanded + elif rec < 20: + return self.expand_val(expanded, rec +1) + else: + raise GbpError("Too many recursions when expanding variable value") + + def getVar(self, var, expand=True): + """Get variable""" + if self._pkg_data: + return self._pkg_data.getVar(var, expand) + elif var in self._variables: + if expand: + return self.expand_val(self._variables[var]) + else: + return self._variables[var] + return None + + @staticmethod + def unquote_val(val): + """Unquote / strip variable value""" + return val.strip(string.whitespace + r'"\'\\') + + @staticmethod + def unwrap_lines(lines): + """Return a joined string of multiple lines""" + return ''.join([re.sub(r'\\\s*$', '', line) for line in lines]) + + @staticmethod + def var_to_str(var, values, oper='+='): + """Create a well formatted string buffer containing a multiline variable + assignment""" + indent = ' ' * (len(var) + 2 + len(oper)) + linebuf = ['%s %s "%s \\\n' % (var, oper, values[0])] + for val in values[1:]: + linebuf.append(indent + ' ' + val + '\\\n') + linebuf.append(indent + '"\n') + return linebuf + + @staticmethod + def parse_file(filepath, cb_func): + """Parse recipe""" + ret_buf = [] + with open(filepath) as fobj: + multiline = [] + for line in fobj.readlines(): + stripped = line.rstrip() + if not multiline: + if not stripped.endswith('\\'): + ret_buf.extend(cb_func([line])) + else: + multiline = [line] + else: + 
multiline.append(line) + if not stripped.endswith('\\'): + ret_buf.extend(cb_func(multiline)) + multiline = [] + return ret_buf + + @staticmethod + def set_var_val(filepath, var, val): + """Set variable value in a recipe""" + class _Setter(object): + """Class for handling variable injections""" + def __init__(self): + self.was_set = False + + def set_cb(self, lines): + """Parser callback for setting variable value""" + unwrapped = BBFile.unwrap_lines(lines) + match = BBFile.vardef_re.match(unwrapped) + if match and match.group('name') == var: + if not self.was_set: + self.was_set = True + print "Setting value %s = %s" % (var, val) + return ['%s = "%s"\n' % (var, val)] + else: + return [] + return lines + + # Parse file and set values + setter = _Setter() + linebuf = BBFile.parse_file(filepath, setter.set_cb) + + # Write file + with open(filepath, 'w') as fobj: + if not setter.was_set: + fobj.write('%s = "%s"\n' % (var, val)) + fobj.writelines(linebuf) + + @staticmethod + def substitute_var_val(filepath, var, pattern, repl): + """Update variable in a recipe""" + def subst_cb(lines): + """Parser callback for substituting variable values""" + unwrapped = BBFile.unwrap_lines(lines) + match = BBFile.vardef_re.match(unwrapped) + if match and match.group('name') == var: + filtered = [] + for line in lines: + line = re.sub(pattern, repl, line) + # Drop empty lines + if not re.match(r'\s*\\\s*', line): + filtered.append(line) + return filtered + return lines + + # Parse file and substitute values + linebuf = BBFile.parse_file(filepath, subst_cb) + + # Write file + with open(filepath, 'w') as fobj: + fobj.writelines(linebuf) + + @staticmethod + def append_var_val(filepath, var, new_vals): + """Update variable in a recipe""" + if not new_vals: + return + + class _Finder(object): + """Class for recording definitions of variables""" + def __init__(self): + self.line_ind = 0 + self.last_occurrence = -1 + + def find_last_occurrence_cb(self, lines): + """Get the point of insertion for the variable""" + unwrapped = BBFile.unwrap_lines(lines) + match = BBFile.vardef_re.match(unwrapped) + if match and match.group('name') == var: + self.last_occurrence = self.line_ind + len(lines) - 1 + self.line_ind += len(lines) + return lines + + finder = _Finder() + linebuf = BBFile.parse_file(filepath, finder.find_last_occurrence_cb) + + # Prepare for appending values + quote = None + if finder.last_occurrence >= 0: + last_line = linebuf[finder.last_occurrence].rstrip() + # Guess indentation + match = BBFile.vardef_re.match(last_line) + if match: + indent = ' ' * (len(match.group(1)) + 1) + else: + indent = re.match(r'(\s*)', last_line).group(1) + + # Guess point of insertion for new values and mangle the last line + if re.match(r'^\s*$', last_line[:-1]): + # Insert before the last line if it's an empty line (with a + # quotation character only) + insert_ind = finder.last_occurrence + indent += ' ' + else: + # Else, remove the quotation character and append after the + # last line + quote = last_line[-1] + last_line = last_line[:-1] + ' \\\n' + linebuf[finder.last_occurrence] = last_line + insert_ind = finder.last_occurrence + 1 + else: + indent = ' ' * (len(var) + 4) + + # Write file + with open(filepath, 'w') as fobj: + if finder.last_occurrence > -1: + fobj.writelines(linebuf[:insert_ind]) + for val in new_vals: + fobj.write(indent + val + ' \\\n') + if quote: + fobj.write(indent + quote + '\n') + fobj.writelines(linebuf[insert_ind:]) + else: + fobj.writelines(BBFile.var_to_str(var, new_vals, '+=')) + fobj.writelines(linebuf) +
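Editor's aside (not part of the patch): a minimal, hypothetical usage sketch of the BBFile helper defined above. The recipe path and values are made up; with cfg_data=None the naive standalone parser is used, so no initialized BitBake build environment is assumed.

from gbp.bb import BBFile

# Naive parsing of a standalone recipe file (path is hypothetical)
recipe = BBFile('meta-example/recipes-apps/foo/foo_1.2.bb')
print recipe.getVar('PN')   # 'foo', guessed from the recipe file name
print recipe.getVar('PV')   # '1.2'
print recipe.version        # {'upstreamversion': '1.2', 'release': 'r0'}

# In-place recipe editing helpers from the same class
BBFile.set_var_val(recipe.bb_path, 'PR', 'r1')
BBFile.append_var_val(recipe.bb_path, 'SRC_URI', ['file://0001-fix.patch'])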
+def guess_bb_file(file_list, bbappend): + """Guess bb recipe from a list of filenames""" + recipes = [] + file_exts = ['.bb'] if not bbappend else ['.bb', '.bbappend'] + for ext in file_exts: + for filepath in file_list: + if filepath.endswith(ext): + gbp.log.debug("Found bb recipe file %s" % filepath) + recipes.append(filepath) + if len(recipes) == 0: + raise GbpError("No recipes found.") + return sorted(recipes)[-1] + +def bb_from_repo(cfg_data, repo, treeish, bb_path): + """Get and parse a bb recipe from a Git treeish""" + try: + tmpdir = tempfile.mkdtemp(prefix='gbp-bb_') + # Dump whole bb directory + dump_tree(repo, tmpdir, '%s:%s' % (treeish, os.path.dirname(bb_path)), + False) + fpath = os.path.join(tmpdir, os.path.basename(bb_path)) + return BBFile(fpath, cfg_data) + except GitRepositoryError as err: + raise GbpError("Git error: %s" % err) + finally: + shutil.rmtree(tmpdir) + +def guess_bb_path_from_fs(topdir, recursive=True, bbappend=False): + """Guess a bitbake recipe file""" + file_list = [] + if not topdir: + topdir = '.' + for root, dirs, files in os.walk(topdir): + file_list.extend([os.path.join(root, fname) for fname in files]) + if not recursive: + del dirs[:] + # Skip .git dir in any case + if '.git' in dirs: + dirs.remove('.git') + return guess_bb_file(file_list, bbappend) + +def guess_bb_path_from_repo(repo, treeish=None, topdir='', recursive=True, + bbappend=False): + """Guess a bitbake recipe path from a git repository""" + topdir = topdir.rstrip('/') + ('/') if topdir else '' + # Search from working copy + if not treeish: + abspath = guess_bb_path_from_fs(os.path.join(repo.path, topdir), + recursive, bbappend) + return os.path.relpath(abspath, repo.path) + + # Search from treeish + try: + file_list = [nam for (mod, typ, sha, nam) in + repo.list_tree(treeish, recursive, topdir) if typ == 'blob'] + except GitRepositoryError as err: + raise GbpError("Failed to search bb recipe from treeish %s, " + "Git error: %s" % (treeish, err)) + return guess_bb_file(file_list, bbappend) + +def guess_bb_path(options, repo, treeish=None, bbappend=False): + """Guess recipe path, relative to repo rootdir""" + bb_path = options.bb_file + if options.bb_file: + if not treeish: + path = os.path.join(repo.path, bb_path) + if not os.path.exists(path): + raise GbpError("'%s' does not exist" % bb_path) + else: + try: + repo.show("%s:%s" % (treeish, bb_path)) + except GbpError as err: + raise GbpError(str(err)) + else: + bb_path = guess_bb_path_from_repo(repo, treeish, options.meta_dir, + bbappend=bbappend) + return bb_path + +def parse_bb(cfg_data, options, repo, treeish=None, bbappend=False): + """Find and parse a bb recipe from a repository""" + try: + bb_path = guess_bb_path(options, repo, treeish, bbappend=bbappend) + gbp.log.debug("Using recipe '%s'" % bb_path) + options.meta_dir = os.path.dirname(bb_path) + if treeish: + pkg_data = bb_from_repo(cfg_data, repo, treeish, bb_path) + else: + full_path = os.path.join(repo.path, bb_path) + pkg_data = BBFile(full_path, cfg_data) + except GbpError as err: + raise GbpError("Can't parse bb recipe: %s" % err) + return pkg_data + + +def guess_pkg_from_dir(pkg_dir, tinfoil): + """Guess a package from a directory in configured bitbake environment""" + abspath = os.path.abspath(pkg_dir) + layer_dirs = tinfoil.config_data.getVar('BBLAYERS').split() + gbp.log.debug("Checking if %s is in %s" % (abspath, layer_dirs)) + layer_dir = '' + for path in layer_dirs: + if abspath.startswith(path): + layer_dir = path + if not layer_dir: + raise GbpError("%s not 
under configured layers" % abspath) + + bb_files = [path for path in tinfoil.cooker_data.pkg_fn + if os.path.dirname(path) == abspath] + if len(bb_files): + bb_file = bb_files[-1] + gbp.log.debug("Found %d recipes in %s, choosing %s" % + (len(bb_files), pkg_dir, os.path.basename(bb_file))) + else: + raise GbpError("No recipes found in %s" % pkg_dir) + return bb_file + +def guess_pkg(tinfoil, pkg): + """Guess package (recipe) from configured bitbake environment""" + if pkg in tinfoil.cooker_data.pkg_pn: + pkg_bb = tinfoil.cooker_data.pkg_pn[pkg][0] + elif not os.path.isdir(pkg): + abspath = os.path.abspath(pkg) + if abspath in tinfoil.cooker_data.pkg_fn: + pkg_bb = abspath + else: + raise GbpError("Package %s not found in any configured layer" % pkg) + elif os.path.exists(pkg): + pkg_bb = guess_pkg_from_dir(pkg, tinfoil) + else: + raise GbpError("Unable to find %s" % pkg) + return pkg_bb + + +# Initialize module +bb = import_bb() diff --git a/gbp/command_wrappers.py b/gbp/command_wrappers.py index a4643c54..5f173b5b 100644 --- a/gbp/command_wrappers.py +++ b/gbp/command_wrappers.py @@ -23,12 +23,34 @@ import os import os.path import signal +import sys +from tempfile import TemporaryFile + import gbp.log as log class CommandExecFailed(Exception): """Exception raised by the Command class""" pass +class StdfProxy(object): + """ + Relay input data to stdout/stderr. Designed to work around a problem where + Python nose replaces sys.stdout/stderr with a custom 'Tee' object that is + not a file object (compatible) and thus causes a crash with Popen. + """ + def __init__(self, stdfname): + self.stdfname = stdfname + self.safed = getattr(sys, self.stdfname) + self.filed = TemporaryFile() + setattr(sys, self.stdfname, self.filed) + + def closedown(self): + self.filed.seek(0) + self.safed.write(self.filed.read()) + setattr(sys, self.stdfname, self.safed) + + def __getattr__(self, name): + return getattr(self.filed, name) class Command(object): """ @@ -67,8 +89,20 @@ def default_sigpipe(): log.debug("%s %s %s" % (self.cmd, self.args, args)) self._reset_state() - stdout_arg = subprocess.PIPE if self.capture_stdout else None - stderr_arg = subprocess.PIPE if self.capture_stderr else None + if self.capture_stdout: + stdout_arg = subprocess.PIPE + elif hasattr(sys.stdout, 'fileno'): + stdout_arg = sys.stdout + else: + # For nosetests where sys.stdout is replaced by a "Tee" object + stdout_arg = StdfProxy('stdout') + if self.capture_stderr: + stderr_arg = subprocess.PIPE + elif hasattr(sys.stderr, 'fileno'): + stderr_arg = sys.stderr + else: + # For nosetests where sys.stderr is replaced by a "Tee" object + stderr_arg = StdfProxy('stderr') cmd = [ self.cmd ] + self.args + args if self.shell: # subprocess.call only cares about the first argument if shell=True @@ -87,6 +121,11 @@ def default_sigpipe(): self.err_reason = "execution failed: %s" % str(err) self.retcode = 1 raise + finally: + if isinstance(stdout_arg, StdfProxy): + stdout_arg.closedown() + if isinstance(stderr_arg, StdfProxy): + stderr_arg.closedown() self.retcode = popen.returncode if self.retcode < 0: @@ -223,7 +262,8 @@ def __init__(self, archive, dir, filters=[], compression=None): class PackTarArchive(Command): """Wrap tar to pack a compressed tar archive""" - def __init__(self, archive, dir, dest, filters=[], compression=None): + def __init__(self, archive, dir, dest, filters=[], compression=None, + transform=None): self.archive = archive self.dir = dir exclude = [("--exclude=%s" % _filter) for _filter in filters] @@ -231,8 +271,14 @@ def 
__init__(self, archive, dir, dest, filters=[], compression=None): if not compression: compression = '-a' - Command.__init__(self, 'tar', exclude + - ['-C', dir, compression, '-cf', archive, dest]) + args = exclude + ['-C', dir, compression, '-cf', archive ] + + if transform != None: + args.append('--transform=%s' % transform) + + args.append(dest) + + Command.__init__(self, 'tar', args) self.run_error = 'Couldn\'t repack "%s": {err_reason}' % self.archive diff --git a/gbp/config.py b/gbp/config.py index 934dbe95..0ff2f895 100644 --- a/gbp/config.py +++ b/gbp/config.py @@ -20,6 +20,7 @@ from six.moves import configparser from copy import copy import os.path +import tempfile try: from gbp.version import gbp_version @@ -32,7 +33,8 @@ no_upstream_branch_msg = """ Repository does not have branch '%s' for upstream sources. If there is none see file:///usr/share/doc/git-buildpackage/manual-html/gbp.import.html#GBP.IMPORT.CONVERT -on howto create it otherwise use --upstream-branch to specify it. +on howto create it or check the --create-missing-branches option. Otherwise, +use --upstream-branch to specify it. """ def expand_path(option, opt, value): @@ -49,6 +51,11 @@ def check_tristate(option, opt, value): return val +def optparse_split_cb(option, opt_str, value, parser): + """Split option string into a list""" + setattr(parser.values, option.dest, value.split(',')) + + def safe_option(f): def _decorator(self, *args, **kwargs): obj = self @@ -93,6 +100,7 @@ class GbpOptionParser(OptionParser): @type def_config_files: dict (type, path) """ defaults = { 'debian-branch' : 'master', + 'packaging-branch' : 'master', 'upstream-branch' : 'upstream', 'upstream-tree' : 'TAG', 'pristine-tar' : 'False', @@ -108,6 +116,7 @@ class GbpOptionParser(OptionParser): 'prebuild' : '', 'postexport' : '', 'postimport' : '', + 'build' : 'True', 'hooks' : 'True', 'debian-tag' : 'debian/%(version)s', 'debian-tag-msg' : '%(pkg)s Debian release %(version)s', @@ -122,6 +131,7 @@ class GbpOptionParser(OptionParser): 'overlay' : 'False', 'tarball-dir' : '', 'ignore-new' : 'False', + 'ignore-untracked': 'False', 'ignore-branch' : 'False', 'meta' : 'True', 'meta-closes' : 'Closes|LP', @@ -166,11 +176,16 @@ class GbpOptionParser(OptionParser): 'drop': 'False', 'commit': 'False', 'upstream-vcs-tag': '', + 'tmp-dir': '/var/tmp/gbp/', } help = { 'debian-branch': ("Branch the Debian package is being developed on, " "default is '%(debian-branch)s'"), + 'packaging-branch': + ("Branch the packaging is being maintained on, " + "rpm counterpart of the 'debian-branch' option, " + "default is '%(packaging-branch)s'"), 'upstream-branch': "Upstream branch, default is '%(upstream-branch)s'", 'upstream-tree': @@ -217,6 +232,9 @@ class GbpOptionParser(OptionParser): "Meta bug number format, default is '%(meta-closes-bugnum)s'", 'ignore-new': "Build with uncommited changes in the source tree, default is '%(ignore-new)s'", + 'ignore-untracked': + "Build with untracked files in the source tree, default is " + "'%(ignore-untracked)s'", 'ignore-branch': ("Build although debian-branch != current branch, " "default is '%(ignore-branch)s'"), @@ -292,6 +310,8 @@ class GbpOptionParser(OptionParser): 'postimport': ("hook run after a successful import, " "default is '%(postimport)s'"), + 'build': + ("Enable running builder, default is %(build)s"), 'hooks': ("Enable running all hooks, default is %(hooks)s"), 'time-machine': @@ -324,6 +344,9 @@ class GbpOptionParser(OptionParser): "after export. 
Default is '%(drop)s'"), 'commit': "commit changes after export, Default is '%(commit)s'", + 'tmp-dir': + ("Base directory under which temporary directories are " + "created, default is '%(tmp-dir)s'"), } def_config_files = {'/etc/git-buildpackage/gbp.conf': 'system', @@ -367,13 +390,26 @@ def get_config_files(klass, no_local=False): files = [fname for fname in files if fname.startswith('/')] return files - def _read_config_file(self, parser, repo, filename): + def _read_config_file(self, parser, repo, filename, git_treeish): """Read config file""" str_fields = {} if repo: str_fields['git_dir'] = repo.git_dir if not repo.bare: str_fields['top_dir'] = repo.path + + # Read per-tree config file + if repo and git_treeish and filename.startswith('%(top_dir)s/'): + with tempfile.TemporaryFile() as tmp: + relpath = filename.replace('%(top_dir)s/', '') + try: + config = repo.show('%s:%s' % (git_treeish, relpath)) + tmp.writelines(config) + except GitRepositoryError: + pass + tmp.seek(0) + parser.readfp(tmp) + return try: filename = filename % str_fields except KeyError: @@ -386,7 +422,7 @@ def _warn_old_config_section(self, oldcmd, cmd): gbp.log.warn("Old style config section [%s] found " "please rename to [%s]" % (oldcmd, cmd)) - def parse_config_files(self): + def parse_config_files(self, git_treeish=None): """ Parse the possible config files and set appropriate values default values @@ -403,7 +439,7 @@ def parse_config_files(self): repo = None # Read all config files for filename in config_files: - self._read_config_file(parser, repo, filename) + self._read_config_file(parser, repo, filename, git_treeish) self.config.update(dict(parser.defaults())) # Make sure we read any legacy sections prior to the real subcommands @@ -445,7 +481,8 @@ def parse_config_files(self): else: self.config['filter'] = [] - def __init__(self, command, prefix='', usage=None, sections=[]): + def __init__(self, command, prefix='', usage=None, sections=[], + git_treeish=None): """ @param command: the command to build the config parser for @type command: C{str} @@ -461,7 +498,7 @@ def __init__(self, command, prefix='', usage=None, sections=[]): self.sections = sections self.prefix = prefix self.config = {} - self.parse_config_files() + self.parse_config_files(git_treeish) self.valid_options = [] if self.command.startswith('git-') or self.command.startswith('gbp-'): @@ -517,23 +554,21 @@ def get_default(self, option_name, **kwargs): return default @safe_option - def add_config_file_option(self, option_name, dest, help=None, **kwargs): + def add_config_file_option(self, option_name, help=None, **kwargs): """ set a option for the command line parser, the default is read from the config file param option_name: name of the option type option_name: string - param dest: where to store this option - type dest: string param help: help text type help: string """ if not help: help = self.help[option_name] - OptionParser.add_option(self, "--%s%s" % (self.prefix, option_name), dest=dest, + OptionParser.add_option(self, "--%s%s" % (self.prefix, option_name), default=self.get_default(option_name, **kwargs), help=help % self.config, **kwargs) - def add_boolean_config_file_option(self, option_name, dest): + def add_boolean_config_file_option(self, option_name, dest=None): self.add_config_file_option(option_name=option_name, dest=dest, action="store_true") neg_help = "negates '--%s%s'" % (self.prefix, option_name) self.add_config_file_option(option_name="no-%s" % option_name, dest=dest, help=neg_help, action="store_false") @@ -630,13 
+665,13 @@ class GbpOptionParserRpm(GbpOptionParser): """ defaults = dict(GbpOptionParser.defaults) defaults.update({ - 'tmp-dir' : '/var/tmp/gbp/', 'vendor' : 'Downstream', 'packaging-branch' : 'master', 'packaging-dir' : '', 'packaging-tag-msg' : ('%(pkg)s (vendor)s release ' '%(version)s'), 'packaging-tag' : 'packaging/%(version)s', + 'pq-branch' : 'development/%(branch)s', 'export-sourcedir' : 'SOURCES', 'export-specdir' : 'SPECS', 'export-dir' : '../rpmbuild', @@ -648,13 +683,26 @@ class GbpOptionParserRpm(GbpOptionParser): 'mock-root' : '', 'mock-options' : '', 'native' : 'auto', + 'spec-vcs-tag' : '', + 'patch-export' : 'False', + 'patch-compress' : '0', + 'patch-squash' : '', + 'patch-ignore-path' : '', + 'patch-import' : 'True', + 'import-files' : ['.gbp.conf', + 'debian/gbp.conf'], + 'merge' : 'False', + 'pristine-tarball-name' : 'auto', + 'orig-prefix' : 'auto', + 'changelog-file' : 'auto', + 'changelog-revision' : '', + 'spawn-editor' : 'always', + 'editor-cmd' : 'vim', + 'meta-bts' : '(Close|Closes|Fixes|Fix)', }) help = dict(GbpOptionParser.help) help.update({ - 'tmp-dir': - "Base directory under which temporary directories are " - "created, default is '%(tmp-dir)s'", 'vendor': "Distribution vendor name, default is '%(vendor)s'", 'packaging-branch': @@ -670,6 +718,9 @@ class GbpOptionParserRpm(GbpOptionParser): 'packaging-tag-msg': ("Format string for packaging tag messages, " "default is '%(packaging-tag-msg)s'"), + 'pq-branch': + "format string for the patch-queue branch name, default is " + "'%(pq-branch)s'", 'spec-file': "Spec file to use, causes the packaging-dir option to be " "ignored, default is '%(spec-file)s'", @@ -697,6 +748,79 @@ class GbpOptionParserRpm(GbpOptionParser): "default is '%(mock-options)s'"), 'native': "Treat this package as native, default is '%(native)s'", + 'spec-vcs-tag': + "Set/update the 'VCS:' tag in the spec file, empty value " + "removes the tag entirely, default is '%(spec-vcs-tag)s'", + 'patch-export': + "Create patches between upstream and export-treeish, default " + "is '%(patch-export)s'", + 'patch-compress': + "Compress (auto-generated) patches larger than given number of " + "bytes, 0 never compresses, default is " + "'%(patch-compress)s'", + 'patch-squash': + "Squash commits (from upstream) until given tree-ish into one " + "big diff, format is '<commit_ish>[:<filename_base>]'. " + "Default is '%(patch-squash)s'", + 'patch-ignore-path': + "Exclude changes to path(s) matching regex, default is " + "'%(patch-ignore-path)s'", + 'patch-import': + "Import patches to the packaging branch, default is " + "'%(patch-import)s'", + 'import-files': + "Comma-separated list of additional file(s) to import from " + "packaging branch. These will appear as one monolithic patch " + "in the pq/development branch. Default is %(import-files)s", + 'pristine-tarball-name': + "Filename to record to pristine-tar, set to 'auto' to not " + "mangle the file name, default is '%(pristine-tarball-name)s'", + 'orig-prefix': + "Prefix (dir) to be used when generating/importing tarballs, " + "default is '%(orig-prefix)s'", + 'changelog-file': + "Changelog file to be used, default is '%(changelog-file)s'", + 'changelog-revision': + "Format string for the revision field in the changelog header. 
" + "If empty or not defined the default from packaging policy is " + "used.", + 'editor-cmd': + "Editor command to use", + 'git-author': + "Use name and email from git-config for the changelog header, " + "default is '%(git-author)s'", + 'meta-bts': + "Meta tags for the bts commands, default is '%(meta-bts)s'", }) +class GbpOptionParserBB(GbpOptionParserRpm): + """Commandline and config file option parser for the -bb commands""" + defaults = dict(GbpOptionParserRpm.defaults) + defaults.update( { + 'builder' : 'bitbake', + 'export-dir' : '', + 'meta-dir' : '', + 'bb-file' : '', + 'bb-vcs-info' : '', + 'submit-tag' : 'submit/%(target)s/%(nowtime)s', + 'target' : 'tizen', + } ) + + help = dict(GbpOptionParserRpm.help) + help.update( { + 'meta-dir': + "Subdir where bitbake meta files are stored, default " + "is '%(meta-dir)s'", + 'bb-file': + "Bitbake recipe file to build", + 'bb-vcs-info': + "Format string for the VCS information automatically " + "set in the recipe file, default is '%(bb-vcs-info)s'", + 'submit-tag': + "Submit tag format, default is '%(submit-tag)s'", + 'target': + "Submit target used in submit tag, default is " + "'%(target)s'", + } ) + # vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·: diff --git a/gbp/deb/upstreamsource.py b/gbp/deb/upstreamsource.py index ca235ef9..f8605e45 100644 --- a/gbp/deb/upstreamsource.py +++ b/gbp/deb/upstreamsource.py @@ -22,7 +22,8 @@ class DebianUpstreamSource(UpstreamSource): """Upstream source class for Debian""" - def __init__(self, name, unpacked=None): + def __init__(self, name, unpacked=None, **kwargs): super(DebianUpstreamSource, self).__init__(name, unpacked, - DebianPkgPolicy) + DebianPkgPolicy, + **kwargs) diff --git a/gbp/git/__init__.py b/gbp/git/__init__.py index 81e1422d..40755628 100644 --- a/gbp/git/__init__.py +++ b/gbp/git/__init__.py @@ -17,7 +17,8 @@ """Accessing Git from python""" import calendar -import dateutil.parser +import datetime +import rfc822 from gbp.git.modifier import GitModifier from gbp.git.commit import GitCommit @@ -28,6 +29,23 @@ from gbp.git.vfs import GitVfs +class FixedOffset(datetime.tzinfo): + """Fixed offset in seconds east from UTC.""" + + ZERO = datetime.timedelta(0) + + def __init__(self, offset): + datetime.tzinfo.__init__(self) + self._offset = datetime.timedelta(seconds=offset) + + def utcoffset(self, dtime): + return self._offset + self.dst(dtime) + + def dst(self, dtime): + assert dtime.tzinfo is self + return self.ZERO + + def rfc822_date_to_git(rfc822_date): """Parse a date in RFC822 format, and convert to a 'seconds tz' C{str}ing. 
@@ -38,9 +56,10 @@ def rfc822_date_to_git(rfc822_date): >>> rfc822_date_to_git('Sat, 5 Apr 2008 17:01:32 +0200') '1207407692 +0200' """ - d = dateutil.parser.parse(rfc822_date) - seconds = calendar.timegm(d.utctimetuple()) - tz = d.strftime("%z") - return '%d %s' % (seconds, tz) + parsed = rfc822.parsedate_tz(rfc822_date) + date = datetime.datetime(*parsed[:6], tzinfo=FixedOffset(parsed[-1])) + seconds = calendar.timegm(date.utctimetuple()) + tzone = date.strftime("%z") + return '%d %s' % (seconds, tzone) # vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·: diff --git a/gbp/git/repository.py b/gbp/git/repository.py index 8142a255..54383b4a 100644 --- a/gbp/git/repository.py +++ b/gbp/git/repository.py @@ -21,9 +21,9 @@ import os.path import re from collections import defaultdict +import select import gbp.log as log -from gbp.errors import GbpError from gbp.git.modifier import GitModifier from gbp.git.commit import GitCommit from gbp.git.errors import GitError @@ -77,29 +77,47 @@ class GitRepository(object): all methods. """ - def _check_bare(self): - """Check whether this is a bare repository""" - out, dummy, ret = self._git_inout('rev-parse', ['--is-bare-repository'], - capture_stderr=True) + def _check_dirs(self): + """Get top level dir and git meta data dir""" + out, dummy, ret = self._git_inout('rev-parse', ['--git-dir'], + capture_stderr=True) if ret: raise GitRepositoryError( - "Failed to get repository state at '%s'" % self.path) - self._bare = False if out.strip() != 'true' else True - self._git_dir = '' if self._bare else '.git' + "Failed to get repository git dir at '%s'" % self.path) + + # Set git meta data dir + git_dir = out.strip() + if os.path.isabs(git_dir): + self._git_dir = git_dir + else: + self._git_dir = os.path.abspath(os.path.join(self.path, git_dir)) + + # Set top level dir correctly (in case repo was initialized + # from a subdir, for example) + if self.bare: + self._path = self._git_dir + else: + out, dummy, ret = self._git_inout('rev-parse', ['--show-toplevel'], + capture_stderr=True) + self._path = os.path.abspath(out.strip()) def __init__(self, path): self._path = os.path.abspath(path) - self._bare = False try: - out, dummy, ret = self._git_inout('rev-parse', ['--show-cdup'], + # Check for bare repository + out, dummy, ret = self._git_inout('rev-parse', ['--is-bare-repository'], capture_stderr=True) - if ret or out.strip(): + if ret: raise GitRepositoryError("No Git repository at '%s': '%s'" % (self.path, out)) + self._bare = False if out.strip() != 'true' else True + + self._check_dirs() + except GitRepositoryError: raise # We already have a useful error message except: - raise GitRepositoryError("No Git repository at '%s'" % self.path) - self._check_bare() + raise GitRepositoryError("No Git repository at '%s' (or any parent dir)" % self.path) + @staticmethod def __build_env(extra_env): @@ -142,7 +160,7 @@ def _git_getoutput(self, command, args=[], extra_env=None, cwd=None): return output, popen.returncode def _git_inout(self, command, args, input=None, extra_env=None, cwd=None, - capture_stderr=False): + capture_stderr=False, capture_stdout=True): """ Run a git command with input and return output @@ -161,30 +179,99 @@ def _git_inout(self, command, args, input=None, extra_env=None, cwd=None, """ if not cwd: cwd = self.path - return self.__git_inout(command, args, input, extra_env, cwd, capture_stderr) + ret = 0 + stdout = '' + stderr = '' + try: + for outdata in self.__git_inout(command, args, input, extra_env, + cwd, capture_stderr, + 
capture_stdout): + stdout += outdata[0] + stderr += outdata[1] + except GitRepositoryError as err: + ret = err.returncode + return stdout, stderr, ret + + def _git_inout2(self, command, args, stdin=None, extra_env=None, cwd=None, + capture_stderr=False): + """ + Quite similar to C{_git_inout()} but returns stdout output of the git + command as a Python generator object, instead. Also, stderr is not + returned. + + @note: The caller must consume the iterator that is returned, in order + to make sure that the git command runs and terminates. + """ + if not cwd: + cwd = self.path + stderr = '' + try: + for outdata in self.__git_inout(command, args, stdin, extra_env, + cwd, capture_stderr, True): + stderr += outdata[1] + yield outdata[0] + except GitRepositoryError as err: + err.stderr = stderr + raise err @classmethod - def __git_inout(cls, command, args, input, extra_env, cwd, capture_stderr): + def __git_inout(cls, command, args, stdin, extra_env, cwd, capture_stderr, + capture_stdout): """ - As _git_inout but can be used without an instance + Run a git command without a a GitRepostitory instance. + + Returns the git command output (stdout, stderr) as a Python generator + object. + + @note: The caller must consume the iterator that is returned, in order + to make sure that the git command runs and terminates. """ + def rm_polled_fd(file_obj, select_list): + file_obj.close() + select_list.remove(file_obj) + cmd = ['git', command] + args env = cls.__build_env(extra_env) + stdout_arg = subprocess.PIPE if capture_stdout else None + stdin_arg = subprocess.PIPE if stdin else None stderr_arg = subprocess.PIPE if capture_stderr else None - stdin_arg = subprocess.PIPE if input else None log.debug(cmd) popen = subprocess.Popen(cmd, stdin=stdin_arg, - stdout=subprocess.PIPE, + stdout=stdout_arg, stderr=stderr_arg, env=env, close_fds=True, cwd=cwd) - (stdout, stderr) = popen.communicate(input) - return stdout, stderr, popen.returncode - - def _git_command(self, command, args=[], extra_env=None): + out_fds = [popen.stdout] if capture_stdout else [] + if capture_stderr: + out_fds.append(popen.stderr) + in_fds = [popen.stdin] if stdin else [] + w_ind = 0 + while out_fds or in_fds: + ready = select.select(out_fds, in_fds, []) + # Write in chunks of 512 bytes + if ready[1]: + popen.stdin.write(stdin[w_ind:w_ind+512]) + w_ind += 512 + if w_ind > len(stdin): + rm_polled_fd(popen.stdin, in_fds) + # Read in chunks of 4k + stdout = popen.stdout.read(4096) if popen.stdout in ready[0] else '' + stderr = popen.stderr.read(4096) if popen.stderr in ready[0] else '' + if popen.stdout in ready[0] and not stdout: + rm_polled_fd(popen.stdout, out_fds) + if popen.stderr in ready[0] and not stderr: + rm_polled_fd(popen.stderr, out_fds) + yield stdout, stderr + + if popen.wait(): + err = GitRepositoryError('git-%s failed' % command) + err.returncode = popen.returncode + raise err + + def _git_command(self, command, args=[], extra_env=None, interactive=False): """ Execute git command with arguments args and environment env at path. 
@@ -196,16 +283,19 @@ def _git_command(self, command, args=[], extra_env=None): @param extra_env: extra environment variables to set when running command @type extra_env: C{dict} """ + capture_stdout = not interactive try: stdout, stderr, ret = self._git_inout(command=command, args=args, input=None, extra_env=extra_env, - capture_stderr=True) + capture_stderr=True, + capture_stdout=capture_stdout) except Exception as excobj: raise GitRepositoryError("Error running git %s: %s" % (command, excobj)) if ret: - raise GitRepositoryError("Error running git %s: %s" % (command, stderr)) + raise GitRepositoryError("Error running git %s: %s" % + (command, stderr.strip())) def _cmd_has_feature(self, command, feature): @@ -232,6 +322,7 @@ def _cmd_has_feature(self, command, feature): section_re = re.compile(r'^(?P
[A-Z].*)') option_re = re.compile(r'--?(?P[a-zA-Z\-]+).*') optopt_re = re.compile(r'--\[(?P[a-zA-Z\-]+)\]-?') + backspace_re = re.compile(".\b") man_section = None for line in help.splitlines(): if man_section == "OPTIONS" and line.startswith(' -'): @@ -249,7 +340,7 @@ def _cmd_has_feature(self, command, feature): # Check man section match = section_re.match(line) if match: - man_section = match.group('section') + man_section = backspace_re.sub('', match.group('section')) return False @property @@ -609,7 +700,8 @@ def get_upstream_branch(self, local_branch): #{ Tags - def create_tag(self, name, msg=None, commit=None, sign=False, keyid=None): + def create_tag(self, name, msg=None, commit=None, sign=False, keyid=None, + annotate=False): """ Create a new tag. @@ -624,15 +716,18 @@ def create_tag(self, name, msg=None, commit=None, sign=False, keyid=None): @type sign: C{bool} @param keyid: the GPG keyid used to sign the tag @type keyid: C{str} + @param annotate: Create an annotated tag + @type annotate: C{bool} """ - args = [] - args += [ '-m', msg ] if msg else [] + args = GitArgs() + args.add_true(msg, ['-m', msg]) if sign: - args += [ '-s' ] - args += [ '-u', keyid ] if keyid else [] - args += [ name ] - args += [ commit ] if commit else [] - self._git_command("tag", args) + args.add('-s') + args.add_true(keyid, ['-u', keyid]) + args.add_true(annotate, '-a') + args.add(name) + args.add_true(commit, commit) + self._git_command("tag", args.args, interactive=True) def delete_tag(self, tag): """ @@ -899,10 +994,11 @@ def rev_parse(self, name, short=0): args = GitArgs("--quiet", "--verify") args.add_cond(short, '--short=%d' % short) args.add(name) - sha, ret = self._git_getoutput('rev-parse', args.args) + sha, stderr, ret = self._git_inout('rev-parse', args.args, + capture_stderr=True) if ret: raise GitRepositoryError("revision '%s' not found" % name) - return self.strip_sha1(sha[0], short) + return self.strip_sha1(sha.splitlines()[0], short) @staticmethod def strip_sha1(sha1, length=0): @@ -945,6 +1041,19 @@ def checkout(self, treeish): """ self._git_command("checkout", ["--quiet", treeish]) + def checkout_files(self, treeish, paths): + """ + Checkout files from a treeish. Branch will not be changed. + + @param treeish: the treeish from which to check out files + @type treeish: C{str} + @param paths: list of files to checkout + @type paths: C{list} of C{str} + """ + args = GitArgs("--quiet", treeish) + args.add_true(paths, '--', paths) + self._git_command("checkout", args.args) + def has_treeish(self, treeish): """ Check if the repository has the treeish object I{treeish}. 
@@ -1119,10 +1228,25 @@ def get_remote_repos(self): @deprecated: Use get_remotes() instead @return: remote repositories - @rtype: C{list} of C{str} + @rtype: C{dict} of C{list} of C{str} """ - out = self._git_getoutput('remote')[0] - return [ remote.strip() for remote in out ] + stdout, stderr, ret = self._git_inout('remote', ['-v'], + capture_stderr=True) + if ret: + raise GitRepositoryError('Failed to get remotes: %s' % stderr) + + remotes = {} + for rem in stdout.splitlines(): + name, url_urltype = rem.split('\t', 1) + url, urltype = url_urltype.rsplit(' ', 1) + urltype = urltype.strip('()') + if not name in remotes: + remotes[name] = [''] + if urltype == 'fetch': + remotes[name][0] = url + else: + remotes[name].append(url) + return remotes def has_remote_repo(self, name): """ @@ -1256,7 +1380,7 @@ def push_tag(self, repo, tag): #{ Files - def add_files(self, paths, force=False, index_file=None, work_tree=None): + def add_files(self, paths, force=False, untracked=True, index_file=None, work_tree=None): """ Add files to a the repository @@ -1264,15 +1388,17 @@ def add_files(self, paths, force=False, index_file=None, work_tree=None): @type paths: list or C{str} @param force: add files even if they would be ignored by .gitignore @type force: C{bool} + @param untracked: add also previously untracked files + @type untracked: C{bool} @param index_file: alternative index file to use @param work_tree: alternative working tree to use """ extra_env = {} - if isinstance(paths, six.string_types): - paths = [ paths ] - - args = [ '-f' ] if force else [] + args = GitArgs() + args.add_true(force, '-f') + args.add_cond(untracked, '-A', '-u') + args.add(paths) if index_file: extra_env['GIT_INDEX_FILE'] = index_file @@ -1280,7 +1406,7 @@ def add_files(self, paths, force=False, index_file=None, work_tree=None): if work_tree: extra_env['GIT_WORK_TREE'] = work_tree - self._git_command("add", args + paths, extra_env) + self._git_command("add", args.args, extra_env) def remove_files(self, paths, verbose=False): """ @@ -1345,16 +1471,23 @@ def write_file(self, filename, filters=True): if not ret: return self.strip_sha1(sha1) else: - raise GbpError("Failed to hash %s: %s" % (filename, stderr)) + raise GitRepositoryError("Failed to hash %s: %s" % (filename, + stderr)) #} #{ Comitting - def _commit(self, msg, args=[], author_info=None): + def _commit(self, msg, args=[], author_info=None, + committer_info=None, edit=False): extra_env = author_info.get_author_env() if author_info else None - self._git_command("commit", ['-q', '-m', msg] + args, extra_env=extra_env) + if committer_info: + extra_env.update(committer_info.get_committer_env()) + default_args = ['-q', '-m', msg] + (['--edit'] if edit else []) + self._git_command("commit", default_args + args, extra_env=extra_env, + interactive=edit) - def commit_staged(self, msg, author_info=None, edit=False): + def commit_staged(self, msg, author_info=None, edit=False, + committer_info=None): """ Commit currently staged files to the repository @@ -1364,10 +1497,11 @@ def commit_staged(self, msg, author_info=None, edit=False): @type author_info: L{GitModifier} @param edit: whether to spawn an editor to edit the commit info @type edit: C{bool} + @param committer_info: committer information + @type committer_info: L{GitModifier} """ - args = GitArgs() - args.add_true(edit, '--edit') - self._commit(msg=msg, args=args.args, author_info=author_info) + self._commit(msg=msg, author_info=author_info, + committer_info=committer_info, edit=edit) def commit_all(self, msg, 
author_info=None, edit=False): """ @@ -1377,11 +1511,10 @@ def commit_all(self, msg, author_info=None, edit=False): @param author_info: authorship information @type author_info: L{GitModifier} """ - args = GitArgs('-a') - args.add_true(edit, '--edit') - self._commit(msg=msg, args=args.args, author_info=author_info) + self._commit(msg=msg, args=['-a'], author_info=author_info, edit=edit) - def commit_files(self, files, msg, author_info=None): + def commit_files(self, files, msg, author_info=None, committer_info=None, + edit=False): """ Commit the given files to the repository @@ -1391,10 +1524,33 @@ def commit_files(self, files, msg, author_info=None): @type msg: C{str} @param author_info: authorship information @type author_info: L{GitModifier} + @param committer_info: committer information + @type committer_info: L{GitModifier} + @param edit: whether to spawn an editor to edit the commit info + @type edit: C{bool} """ - if isinstance(files, six.string_types): - files = [ files ] - self._commit(msg=msg, args=files, author_info=author_info) + args = GitArgs('--') + args.add(files) + self._commit(msg=msg, args=args.args, author_info=author_info, + committer_info=committer_info, edit=edit) + + def create_tree(self, unpack_dir): + """ + Create a tree object out of a directory content + + @param unpack_dir: content to add + @type unpack_dir: C{str} + @return: the tree object hash + @rtype: C{str} + """ + git_index_file = os.path.join(self.path, self._git_dir, 'gbp_index') + try: + os.unlink(git_index_file) + except OSError: + pass + self.add_files('.', force=True, index_file=git_index_file, + work_tree=unpack_dir) + return self.write_tree(git_index_file) def commit_dir(self, unpack_dir, msg, branch, other_parents=None, author={}, committer={}, create_missing_branch=False): @@ -1418,15 +1574,7 @@ def commit_dir(self, unpack_dir, msg, branch, other_parents=None, doesn't already exist. 
@type create_missing_branch: C{bool} """ - - git_index_file = os.path.join(self.path, self._git_dir, 'gbp_index') - try: - os.unlink(git_index_file) - except OSError: - pass - self.add_files('.', force=True, index_file=git_index_file, - work_tree=unpack_dir) - tree = self.write_tree(git_index_file) + tree = self.create_tree(unpack_dir) if branch: try: @@ -1489,7 +1637,7 @@ def commit_tree(self, tree, msg, parents, author={}, committer={}): if not ret: return self.strip_sha1(sha1) else: - raise GbpError("Failed to commit tree: %s" % stderr) + raise GitRepositoryError("Failed to commit tree: %s" % stderr) #{ Commit Information @@ -1689,7 +1837,7 @@ def diff(self, obj1, obj2=None, paths=None, stat=False, summary=False, options.add('--stat=%s' % stat) options.add_true(summary, '--summary') options.add_true(text, '--text') - options.add_true(ignore_submodules, '--ignore-submodules=all') + options.add_true(ignore_submodules, '--ignore-submodules') options.add(obj1) options.add_true(obj2, obj2) if paths: @@ -1727,7 +1875,7 @@ def diff_status(self, obj1, obj2): return result #} - def archive(self, format, prefix, output, treeish, **kwargs): + def archive(self, format, prefix, output, treeish, paths=None): """ Create an archive from a treeish @@ -1735,17 +1883,30 @@ def archive(self, format, prefix, output, treeish, **kwargs): @type format: C{str} @param prefix: prefix to prepend to each filename in the archive @type prefix: C{str} - @param output: the name of the archive to create - @type output: C{str} + @param output: the name of the archive to create, empty string or + C{None} gives data as return value + @type output: C{str} or C{None} @param treeish: the treeish to create the archive from @type treeish: C{str} - @param kwargs: additional commandline options passed to git-archive + @param paths: List of paths to include in the archive + @type paths: C{list} of C{str} + + @return: archive data as a generator object + @rtype: C{None} or C{generator} of C{str} """ - args = [ '--format=%s' % format, '--prefix=%s' % prefix, - '--output=%s' % output, treeish ] - out, ret = self._git_getoutput('archive', args, **kwargs) - if ret: - raise GitRepositoryError("Unable to archive %s" % treeish) + args = GitArgs('--format=%s' % format, '--prefix=%s' % prefix) + args.add_true(output, '--output=%s' % output) + args.add(treeish) + args.add("--") + args.add_cond(paths, paths) + + if output: + out, err, ret = self._git_inout('archive', args.args) + if ret: + raise GitRepositoryError("Unable to archive %s: %s" % (treeish, + err)) + else: + return self._git_inout2('archive', args.args) def collect_garbage(self, auto=False): """ @@ -1867,18 +2028,20 @@ def create(klass, path, description=None, bare=False): try: if not os.path.exists(abspath): os.makedirs(abspath) + stderr = '' try: - stdout, stderr, ret = klass.__git_inout(command='init', - args=args.args, - input=None, - extra_env=None, - cwd=abspath, - capture_stderr=True) + for out in klass.__git_inout(command='init', + args=args.args, + stdin=None, + extra_env=None, + cwd=abspath, + capture_stderr=True, + capture_stdout=True): + stderr += out[1] + except GitRepositoryError: + raise GitRepositoryError("Error running git init: %s" % stderr) except Exception as excobj: raise GitRepositoryError("Error running git init: %s" % excobj) - if ret: - raise GitRepositoryError("Error running git init: %s" % stderr) - if description: with open(os.path.join(abspath, git_dir, "description"), 'w') as f: description += '\n' if description[-1] != '\n' else '' @@ -1932,18 
+2095,20 @@ def clone(klass, path, remote, depth=0, recursive=False, mirror=False, try: if not os.path.exists(abspath): os.makedirs(abspath) - + stderr = '' try: - stdout, stderr, ret = klass.__git_inout(command='clone', - args=args.args, - input=None, - extra_env=None, - cwd=abspath, - capture_stderr=True) + for out in klass.__git_inout(command='clone', + args=args.args, + stdin=None, + extra_env=None, + cwd=abspath, + capture_stderr=True, + capture_stdout=True): + stderr += out[1] + except GitRepositoryError: + raise GitRepositoryError("Error running git clone: %s" % stderr) except Exception as excobj: raise GitRepositoryError("Error running git clone: %s" % excobj) - if ret: - raise GitRepositoryError("Error running git clone: %s" % stderr) if not name: try: diff --git a/gbp/log.py b/gbp/log.py index 9248ec94..1581b17a 100644 --- a/gbp/log.py +++ b/gbp/log.py @@ -20,9 +20,11 @@ import os import sys import logging -from logging import (DEBUG, INFO, WARNING, ERROR, CRITICAL, getLogger) +from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL import gbp.tristate +# Initialize default logger +LOGGER = logging.getLogger(__name__) COLORS = dict([('none', 0)] + list(zip(['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'], range(30, 38)))) @@ -52,7 +54,7 @@ class GbpStreamHandler(logging.StreamHandler): OFF_SEQ = "\033[0m" def __init__(self, stream=None, color='auto'): - super(GbpStreamHandler, self).__init__(stream) + logging.StreamHandler.__init__(self, stream) self._color = gbp.tristate.Tristate(color) self._color_scheme = DEFAULT_COLOR_SCHEME.copy() msg_fmt = "%(color)s%(name)s:%(levelname)s: %(message)s%(coloroff)s" @@ -88,35 +90,41 @@ def format(self, record): record.color = self.COLOR_SEQ % self._color_scheme[record.levelno] record.coloroff = self.OFF_SEQ record.levelname = record.levelname.lower() - return super(GbpStreamHandler, self).format(record) + return logging.StreamHandler.format(self, record) class GbpLogger(logging.Logger): """Logger class for git-buildpackage""" - def __init__(self, name, color='auto', *args, **kwargs): - super(GbpLogger, self).__init__(name, *args, **kwargs) - self._default_handlers = [GbpStreamHandler(sys.stdout, color), - GbpStreamHandler(sys.stderr, color)] - self._default_handlers[0].addFilter(GbpFilter([DEBUG, INFO])) - self._default_handlers[1].addFilter(GbpFilter([WARNING, ERROR, - CRITICAL])) - for hdlr in self._default_handlers: + def __init__(self, name, *args, **kwargs): + logging.Logger.__init__(self, name, *args, **kwargs) + self.default_handlers = [] + + def init_default_handlers(self, color='auto'): + """Initialize and set default handlers to logger""" + self.default_handlers = [GbpStreamHandler(sys.stdout, color), + GbpStreamHandler(sys.stderr, color)] + self.default_handlers[0].addFilter(GbpFilter([DEBUG, INFO])) + self.default_handlers[1].addFilter(GbpFilter([WARNING, ERROR, + CRITICAL])) + for hdlr in self.default_handlers: self.addHandler(hdlr) + # We don't want to propagate as we add our own handlers + self.propagate = False def set_color(self, color): """Set/unset colorized output of the default handlers""" - for hdlr in self._default_handlers: + for hdlr in self.default_handlers: hdlr.set_color(color) def set_color_scheme(self, color_scheme={}): """Set the color scheme of the default handlers""" - for hdlr in self._default_handlers: + for hdlr in self.default_handlers: hdlr.set_color_scheme(color_scheme) def set_format(self, fmt): """Set the format of the default handlers""" - for hdlr in self._default_handlers: + 
for hdlr in self.default_handlers: hdlr.set_format(fmt) @@ -162,8 +170,22 @@ def _parse_color_scheme(color_scheme=""): except KeyError: pass return scheme +def getLogger(*args, **kwargs): + """Gbp-specific function""" + if not issubclass(logging.getLoggerClass(), GbpLogger): + logging.setLoggerClass(GbpLogger) + color = kwargs.pop('color') if 'color' in kwargs else 'auto' + logger = logging.getLogger(*args, **kwargs) + if hasattr(logger, 'default_handlers') and not logger.default_handlers: + logger.init_default_handlers(color) + return logger + def setup(color, verbose, color_scheme=""): """Basic logger setup""" + # Initialize, if not done yet + if not isinstance(LOGGER, GbpLogger): + initialize() + LOGGER.set_color(color) LOGGER.set_color_scheme(_parse_color_scheme(color_scheme)) if verbose: @@ -171,9 +193,8 @@ def setup(color, verbose, color_scheme=""): else: LOGGER.setLevel(INFO) - -# Initialize the module -logging.setLoggerClass(GbpLogger) - -LOGGER = getLogger("gbp") +def initialize(): + """Initialize the logger module""" + global LOGGER + LOGGER = getLogger("gbp") diff --git a/gbp/pkg/__init__.py b/gbp/pkg/__init__.py index f08ed3a8..ab4e5016 100644 --- a/gbp/pkg/__init__.py +++ b/gbp/pkg/__init__.py @@ -20,6 +20,9 @@ import os import re import glob +import stat +import subprocess +import zipfile import six @@ -27,10 +30,10 @@ from gbp.errors import GbpError # compression types, extra options and extensions -compressor_opts = { 'gzip' : [ '-n', 'gz' ], - 'bzip2' : [ '', 'bz2' ], - 'lzma' : [ '', 'lzma' ], - 'xz' : [ '', 'xz' ] } +compressor_opts = { 'gzip' : [ ['-n'], 'gz' ], + 'bzip2' : [ [], 'bz2' ], + 'lzma' : [ [], 'lzma' ], + 'xz' : [ [], 'xz' ] } # Map frequently used names of compression types to the internal ones: compressor_aliases = { 'bz2' : 'bzip2', @@ -133,6 +136,75 @@ def is_valid_upstreamversion(cls, version): raise NotImplementedError("Class needs to provide upstreamversion_re") return True if cls.upstreamversion_re.match(version) else False + @classmethod + def is_valid_orig_archive(cls, filename): + "Is this a valid orig source archive" + (base, arch_fmt, compression) = parse_archive_filename(filename) + if arch_fmt == 'tar' and compression: + return True + return False + + @classmethod + def guess_upstream_src_version(cls, filename, extra_regex=r''): + """ + Guess the package name and version from the filename of an upstream + archive. 
+ + @param filename: filename (archive or directory) from which to guess + @type filename: C{string} + @param extra_regex: additional regex to apply, needs a 'package' and a + 'version' group + @return: (package name, version) or ('', '') + @rtype: tuple + + >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.gz') + ('foo-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('foo-Bar_0.2.orig.tar.gz') + ('foo-Bar', '0.2.orig') + >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2.tar.gz') + ('git-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2-rc1.tar.gz') + ('git-bar', '0.2-rc1') + >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2:~-rc1.tar.gz') + ('git-bar', '0.2:~-rc1') + >>> PkgPolicy.guess_upstream_src_version('git-Bar-0A2d:rc1.tar.bz2') + ('git-Bar', '0A2d:rc1') + >>> PkgPolicy.guess_upstream_src_version('git-1.tar.bz2') + ('git', '1') + >>> PkgPolicy.guess_upstream_src_version('kvm_87+dfsg.orig.tar.gz') + ('kvm', '87+dfsg') + >>> PkgPolicy.guess_upstream_src_version('foo-Bar-a.b.tar.gz') + ('', '') + >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.xz') + ('foo-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.tar.gz') + ('foo-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.lzma') + ('foo-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('foo-bar-0.2.zip') + ('foo-bar', '0.2') + >>> PkgPolicy.guess_upstream_src_version('foo-bar-0.2.tlz') + ('foo-bar', '0.2') + """ + version_chars = r'[a-zA-Z\d\.\~\-\:\+]' + basename = parse_archive_filename(os.path.basename(filename))[0] + + version_filters = map ( lambda x: x % version_chars, + ( # Debian upstream tarball: package_'.orig.tar.gz' + r'^(?P[a-z\d\.\+\-]+)_(?P%s+)\.orig', + # Upstream 'package-.tar.gz' + # or Debian native 'package_.tar.gz' + # or directory 'package-': + r'^(?P[a-zA-Z\d\.\+\-]+)(-|_)(?P[0-9]%s*)')) + if extra_regex: + version_filters = extra_regex + version_filters + + for filter in version_filters: + m = re.match(filter, basename) + if m: + return (m.group('package'), m.group('version')) + return ('', '') + @staticmethod def guess_upstream_src_version(filename, extra_regex=r''): """ @@ -243,11 +315,20 @@ class UpstreamSource(object): @cvar _unpacked: path to the unpacked source tree @type _unpacked: string """ - def __init__(self, name, unpacked=None, pkg_policy=PkgPolicy): + def __init__(self, name, unpacked=None, pkg_policy=PkgPolicy, prefix=None): self._orig = False + self._tarball = False self._pkg_policy = pkg_policy - self._path = name + self._path = os.path.abspath(name) + if not os.path.exists(self._path): + raise GbpError('UpstreamSource: unable to find %s' % self._path) self.unpacked = unpacked + self._filename_base, \ + self._archive_fmt, \ + self._compression = parse_archive_filename(os.path.basename(self.path)) + self._prefix = prefix + if self._prefix is None: + self._determine_prefix() self._check_orig() if self.is_dir(): @@ -264,18 +345,11 @@ def _check_orig(self): """ if self.is_dir(): self._orig = False + self._tarball = False return - parts = self._path.split('.') - try: - if parts[-1] == 'tgz': - self._orig = True - elif parts[-2] == 'tar': - if (parts[-1] in compressor_opts or - parts[-1] in compressor_aliases): - self._orig = True - except IndexError: - self._orig = False + self._tarball = True if self.archive_fmt == 'tar' else False + self._orig = self._pkg_policy.is_valid_orig_archive(os.path.basename(self.path)) def is_orig(self): """ @@ -285,6 +359,13 @@ def is_orig(self): """ return 
self._orig + def is_tarball(self): + """ + @return: C{True} if source is a tarball, C{False} otherwise + @rtype: C{bool} + """ + return self._tarball + def is_dir(self): """ @return: C{True} if if upstream sources are an unpacked directory, @@ -297,6 +378,83 @@ def is_dir(self): def path(self): return self._path.rstrip('/') + + @staticmethod + def _get_topdir_files(file_list): + """Parse content of the top directory from a file list + + >>> UpstreamSource._get_topdir_files([]) + set([]) + >>> UpstreamSource._get_topdir_files([('-', 'foo/bar')]) + set([('d', 'foo')]) + >>> UpstreamSource._get_topdir_files([('d', 'foo/'), ('-', 'foo/bar')]) + set([('d', 'foo')]) + >>> UpstreamSource._get_topdir_files([('d', 'foo'), ('-', 'foo/bar')]) + set([('d', 'foo')]) + >>> UpstreamSource._get_topdir_files([('-', 'fob'), ('d', 'foo'), ('d', 'foo/bar'), ('-', 'foo/bar/baz')]) + set([('-', 'fob'), ('d', 'foo')]) + >>> UpstreamSource._get_topdir_files([('-', './foo/bar')]) + set([('d', 'foo')]) + >>> UpstreamSource._get_topdir_files([('-', 'foo/bar'), ('-', '.foo/bar')]) + set([('d', '.foo'), ('d', 'foo')]) + """ + topdir_files = set() + for typ, path in file_list: + split = re.sub('^(?:./|../)*', '', path).split('/') + if len(split) == 1: + topdir_files.add((typ, path)) + else: + topdir_files.add(('d', split[0])) + return topdir_files + + def _determine_prefix(self): + """Determine the prefix, i.e. the "leading directory name""" + self._prefix = '' + if self.is_dir(): + # For directories we presume that the prefix is just the dirname + self._prefix = os.path.basename(self.path.rstrip('/')) + else: + files = [] + if self._archive_fmt == 'zip': + archive = zipfile.ZipFile(self.path) + for info in archive.infolist(): + typ = 'd' if stat.S_ISDIR(info.external_attr >> 16) else '?' + files.append((typ, info.filename)) + elif self._archive_fmt == 'tar': + popen = subprocess.Popen(['tar', '-t', '-v', '-f', self.path], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, _err = popen.communicate() + if popen.returncode: + raise GbpError("Listing tar archive content failed") + for line in out.splitlines(): + fields = line.split(None, 5) + files.append((fields[0][0], fields[-1])) + else: + raise GbpError("Unsupported archive format %s, unable to " + "determine prefix for '%s'" % + (self._archive_fmt, self.path)) + # Determine prefix from the archive content + topdir_files = self._get_topdir_files(files) + if len(topdir_files) == 1: + typ, name = topdir_files.pop() + if typ == 'd': + self._prefix = name + + @property + def archive_fmt(self): + """Archive format of the sources, e.g. 'tar'""" + return self._archive_fmt + + @property + def compression(self): + """Compression format of the sources, e.g. 'gzip'""" + return self._compression + + @property + def prefix(self): + """Prefix, i.e. the 'leading directory name' of the sources""" + return self._prefix + def unpack(self, dir, filters=[]): """ Unpack packed upstream sources into a given directory @@ -311,18 +469,27 @@ def unpack(self, dir, filters=[]): if type(filters) != type([]): raise GbpError("Filters must be a list") - self._unpack_archive(dir, filters) - self.unpacked = self._unpacked_toplevel(dir) + if self._unpack_archive(dir, filters): + ret = type(self)(dir, prefix=self._prefix) + else: + ret = self + src_dir = os.path.join(dir, self._prefix) + ret.unpacked = src_dir if os.path.isdir(src_dir) else dir + return ret def _unpack_archive(self, dir, filters): """ - Unpack packed upstream sources into a given directory. 
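Illustration (not part of the patch): _determine_prefix() above lists tar archives by shelling out to 'tar -t -v -f' and reduces the listing with _get_topdir_files(). A rough standalone sketch of the same idea using the stdlib tarfile module instead of the tar binary — the function name is invented for the example:

import re
import tarfile

def guess_tarball_prefix(path):
    """Return the single leading directory of a tarball, or '' if there is none."""
    top = set()
    with tarfile.open(path) as tar:
        for member in tar.getmembers():
            # Strip leading './' or '../' components, like _get_topdir_files()
            name = re.sub(r'^(?:\./|\.\./)*', '', member.name)
            if '/' in name:
                top.add(('d', name.split('/', 1)[0]))   # implied top-level dir
            else:
                top.add(('d' if member.isdir() else '-', name))
    if len(top) == 1:
        typ, name = top.pop()
        if typ == 'd':
            return name
    return ''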
+ Unpack packed upstream sources into a given directory. Return True if + the output was filtered, otherwise False. """ ext = os.path.splitext(self.path)[1] if ext in [ ".zip", ".xpi" ]: self._unpack_zip(dir) else: self._unpack_tar(dir, filters) + if filters: + return True + return False def _unpack_zip(self, dir): try: @@ -330,16 +497,6 @@ def _unpack_zip(self, dir): except gbpc.CommandExecFailed: raise GbpError("Unpacking of %s failed" % self.path) - def _unpacked_toplevel(self, dir): - """unpacked archives can contain a leading directory or not""" - unpacked = glob.glob('%s/*' % dir) - unpacked.extend(glob.glob("%s/.*" % dir)) # include hidden files and folders - # Check that dir contains nothing but a single folder: - if len(unpacked) == 1 and os.path.isdir(unpacked[0]): - return unpacked[0] - else: - return dir - def _unpack_tar(self, dir, filters): """ Unpack a tarball to I{dir} applying a list of I{filters}. Leave the @@ -352,7 +509,7 @@ def _unpack_tar(self, dir, filters): # unpackArchive already printed an error message raise GbpError - def pack(self, newarchive, filters=[]): + def pack(self, newarchive, filters=[], newprefix=None): """ Recreate a new archive from the current one @@ -360,6 +517,8 @@ def pack(self, newarchive, filters=[]): @type newarchive: string @param filters: tar filters to apply @type filters: array of strings + @param newprefix: new prefix, None implies that prefix is not mangled + @type newprefix: string or None @return: the new upstream source @rtype: UpstreamSource """ @@ -372,17 +531,30 @@ def pack(self, newarchive, filters=[]): if type(filters) != type([]): raise GbpError("Filters must be a list") + run_dir = os.path.dirname(self.unpacked.rstrip('/')) + pack_this = os.path.basename(self.unpacked.rstrip('/')) + transform = None + if newprefix is not None: + newprefix = newprefix.strip('/.') + if newprefix: + transform = 's!%s!%s!' % (pack_this, newprefix) + else: + transform = 's!%s!%s!' 
% (pack_this, '.') try: - unpacked = self.unpacked.rstrip('/') repackArchive = gbpc.PackTarArchive(newarchive, - os.path.dirname(unpacked), - os.path.basename(unpacked), - filters) + run_dir, + pack_this, + filters, + transform=transform) repackArchive() except gbpc.CommandExecFailed: # repackArchive already printed an error raise GbpError - return type(self)(newarchive) + new = type(self)(newarchive) + # Reuse the same unpacked dir if the content matches + if not filters: + new.unpacked = self.unpacked + return new @staticmethod def known_compressions(): diff --git a/gbp/rpm/__init__.py b/gbp/rpm/__init__.py index 119d55ed..4102ade9 100644 --- a/gbp/rpm/__init__.py +++ b/gbp/rpm/__init__.py @@ -224,20 +224,28 @@ def ignorepatches(self): def _patches(self): """Get all patch tags as a dict""" + patches = {} if 'patch' not in self._tags: return {} - return {patch['num']: patch for patch in self._tags['patch']['lines']} + for patch in self._tags['patch']['lines']: + patches[patch['num']] = patch + return patches def _sources(self): """Get all source tags as a dict""" + sources = {} if 'source' not in self._tags: return {} - return {src['num']: src for src in self._tags['source']['lines']} + for src in self._tags['source']['lines']: + sources[src['num']] = src + return sources def sources(self): """Get all source tags as a dict""" - return {src['num']: src['linevalue'] - for src in self._sources().values()} + sources = {} + for src in self._sources().values(): + sources[src['num']] = src['linevalue'] + return sources def _macro_replace(self, matchobj): macro_dict = {'name': self.name, diff --git a/gbp/rpm/changelog.py b/gbp/rpm/changelog.py new file mode 100644 index 00000000..3ed89f12 --- /dev/null +++ b/gbp/rpm/changelog.py @@ -0,0 +1,246 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +"""An RPM Changelog""" + +import datetime +import re + +import gbp.log + + +class ChangelogError(Exception): + """Problem parsing changelog""" + pass + + +class _ChangelogHeader(object): + """The header part of one changelog section""" + + def __init__(self, pkgpolicy, time=None, **kwargs): + self._pkgpolicy = pkgpolicy + self._data = {'time': time} + self._data.update(kwargs) + + def __contains__(self, key): + return key in self._data + + def __getitem__(self, key): + if key in self._data: + return self._data[key] + return None + + def __str__(self): + keys = dict(self._data) + keys['time'] = self._data['time'].strftime( + self._pkgpolicy.Changelog.header_time_format) + try: + return self._pkgpolicy.Changelog.header_format % keys + '\n' + except KeyError as err: + raise ChangelogError("Unable to format changelog header, missing " + "property %s" % err) + + +class _ChangelogEntry(object): + """An entry (one 'change') in an RPM changelog""" + + def __init__(self, pkgpolicy, author, text): + """ + @param pkgpolicy: RPM packaging policy + @type pkgpolicy: L{RpmPkgPolicy} + @param author: author of the change + @type author: C{str} + @param text: message of the changelog entry + @type text: C{str} or C{list} of C{str} + """ + self._pkgpolicy = pkgpolicy + self.author = author + if isinstance(text, str): + self._text = text.splitlines() + else: + self._text = text + # Strip trailing empty lines + while text and not text[-1].strip(): + text.pop() + + def __str__(self): + # Currently no (re-)formatting, just raw text + string = "" + for line in self._text: + string += line + '\n' + return string + + +class _ChangelogSection(object): + """One section (set of changes) in an RPM changelog""" + + def __init__(self, pkgpolicy, *args, **kwargs): + self._pkgpolicy = pkgpolicy + self.header = _ChangelogHeader(pkgpolicy, *args, **kwargs) + self.entries = [] + self._trailer = '\n' + + + def __str__(self): + text = str(self.header) + for entry in self.entries: + text += str(entry) + # Add "section separator" + text += self._trailer + return text + + def set_header(self, *args, **kwargs): + """Change the section header""" + self.header = _ChangelogHeader(self._pkgpolicy, *args, **kwargs) + + def append_entry(self, entry): + """Add a new entry to the end of the list of entries""" + self.entries.append(entry) + return entry + + +class Changelog(object): + """An RPM changelog""" + + def __init__(self, pkgpolicy): + self._pkgpolicy = pkgpolicy + self.sections = [] + + def __str__(self): + string = "" + for section in self.sections: + string += str(section) + return string + + def create_entry(self, *args, **kwargs): + """Create and return new entry object""" + return _ChangelogEntry(self._pkgpolicy, *args, **kwargs) + + def add_section(self, *args, **kwargs): + """Add new empty section""" + section = _ChangelogSection(self._pkgpolicy, *args, **kwargs) + self.sections.insert(0, section) + return section + + +class ChangelogParser(object): + """Parser for RPM changelogs""" + + def __init__(self, pkgpolicy): + self._pkgpolicy = pkgpolicy + self.section_match_re = pkgpolicy.Changelog.section_match_re + self.section_split_re = pkgpolicy.Changelog.section_split_re + self.header_split_re = pkgpolicy.Changelog.header_split_re + self.header_name_split_re = pkgpolicy.Changelog.header_name_split_re + self.body_name_re = pkgpolicy.Changelog.body_name_re + + def raw_parse_string(self, string): + """Parse 
changelog - only splits out raw changelog sections.""" + changelog = Changelog(self._pkgpolicy) + ch_section = "" + for line in string.splitlines(): + if re.match(self.section_match_re, line, re.M | re.S): + if ch_section: + changelog.sections.append(ch_section) + ch_section = line + '\n' + elif ch_section: + ch_section += line + '\n' + else: + raise ChangelogError("First line in changelog is invalid") + if ch_section: + changelog.sections.append(ch_section) + return changelog + + def raw_parse_file(self, changelog): + """Parse changelog file - only splits out raw changelog sections.""" + try: + with open(changelog) as ch_file: + return self.raw_parse_string(ch_file.read()) + except IOError as err: + raise ChangelogError("Unable to read changelog file: %s" % err) + + def _parse_section_header(self, text): + """Parse one changelog section header""" + # Try to split out time stamp and "changelog name" + match = re.match(self.header_split_re, text, re.M) + if not match: + raise ChangelogError("Unable to parse changelog header: %s" % text) + try: + time = datetime.datetime.strptime(match.group('ch_time'), + "%a %b %d %Y") + except ValueError: + raise ChangelogError("Unable to parse changelog header: invalid " + "timestamp '%s'" % match.group('ch_time')) + # Parse "name" part which consists of name and/or email and an optional + # revision + name_text = match.group('ch_name') + match = re.match(self.header_name_split_re, name_text) + if not match: + raise ChangelogError("Unable to parse changelog header: invalid " + "name / revision '%s'" % name_text) + kwargs = match.groupdict() + return _ChangelogSection(self._pkgpolicy, time=time, **kwargs) + + def _create_entry(self, author, text): + """Create a new changelog entry""" + return _ChangelogEntry(self._pkgpolicy, author=author, text=text) + + def _parse_section_entries(self, text, default_author): + """Parse entries from a string and add them to a section""" + entries = [] + entry_text = [] + author = default_author + for line in text.splitlines(): + match = re.match(self.body_name_re, line) + if match: + if entry_text: + entries.append(self._create_entry(author, entry_text)) + author = match.group('name') + else: + if line.startswith("-"): + if entry_text: + entries.append(self._create_entry(author, entry_text)) + entry_text = [line] + else: + if not entry_text: + gbp.log.info("First changelog entry (%s) is garbled, " + "entries should start with a dash ('-')" % + line) + entry_text.append(line) + if entry_text: + entries.append(self._create_entry(author, entry_text)) + + return entries + + + def parse_section(self, text): + """Parse one section""" + # Check that the first line(s) look like a changelog header + match = re.match(self.section_split_re, text, re.M | re.S) + if not match: + raise ChangelogError("Doesn't look like changelog header: %s..." 
% + text.splitlines()[0]) + # Parse header + section = self._parse_section_header(match.group('ch_header')) + header = section.header + # Parse entries + default_author = header['name'] if 'name' in header else header['email'] + for entry in self._parse_section_entries(match.group('ch_body'), + default_author): + section.append_entry(entry) + + return section + diff --git a/gbp/rpm/policy.py b/gbp/rpm/policy.py index 80e0abde..c7f2a4b8 100644 --- a/gbp/rpm/policy.py +++ b/gbp/rpm/policy.py @@ -17,13 +17,15 @@ """Default packaging policy for RPM""" import re + from gbp.pkg import PkgPolicy, parse_archive_filename +from gbp.scripts.common.pq import parse_gbp_commands class RpmPkgPolicy(PkgPolicy): """Packaging policy for RPM""" # Special rpmlib python module for GBP (only) - python_rpmlib_module_name = "rpm" + python_rpmlib_module_name = "rpm_tizen" alnum = 'a-zA-Z0-9' # Valid characters for RPM pkg name @@ -46,6 +48,9 @@ class RpmPkgPolicy(PkgPolicy): "and can only containg alphanumerics or characters " "in %s" % list(version_whitelist_chars)) + # Time stamp format to be used in tag names + tag_timestamp_format = "%Y%m%d" + @classmethod def is_valid_orig_archive(cls, filename): """ @@ -70,3 +75,131 @@ def is_valid_orig_archive(cls, filename): return True return False + class Changelog(object): + """Container for changelog related policy settings""" + + # Regexps for splitting/parsing the changelog section (of + # Tizen / Fedora style changelogs) + section_match_re = r'^\*' + section_split_re = r'^\*\s*(?P\S.*?)$\n(?P.*)' + header_split_re = r'(?P\S.*\s[0-9]{4})\s+(?P\S.*$)' + header_name_split_re = r'(?P[^<]*)\s+<(?P[^>]+)>((\s*-)?\s+(?P\S+))?$' + body_name_re = r'\[(?P.*)\]' + + # Changelog header format (when writing out changelog) + header_format = "* %(time)s %(name)s <%(email)s> %(revision)s" + header_time_format = "%a %b %d %Y" + header_rev_format = "%(version)s" + + + class ChangelogEntryFormatter(object): + """Helper class for generating changelog entries from git commits""" + + # Maximum length for a changelog entry line + max_entry_line_length = 76 + # Regexp for matching bug tracking system ids (e.g. "bgo#123") + bug_id_re = r'[A-Za-z0-9#_\-]+' + + @classmethod + def _parse_bts_tags(cls, lines, meta_tags): + """ + Parse and filter out bug tracking system related meta tags from + commit message. + + @param lines: commit message + @type lines: C{list} of C{str} + @param meta_tags: meta tags (regexp) to look for + @type meta_tags: C{str} + @return: bts-ids per meta tag and the non-mathced lines + @rtype: (C{dict}, C{list} of C{str}) + """ + if not meta_tags: + return ({}, lines[:]) + + tags = {} + other_lines = [] + bts_re = re.compile(r'^(?P%s):\s*(?P.*)' % meta_tags, + re.I) + bug_id_re = re.compile(cls.bug_id_re) + for line in lines: + match = bts_re.match(line) + if match: + tag = match.group('tag') + ids_str = match.group('ids') + bug_ids = [bug_id.strip() for bug_id in + bug_id_re.findall(ids_str)] + if tag in tags: + tags[tag] += bug_ids + else: + tags[tag] = bug_ids + else: + other_lines.append(line) + return (tags, other_lines) + + @classmethod + def _extra_filter(cls, lines, ignore_re): + """ + Filter out specific lines from the commit message. 
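Illustration (not part of the patch): a usage sketch of the changelog machinery added above — ChangelogParser driven by RpmPkgPolicy — against a minimal, invented Tizen/Fedora-style section; the import paths follow the new files in this patch:

from gbp.rpm.policy import RpmPkgPolicy
from gbp.rpm.changelog import ChangelogParser

text = ("* Wed Jan 15 2014 John Doe <john@example.com> 0.1-1\n"
        "- [abcdef1] Fix foo\n"
        "- Add bar support\n")
parser = ChangelogParser(RpmPkgPolicy)
changelog = parser.raw_parse_string(text)          # only splits out raw sections
section = parser.parse_section(changelog.sections[0])
print("%s made %d changes in revision %s" %
      (section.header['name'], len(section.entries), section.header['revision']))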
+ + @param lines: commit message + @type lines: C{list} of C{str} + @param ignore_re: regexp for matching ignored lines + @type ignore_re: C{str} + @return: filtered commit message + @rtype: C{list} of C{str} + """ + if ignore_re: + match = re.compile(ignore_re) + return [line for line in lines if not match.match(line)] + else: + return lines + + @classmethod + def compose(cls, commit_info, **kwargs): + """ + Generate a changelog entry from a git commit. + + @param commit_info: info about the commit + @type commit_info: C{commit_info} object from + L{gbp.git.repository.GitRepository.get_commit_info()}. + @param kwargs: additional arguments to the compose() method, + currently we recognize 'full', 'id_len' and 'ignore_re' + @type kwargs: C{dict} + @return: formatted changelog entry + @rtype: C{list} of C{str} + """ + # Parse and filter out gbp command meta-tags + cmds, body = parse_gbp_commands(commit_info, 'gbp-rpm-ch', + ('ignore', 'short', 'full'), ()) + body = body.splitlines() + if 'ignore' in cmds: + return None + + # Parse and filter out bts-related meta-tags + bts_tags, body = cls._parse_bts_tags(body, kwargs['meta_bts']) + + # Additional filtering + body = cls._extra_filter(body, kwargs['ignore_re']) + + # Generate changelog entry + subject = commit_info['subject'] + commitid = commit_info['id'] + if kwargs['id_len']: + text = ["- [%s] %s" % (commitid[0:kwargs['id_len']], subject)] + else: + text = ["- %s" % subject] + + # Add all non-filtered-out lines from commit message, unless 'short' + if (kwargs['full'] or 'full' in cmds) and not 'short' in cmds: + # Add all non-blank body lines. + text.extend([" " + line for line in body if line.strip()]) + + # Add bts tags and ids in the end + for tag, ids in bts_tags.iteritems(): + bts_msg = " (%s: %s)" % (tag, ', '.join(ids)) + if len(text[-1]) + len(bts_msg) >= cls.max_entry_line_length: + text.append(" ") + text[-1] += bts_msg + + return text + diff --git a/gbp/scripts/buildpackage.py b/gbp/scripts/buildpackage.py index b001d1c4..1e209699 100755 --- a/gbp/scripts/buildpackage.py +++ b/gbp/scripts/buildpackage.py @@ -37,11 +37,12 @@ from gbp.errors import GbpError import gbp.log import gbp.notifications -from gbp.scripts.common.buildpackage import (index_name, wc_name, +from gbp.scripts.common.buildpackage import (index_name, wc_names, git_archive_submodules, git_archive_single, dump_tree, write_wc, drop_index) from gbp.pkg import compressor_opts, compressor_aliases, parse_archive_filename +from gbp.tmpfile import init_tmpdir, del_tmpdir def git_archive(repo, cp, output_dir, treeish, comp_type, comp_level, with_submodules): "create a compressed orig tarball in output_dir using git_archive" @@ -60,7 +61,7 @@ def git_archive(repo, cp, output_dir, treeish, comp_type, comp_level, with_submo comp_type, comp_level, comp_opts) else: - git_archive_single(treeish, output, prefix, + git_archive_single(repo, treeish, output, prefix, comp_type, comp_level, comp_opts) except (GitRepositoryError, CommandExecFailed): gbp.log.err("Error generating submodules' archives") @@ -124,8 +125,9 @@ def write_tree(repo, options): if options.export_dir: if options.export == index_name: tree = repo.write_tree() - elif options.export == wc_name: - tree = write_wc(repo) + elif options.export in wc_names: + tree = write_wc(repo, wc_names[options.export]['force'], + wc_names[options.export]['untracked']) else: tree = options.export if not repo.has_treeish(tree): @@ -341,7 +343,7 @@ def guess_comp_type(repo, comp_type, cp, tarball_dir): def check_tag(options, repo, 
source): """Perform specified consistency checks on git history""" - tag = repo.version_to_tag(options.debian_tag, source.changelog.version) + tag = repo.version_to_tag(options.packaging_tag, source.changelog.version) if (options.tag or options.tag_only) and not options.retag: if repo.has_tag(tag): raise GbpError("Tag '%s' already exists" % tag) @@ -392,6 +394,15 @@ def setup_pbuilder(options, repo, native): os.getenv('DIST') or '(sid)')) +def disable_builder(options): + """Disable builder (and postbuild hook)""" + gbp.log.info("Disabling builder and postbuild hook") + options.builder = '' + options.postbuild = '' + options.pbuilder = None + options.qemubuilder = None + + def disable_hooks(options): """Disable all hooks (except for builder)""" for hook in ['cleaner', 'postexport', 'prebuild', 'postbuild', 'posttag']: @@ -417,9 +428,10 @@ def changes_file_suffix(dpkg_args): return os.getenv('ARCH', None) or du.get_arch() -def build_parser(name, prefix=None): +def build_parser(name, prefix=None, git_treeish=None): try: - parser = GbpOptionParserDebian(command=os.path.basename(name), prefix=prefix) + parser = GbpOptionParserDebian(command=os.path.basename(name), + prefix=prefix, git_treeish=git_treeish) except configparser.ParsingError as err: gbp.log.err(err) return None @@ -436,12 +448,15 @@ def build_parser(name, prefix=None): parser.add_option_group(export_group) parser.add_boolean_config_file_option(option_name = "ignore-new", dest="ignore_new") + parser.add_boolean_config_file_option(option_name = "ignore-untracked", + dest="ignore_untracked") parser.add_option("--git-verbose", action="store_true", dest="verbose", default=False, help="verbose command execution") parser.add_config_file_option(option_name="color", dest="color", type='tristate') parser.add_config_file_option(option_name="color-scheme", dest="color_scheme") parser.add_config_file_option(option_name="notify", dest="notify", type='tristate') + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") tag_group.add_option("--git-tag", action="store_true", dest="tag", default=False, help="create a tag after a successful build") tag_group.add_option("--git-tag-only", action="store_true", dest="tag_only", default=False, @@ -450,7 +465,7 @@ def build_parser(name, prefix=None): help="don't fail if the tag already exists") tag_group.add_boolean_config_file_option(option_name="sign-tags", dest="sign_tags") tag_group.add_config_file_option(option_name="keyid", dest="keyid") - tag_group.add_config_file_option(option_name="debian-tag", dest="debian_tag") + tag_group.add_config_file_option(option_name="debian-tag", dest="packaging_tag") tag_group.add_config_file_option(option_name="debian-tag-msg", dest="debian_tag_msg") tag_group.add_config_file_option(option_name="upstream-tag", dest="upstream_tag") orig_group.add_config_file_option(option_name="upstream-tree", dest="upstream_tree") @@ -468,7 +483,7 @@ def build_parser(name, prefix=None): orig_group.add_config_file_option(option_name="compression-level", dest="comp_level", help="Compression level, default is '%(compression-level)s'") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") - branch_group.add_config_file_option(option_name="debian-branch", dest="debian_branch") + branch_group.add_config_file_option(option_name="debian-branch", dest="packaging_branch") branch_group.add_boolean_config_file_option(option_name = "ignore-branch", dest="ignore_branch") branch_group.add_boolean_config_file_option(option_name = "submodules", 
dest="with_submodules") cmd_group.add_config_file_option(option_name="builder", dest="builder", @@ -489,6 +504,7 @@ def build_parser(name, prefix=None): cmd_group.add_config_file_option(option_name="arch", dest="pbuilder_arch") cmd_group.add_boolean_config_file_option(option_name = "pbuilder-autoconf", dest="pbuilder_autoconf") cmd_group.add_config_file_option(option_name="pbuilder-options", dest="pbuilder_options") + cmd_group.add_boolean_config_file_option(option_name="build", dest="build") cmd_group.add_boolean_config_file_option(option_name="hooks", dest="hooks") export_group.add_config_file_option(option_name="export-dir", dest="export_dir", type="path", help="before building the package export the source into EXPORT_DIR, default is '%(export-dir)s'") @@ -501,7 +517,8 @@ def build_parser(name, prefix=None): return parser -def parse_args(argv, prefix): +def parse_args(argv, prefix, git_treeish=None): + """Parse config and command line arguments""" args = [ arg for arg in argv[1:] if arg.find('--%s' % prefix) == 0 ] dpkg_args = [ arg for arg in argv[1:] if arg.find('--%s' % prefix) == -1 ] @@ -510,12 +527,15 @@ def parse_args(argv, prefix): if arg in dpkg_args: args.append(arg) - parser = build_parser(argv[0], prefix=prefix) + parser = build_parser(argv[0], prefix=prefix, git_treeish=git_treeish) if not parser: return None, None, None options, args = parser.parse_args(args) gbp.log.setup(options.color, options.verbose, options.color_scheme) + if not options.build: + disable_builder(options) + dpkg_args = [] if not options.hooks: disable_hooks(options) if options.retag: @@ -548,6 +568,8 @@ def main(argv): source = None branch = None + gbp.log.initialize() + options, gbp_args, dpkg_args = parse_args(argv, prefix) if not options: @@ -561,14 +583,27 @@ def main(argv): else: repo_dir = os.path.abspath(os.path.curdir) + # Determine tree-ish to be exported + try: + tree = write_tree(repo, options) + except GbpError as err: + gbp.log.err(err) + return 1 + # Re-parse config options with using the per-tree config file(s) from the + # exported tree-ish + options, gbp_args, builder_args = parse_args(argv, prefix, tree) + try: + init_tmpdir(options.tmp_dir, prefix='buildpackage_') + Command(options.cleaner, shell=True)() if not options.ignore_new: - (ret, out) = repo.is_clean() + (ret, out) = repo.is_clean(options.ignore_untracked) if not ret: gbp.log.err("You have uncommitted changes in your source tree:") gbp.log.err(out) - raise GbpError("Use --git-ignore-new to ignore.") + raise GbpError("Use --git-ignore-new or --git-ignore-untracked " + "to ignore.") try: branch = repo.get_branch() @@ -578,12 +613,11 @@ def main(argv): raise if not options.ignore_new and not options.ignore_branch: - if branch != options.debian_branch: - gbp.log.err("You are not on branch '%s' but on '%s'" % (options.debian_branch, branch)) + if branch != options.packaging_branch: + gbp.log.err("You are not on branch '%s' but on '%s'" % (options.packaging_branch, branch)) raise GbpError("Use --git-ignore-branch to ignore or --git-debian-branch to set the branch name.") head = repo.head - tree = write_tree(repo, options) source = source_vfs(repo, options, tree) check_tag(options, repo, source) @@ -656,7 +690,7 @@ def main(argv): extra_env={'GBP_CHANGES_FILE': changes, 'GBP_BUILD_DIR': build_dir})() if options.tag or options.tag_only: - tag = repo.version_to_tag(options.debian_tag, source.changelog.version) + tag = repo.version_to_tag(options.packaging_tag, source.changelog.version) gbp.log.info("Tagging %s as %s" % 
(source.changelog.version, tag)) if options.retag and repo.has_tag(tag): repo.delete_tag(tag) @@ -685,7 +719,8 @@ def main(argv): source = None retval = 1 finally: - drop_index() + drop_index(repo) + del_tmpdir() if not options.tag_only: if options.export_dir and options.purge and not retval: diff --git a/gbp/scripts/buildpackage_bb.py b/gbp/scripts/buildpackage_bb.py new file mode 100644 index 00000000..c309e5a4 --- /dev/null +++ b/gbp/scripts/buildpackage_bb.py @@ -0,0 +1,533 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +"""Build an RPM package out of a Git repo with Bitbake meta data""" + +import ConfigParser +import os, os.path +import sys +import shutil +import tempfile + +import gbp.rpm as rpm +from gbp.rpm.policy import RpmPkgPolicy +from gbp.command_wrappers import Command, RunAtCommand, CommandExecFailed +from gbp.config import GbpOptionParserBB, GbpOptionGroup +from gbp.rpm.git import (GitRepositoryError, RpmGitRepository) +from gbp.errors import GbpError +import gbp.log +import gbp.notifications +from gbp.scripts.common.buildpackage import (index_name, wc_names, dump_tree, + drop_index) +from gbp.scripts.buildpackage_rpm import (disable_hooks, get_tree, + get_current_branch, get_upstream_tree, get_vcs_info, + create_packaging_tag, GbpAutoGenerateError) +from gbp.scripts.import_bb import recursive_copy +from gbp.scripts.pq_bb import update_patch_series +from gbp.scripts.common.pq import is_pq_branch, pq_branch_base +from gbp.bb import (bb, init_tinfoil, guess_bb_path, BBFile, bb_from_repo, + pkg_version, parse_bb) + +# pylint: disable=bad-continuation + + +def guess_export_params(repo, options): + """Get commit and tree from where to export packaging and patches""" + tree = None + branch = None + if options.export in wc_names.keys() + [index_name, 'HEAD']: + branch = get_current_branch(repo) + elif options.export in repo.get_local_branches(): + branch = options.export + if branch: + if is_pq_branch(branch, options): + packaging_branch = pq_branch_base(branch, options) + if repo.has_branch(packaging_branch): + gbp.log.info("It seems you're building a development/patch-" + "queue branch. Export target changed to '%s' and " + "patch-export enabled!" % packaging_branch) + options.patch_export = True + if not options.patch_export_rev: + options.patch_export_rev = options.export + options.export = packaging_branch + else: + gbp.log.warn("It seems you're building a development/patch-" + "queue branch. No corresponding packaging branch " + "found. 
Build may fail!") + if tree is None: + tree = get_tree(repo, options.export) + + # Get recipe path + bb_path = guess_bb_path(options, repo, tree, bbappend=True) + # Adjust meta-dir accordingly + options.meta_dir = os.path.dirname(bb_path) + + # Filter out changes in recipe directory + if options.patch_export: + relpath = os.path.relpath(os.path.abspath(options.meta_dir), repo.path) + if relpath != '.': + gbp.log.info("Auto-excluding changes under meta-dir (%s/)" % + relpath) + if options.patch_export_ignore_path: + options.patch_export_ignore_path += '|' + relpath + '/*' + else: + options.patch_export_ignore_path = relpath + '/*' + return tree + +def guess_export_dir(options, tinfoil, repo, treeish): + """Guess export directory""" + if not tinfoil: + gbp.log.err("Bitbake build environment (bb.tinfoil) not initialized, " + "unable to guess export directory") + gbp.log.err("Please use --git-export-dir or try initializing bitbake " + "build environment with the 'oe-init-build-env' script") + raise GbpError + + gbp.log.info('Guessing export directory') + tinfoil.parseRecipes() + + # Parse recipe + bb_path = guess_bb_path(options, repo, treeish, bbappend=True) + #cfg_data = bb.data.createCopy(tinfoil.config_data) + #bbfile = bb_from_repo(cfg_data, repo, treeish, bb_path) + # Use naive parsing, at least for now as the file might be .bbappend + bbfile = bb_from_repo(None, repo, treeish, bb_path) + + pkg_name = bbfile.getVar('PN', True) + bb_name = os.path.basename(bb_path) + if bb_name.endswith('.bb'): + for name in tinfoil.cooker_data.pkg_fn: + if os.path.basename(name) == bb_name and os.path.isabs(name): + gbp.log.debug("Found matching recipe filename: %s" % name) + return os.path.dirname(name) + else: + for name, appends in tinfoil.cooker.collection.appendlist.iteritems(): + print name, appends + if name.rsplit('_', 1)[0] == pkg_name: + gbp.log.debug("Found %s from appends" % name) + for append_name in appends: + if os.path.basename(append_name) == bb_name: + gbp.log.debug("Found matching recipe filename: %s" % + append_name) + return os.path.dirname(append_name) + export_dir = os.path.dirname(appends[-1]) + gbp.log.debug("Using existing appends directory %s" % + export_dir) + return export_dir + if pkg_name in tinfoil.cooker_data.pkg_pn: + export_dir = os.path.dirname(tinfoil.cooker_data.pkg_pn[pkg_name][-1]) + gbp.log.debug("Using existing package directory %s" % export_dir) + return export_dir + else: + pkg_ver = bbfile.getVar('PV', True) + raise GbpError("Package %s-%s not found under any configured layer, " + "please use --git-export-dir to define the export " + "directory" % (pkg_name, pkg_ver)) + +def export_patches(repo, bbfile, export_treeish, options): + """Generate patches and update recipe""" + try: + if bbfile.getVar('SRCREV', True): + upstream_tree = bbfile.getVar('SRCREV', True) + else: + upstream_version = bbfile.getVar('PV', True) + upstream_tree = get_upstream_tree(repo, upstream_version, options) + update_patch_series(repo, bbfile, upstream_tree, export_treeish, + options) + except (GitRepositoryError, GbpError) as err: + raise GbpAutoGenerateError(str(err)) + + +def is_native(repo, options): + """Determine whether a package is native or non-native""" + if options.native.is_auto(): + if repo.has_branch(options.upstream_branch): + return False + # Check remotes, too + for remote_branch in repo.get_remote_branches(): + remote, branch = remote_branch.split('/', 1) + if branch == options.upstream_branch: + gbp.log.debug("Found upstream branch '%s' from remote '%s'" % + (remote, 
branch)) + return False + return True + + return options.native.is_on() + + +def setup_builder(options, builder_args): + """Setup everything to use git-pbuilder""" + # TODO: placeholder for Bitbake: implement or remove entirely + pass + +def bb_get_local_files(bbfile, tgt_dir, whole_dir=False): + """Get (local) packaging files""" + if not whole_dir: + for path in bbfile.localfiles + bbfile.includes + [bbfile.bb_path]: + relpath = os.path.relpath(path, bbfile.bb_dir) + subdir = os.path.join(tgt_dir, os.path.dirname(relpath)) + if not os.path.exists(subdir): + os.makedirs(subdir) + shutil.copy2(path, os.path.join(tgt_dir, relpath)) + else: + # Simply copy whole meta dir, if requested + recursive_copy(bbfile.bb_dir, tgt_dir) + +def dump_meta(cfg_data, options, repo, treeish, dump_dir): + """Parse and dump meta information from a treeish""" + tmpdir = tempfile.mkdtemp(prefix='gbp-bb_') + try: + bb_path = guess_bb_path(options, repo, treeish, bbappend=True) + # Dump whole meta directory + dump_tree(repo, tmpdir, '%s:%s' % (treeish, os.path.dirname(bb_path)), + False) + # Parse recipe + full_path = os.path.join(tmpdir, os.path.basename(bb_path)) + bbfile = BBFile(full_path, cfg_data) + bb_get_local_files(bbfile, dump_dir) + except GitRepositoryError as err: + raise GbpError("Git error: %s" % err) + finally: + shutil.rmtree(tmpdir) + + # Re-parse recipe from final location + full_path = os.path.abspath(os.path.join(dump_dir, + os.path.basename(bb_path))) + return BBFile(full_path, cfg_data) + + +def build_parser(name, prefix=None, git_treeish=None): + """Create command line parser""" + try: + parser = GbpOptionParserBB(command=os.path.basename(name), + prefix=prefix, git_treeish=git_treeish) + except ConfigParser.ParsingError, err: + gbp.log.err(err) + return None + + tag_group = GbpOptionGroup(parser, "tag options", + "options related to git tag creation") + branch_group = GbpOptionGroup(parser, "branch options", + "branch layout options") + cmd_group = GbpOptionGroup(parser, "external command options", + "how and when to invoke external commands and hooks") + orig_group = GbpOptionGroup(parser, "orig tarball options", + "options related to the creation of the orig tarball") + export_group = GbpOptionGroup(parser, "export build-tree options", + "alternative build tree related options") + parser.add_option_group(tag_group) + parser.add_option_group(orig_group) + parser.add_option_group(branch_group) + parser.add_option_group(cmd_group) + parser.add_option_group(export_group) + + parser.add_boolean_config_file_option(option_name = "ignore-untracked", + dest="ignore_untracked") + parser.add_boolean_config_file_option(option_name = "ignore-new", + dest="ignore_new") + parser.add_option("--git-verbose", action="store_true", dest="verbose", + help="verbose command execution") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_config_file_option(option_name="notify", dest="notify", + type='tristate') + parser.add_config_file_option(option_name="vendor", action="store", + dest="vendor") + parser.add_config_file_option(option_name="native", dest="native", + type='tristate') + tag_group.add_option("--git-tag", action="store_true", dest="tag", + help="create a tag after a successful build") + tag_group.add_option("--git-tag-only", action="store_true", dest="tag_only", + help="don't build, only tag and run 
the posttag hook") + tag_group.add_option("--git-retag", action="store_true", dest="retag", + help="don't fail if the tag already exists") + tag_group.add_boolean_config_file_option(option_name="sign-tags", + dest="sign_tags") + tag_group.add_config_file_option(option_name="keyid", dest="keyid") + tag_group.add_config_file_option(option_name="packaging-tag", + dest="packaging_tag") + tag_group.add_config_file_option(option_name="packaging-tag-msg", + dest="packaging_tag_msg") + tag_group.add_config_file_option(option_name="upstream-tag", + dest="upstream_tag") + orig_group.add_config_file_option(option_name="upstream-tree", + dest="upstream_tree") + branch_group.add_config_file_option(option_name="upstream-branch", + dest="upstream_branch") + branch_group.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch") + branch_group.add_config_file_option(option_name="pq-branch", + dest="pq_branch") + branch_group.add_boolean_config_file_option(option_name = "ignore-branch", + dest="ignore_branch") + cmd_group.add_config_file_option(option_name="builder", dest="builder", + help="command to build the package, default is " + "'%(builder)s'") + cmd_group.add_config_file_option(option_name="cleaner", dest="cleaner", + help="command to clean the working copy, default is " + "'%(cleaner)s'") + cmd_group.add_config_file_option(option_name="prebuild", dest="prebuild", + help="command to run before a build, default is " + "'%(prebuild)s'") + cmd_group.add_config_file_option(option_name="postexport", + dest="postexport", + help="command to run after exporting the source tree, " + "default is '%(postexport)s'") + cmd_group.add_config_file_option(option_name="postbuild", dest="postbuild", + help="hook run after a successful build, default is " + "'%(postbuild)s'") + cmd_group.add_config_file_option(option_name="posttag", dest="posttag", + help="hook run after a successful tag operation, default " + "is '%(posttag)s'") + cmd_group.add_boolean_config_file_option(option_name="hooks", dest="hooks") + export_group.add_option("--git-no-build", action="store_true", + dest="no_build", + help="Don't run builder or the associated hooks") + export_group.add_config_file_option(option_name="export-dir", + dest="export_dir", type="path", + help="Build topdir, also export the sources under " + "EXPORT_DIR, default is '%(export-dir)s'") + export_group.add_config_file_option("export", dest="export", + help="export treeish object TREEISH, default is " + "'%(export)s'", metavar="TREEISH") + export_group.add_config_file_option(option_name="meta-dir", + dest="meta_dir") + export_group.add_config_file_option(option_name="bb-file", dest="bb_file") + export_group.add_boolean_config_file_option("patch-export", + dest="patch_export") + export_group.add_option("--git-patch-export-rev", dest="patch_export_rev", + metavar="TREEISH", + help="[experimental] Export patches from treeish object " + "TREEISH") + export_group.add_config_file_option("patch-export-ignore-path", + dest="patch_export_ignore_path") + export_group.add_config_file_option("patch-export-compress", + dest="patch_export_compress") + export_group.add_config_file_option("patch-export-squash-until", + dest="patch_export_squash_until") + export_group.add_boolean_config_file_option(option_name="patch-numbers", + dest="patch_numbers") + export_group.add_config_file_option("bb-vcs-info", dest="bb_vcs_info") + return parser + +def parse_args(argv, prefix, git_treeish=None): + """Parse config and command line arguments""" + args = [arg for arg in argv[1:] 
if arg.find('--%s' % prefix) == 0] + builder_args = [arg for arg in argv[1:] if arg.find('--%s' % prefix) == -1] + + # We handle these although they don't have a --git- prefix + for arg in ["--help", "-h", "--version"]: + if arg in builder_args: + args.append(arg) + + parser = build_parser(argv[0], prefix=prefix, git_treeish=git_treeish) + if not parser: + return None, None, None + options, args = parser.parse_args(args) + + options.patch_export_compress = rpm.string_to_int( + options.patch_export_compress) + + gbp.log.setup(options.color, options.verbose, options.color_scheme) + if not options.hooks: + disable_hooks(options) + if options.retag: + if not options.tag and not options.tag_only: + gbp.log.err("'--%sretag' needs either '--%stag' or '--%stag-only'" % + (prefix, prefix, prefix)) + return None, None, None + + return options, args, builder_args + + +def main(argv): + """Entry point for git-buildpackage-bb""" + retval = 0 + prefix = "git-" + bbfile = None + dump_dir = None + + if not bb: + return 1 + + options, gbp_args, builder_args = parse_args(argv, prefix) + if not options: + return 1 + + try: + repo = RpmGitRepository(os.path.curdir) + except GitRepositoryError: + gbp.log.err("%s is not a git repository" % (os.path.abspath('.'))) + return 1 + + # Determine tree-ish to be exported + try: + tree = get_tree(repo, options.export) + except GbpError as err: + gbp.log.err('Failed to determine export treeish: %s' % err) + return 1 + # Re-parse config options with using the per-tree config file(s) from the + # exported tree-ish + options, gbp_args, builder_args = parse_args(argv, prefix, tree) + + branch = get_current_branch(repo) + + try: + tinfoil = init_tinfoil(config_only=True) + #bb_cfg_data = bb.data.createCopy(tinfoil.config_data) + except GbpError: + tinfoil = None + + # Use naive parsing because repository might only have .bb file + gbp.log.info("Using naive standalone parsing of recipes in package repo.") + bb_cfg_data = None + + try: + tree = guess_export_params(repo, options) + + Command(options.cleaner, shell=True)() + if not options.ignore_new: + (ret, out) = repo.is_clean(options.ignore_untracked) + if not ret: + gbp.log.err("You have uncommitted changes in your source tree:") + gbp.log.err(out) + raise GbpError("Use --git-ignore-new or --git-ignore-untracked " + "to ignore.") + + if not options.ignore_new and not options.ignore_branch: + if branch != options.packaging_branch: + gbp.log.err("You are not on branch '%s' but on '%s'" % + (options.packaging_branch, branch)) + raise GbpError("Use --git-ignore-branch to ignore or " + "--git-packaging-branch to set the branch name.") + + if not options.tag_only: + # Dump/parse meta to export dir + if options.export_dir: + export_dir = os.path.abspath(options.export_dir) + else: + export_dir = guess_export_dir(options, tinfoil, repo, tree) + gbp.log.info("Dumping meta from tree '%s' to '%s'" % + (options.export, export_dir)) + bbfile = dump_meta(bb_cfg_data, options, repo, tree, + export_dir) + + # Setup builder opts + setup_builder(options, builder_args) + + if is_native(repo, options) and bbfile.getVar('SRCREV') == 'HEAD': + # Update SRCREV for native packages that are exported from + # pristine repository + BBFile.set_var_val(bbfile.bb_path, 'SRCREV', + repo.rev_parse(tree)) + + # TODO: Re-design the handling of native packages. 
Updating + # SRCREV must probably be more explicit + if options.patch_export: + # Generate patches, if requested + if options.patch_export_rev: + patch_tree = get_tree(repo, options.patch_export_rev) + else: + patch_tree = tree + export_patches(repo, bbfile, patch_tree, options) + + # Run postexport hook + if options.postexport: + RunAtCommand(options.postexport, shell=True, + extra_env={'GBP_GIT_DIR': repo.git_dir, + 'GBP_TMP_DIR': export_dir} + )(dir=export_dir) + # Do actual build + if not options.no_build: + if options.prebuild: + RunAtCommand(options.prebuild, shell=True, + extra_env={'GBP_GIT_DIR': repo.git_dir, + 'GBP_BUILD_DIR': export_dir} + )(dir=export_dir) + + # Unlock cooker so that we are able to run external bitbake + if options.builder == 'bitbake' and tinfoil: + bb.utils.unlockfile(tinfoil.cooker.lock) + + # Finally build the package: + bb_path = bbfile.getVar('FILE', True) + builder_args.extend(['-b', bb_path]) + RunAtCommand(options.builder, builder_args, shell=True, + extra_env={'GBP_BUILD_DIR': export_dir})() + + if options.postbuild: + Command(options.postbuild, shell=True, + extra_env={'GBP_BUILD_DIR': export_dir})() + else: + # Tag-only: we just need to parse the meta + bbfile = parse_bb(bb_cfg_data, options, repo, tree) + + # Tag (note: tags the exported version) + if options.tag or options.tag_only: + version = pkg_version(bbfile) + gbp.log.info("Tagging %s" % rpm.compose_version_str(version)) + create_packaging_tag(repo, tree, bbfile.getVar('PN'), version, + options) + vcs_info = get_vcs_info(repo, tag) + if options.posttag: + sha = repo.rev_parse("%s^{}" % tag) + Command(options.posttag, shell=True, + extra_env={'GBP_TAG': tag, + 'GBP_BRANCH': branch, + 'GBP_SHA1': sha})() + else: + vcs_info = get_vcs_info(repo, tree) + # TODO: Put VCS information to recipe + if options.bb_vcs_info: + raise GbpError("Injecting VCS info into recipe not yet supported") + + except CommandExecFailed: + retval = 1 + except GitRepositoryError as err: + gbp.log.err("Git command failed: %s" % err) + retval = 1 + except GbpAutoGenerateError as err: + if len(err.__str__()): + gbp.log.err(err) + retval = 2 + except GbpError, err: + if len(err.__str__()): + gbp.log.err(err) + retval = 1 + finally: + drop_index(repo) + if dump_dir and os.path.exists(dump_dir): + shutil.rmtree(dump_dir) + + if not options.tag_only: + if bbfile and options.notify: + summary = "GBP buildpackage-bb %s" % \ + ["failed", "successful"][not retval] + message = ("Build of %s %s %s" % (bbfile.getVar('PN', True), + RpmPkgPolicy.compose_full_version(pkg_version(bbfile)), + ["failed", "succeeded"][not retval])) + if not gbp.notifications.notify(summary, message, options.notify): + gbp.log.err("Failed to send notification") + retval = 1 + + return retval + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/gbp/scripts/buildpackage_rpm.py b/gbp/scripts/buildpackage_rpm.py index 00582df1..b8a2ae32 100644 --- a/gbp/scripts/buildpackage_rpm.py +++ b/gbp/scripts/buildpackage_rpm.py @@ -18,8 +18,10 @@ # """Build an RPM package out of a Git repository""" +from datetime import datetime from six.moves import configparser import os +import re import shutil import sys @@ -34,11 +36,12 @@ from gbp.rpm.git import GitRepositoryError, RpmGitRepository from gbp.rpm.policy import RpmPkgPolicy from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile -from gbp.scripts.common.buildpackage import (index_name, wc_name, +from gbp.scripts.common.buildpackage import (index_name, wc_names, git_archive_submodules, 
git_archive_single, dump_tree, write_wc, drop_index) -from gbp.scripts.pq_rpm import parse_spec +from gbp.scripts.pq_rpm import parse_spec, update_patch_series +from gbp.scripts.common.pq import is_pq_branch, pq_branch_name, pq_branch_base class GbpAutoGenerateError(GbpError): @@ -75,7 +78,7 @@ def git_archive(repo, spec, output_dir, treeish, prefix, comp_level, spec.orig_src['archive_fmt']) else: - git_archive_single(treeish, output, prefix, + git_archive_single(repo, treeish, output, prefix, spec.orig_src['compression'], comp_level, comp_opts, spec.orig_src['archive_fmt']) except (GitRepositoryError, CommandExecFailed): @@ -163,9 +166,10 @@ def get_tree(repo, tree_name): if tree_name == index_name: # Write a tree of the index tree = repo.write_tree() - elif tree_name == wc_name: + elif tree_name in wc_names: # Write a tree of the working copy - tree = write_wc(repo) + tree = write_wc(repo, wc_names[tree_name]['force'], + wc_names[tree_name]['untracked']) else: tree = tree_name except GitRepositoryError as err: @@ -202,6 +206,45 @@ def get_vcs_info(repo, treeish): return info +def guess_export_params(repo, options): + """Get commit and tree from where to export packaging and patches""" + tree = None + branch = None + if options.export in wc_names.keys() + [index_name, 'HEAD']: + branch = get_current_branch(repo) + elif options.export in repo.get_local_branches(): + branch = options.export + if branch: + if is_pq_branch(branch, options): + packaging_branch = pq_branch_base(branch, options) + if repo.has_branch(packaging_branch): + gbp.log.info("It seems you're building a development/patch-" + "queue branch. Export target changed to '%s' and " + "patch-export enabled!" % packaging_branch) + options.patch_export = True + if not options.patch_export_rev: + options.patch_export_rev = options.export + options.export = packaging_branch + else: + gbp.log.warn("It seems you're building a development/patch-" + "queue branch. No corresponding packaging branch " + "found. 
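# A minimal sketch of the new working-copy export names handled by get_tree()
# above: each WC* tree-ish name selects the force/untracked flags that are
# passed on to write_wc() (values taken from the wc_names mapping further down
# in this patch).
WC_NAMES = {
    'WC':           {'force': True,  'untracked': True},
    'WC.TRACKED':   {'force': False, 'untracked': False},
    'WC.UNTRACKED': {'force': False, 'untracked': True},
    'WC.IGNORED':   {'force': True,  'untracked': True},
}
# e.g. --git-export=WC.TRACKED -> write_wc(repo, force=False, untracked=False),
# i.e. only changes to tracked files are included in the exported tree.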
Build may fail!") + elif options.patch_export and not options.patch_export_rev: + tree = get_tree(repo, options.export) + spec = parse_spec(options, repo, treeish=tree) + pq_branch = pq_branch_name(branch, options, spec.version) + if repo.has_branch(pq_branch): + gbp.log.info("Exporting patches from development/patch-queue " + "branch '%s'" % pq_branch) + options.patch_export_rev = pq_branch + if tree is None: + tree = get_tree(repo, options.export) + spec = parse_spec(options, repo, treeish=tree) + + # Return tree-ish object and relative spec path for for exporting packaging + return tree, spec + + def git_archive_build_orig(repo, spec, output_dir, options): """ Build orig tarball using git-archive @@ -218,7 +261,6 @@ def git_archive_build_orig(repo, spec, output_dir, options): @rtype: C{str} """ try: - orig_prefix = spec.orig_src['prefix'] upstream_tree = get_upstream_tree(repo, spec.upstreamversion, options) gbp.log.info("%s does not exist, creating from '%s'" % (spec.orig_src['filename'], upstream_tree)) @@ -227,7 +269,7 @@ def git_archive_build_orig(repo, spec, output_dir, options): "'%s -%s'" % (spec.orig_src['compression'], options.comp_level)) if not git_archive(repo, spec, output_dir, upstream_tree, - orig_prefix, options.comp_level, + options.orig_prefix, options.comp_level, options.with_submodules): raise GbpError("Cannot create upstream tarball at '%s'" % output_dir) @@ -236,6 +278,15 @@ def git_archive_build_orig(repo, spec, output_dir, options): return upstream_tree +def export_patches(repo, spec, export_treeish, options): + """Generate patches and update spec file""" + try: + upstream_tree = get_upstream_tree(repo, spec.upstreamversion, options) + update_patch_series(repo, spec, upstream_tree, export_treeish, options) + except (GitRepositoryError, GbpError) as err: + raise GbpAutoGenerateError(str(err)) + + def is_native(repo, options): """Determine whether a package is native or non-native""" if options.native.is_auto(): @@ -264,12 +315,60 @@ def setup_builder(options, builder_args): '--define "_sourcedir %%_topdir/%s"' % options.export_sourcedir]) +def packaging_tag_time_fields(repo, commit, tag_format_str, other_fields): + """Update string format fields for packaging tag""" + commit_info = repo.get_commit_info(commit) + fields = {} + fields['nowtime'] = datetime.now().\ + strftime(RpmPkgPolicy.tag_timestamp_format) + + time = datetime.fromtimestamp(int(commit_info['author'].date.split()[0])) + fields['authortime'] = time.strftime(RpmPkgPolicy.tag_timestamp_format) + time = datetime.fromtimestamp(int(commit_info['committer'].date.split()[0])) + fields['committime'] = time.strftime(RpmPkgPolicy.tag_timestamp_format) + + # Create re for finding tags with incremental numbering + re_fields = dict(fields) + re_fields.update(other_fields) + re_fields['nowtimenum'] = fields['nowtime'] + "\.(?P[0-9]+)" + re_fields['authortimenum'] = fields['authortime'] + "\.(?P[0-9]+)" + re_fields['committimenum'] = fields['committime'] + "\.(?P[0-9]+)" + + tag_re = re.compile("^%s$" % (format_str(tag_format_str, re_fields))) + + # Defaults for numbered tags + fields['nowtimenum'] = fields['nowtime'] + ".1" + fields['authortimenum'] = fields['authortime'] + ".1" + fields['committimenum'] = fields['committime'] + ".1" + + # Search for existing numbered tags + for tag in reversed(repo.get_tags()): + match = tag_re.match(tag) + if match: + match = match.groupdict() + # Increase numbering if a tag with the same "base" is found + if 'nownum' in match: + fields['nowtimenum'] = "%s.%s" % 
(fields['nowtime'], + int(match['nownum'])+1) + if 'authornum' in match: + fields['authortimenum'] = "%s.%s" % (fields['authortime'], + int(match['authornum'])+1) + if 'commitnum' in match: + fields['committimenum'] = "%s.%s" % (fields['committime'], + int(match['commitnum'])+1) + break + return fields + + def packaging_tag_data(repo, commit, name, version, options): """Compose packaging tag name and msg""" version_dict = dict(version, version=rpm.compose_version_str(version)) # Compose tag name and message tag_name_fields = dict(version_dict, vendor=options.vendor.lower()) + tag_name_fields.update(packaging_tag_time_fields(repo, commit, + options.packaging_tag, + tag_name_fields)) tag_name = repo.version_to_tag(options.packaging_tag, tag_name_fields) tag_msg = format_str(options.packaging_tag_msg, @@ -316,7 +415,7 @@ def build_parser(name, prefix=None, git_treeish=None): """Construct config/option parser""" try: parser = GbpOptionParserRpm(command=os.path.basename(name), - prefix=prefix) + prefix=prefix, git_treeish=git_treeish) except configparser.ParsingError as err: gbp.log.err(err) return None @@ -339,6 +438,8 @@ def build_parser(name, prefix=None, git_treeish=None): parser.add_boolean_config_file_option(option_name="ignore-new", dest="ignore_new") + parser.add_boolean_config_file_option(option_name = "ignore-untracked", + dest="ignore_untracked") parser.add_option("--git-verbose", action="store_true", dest="verbose", default=False, help="verbose command execution") parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") @@ -388,10 +489,14 @@ def build_parser(name, prefix=None, git_treeish=None): dest="comp_level", help="Compression level, default is " "'%(compression-level)s'") + orig_group.add_config_file_option(option_name="orig-prefix", + dest="orig_prefix") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") branch_group.add_config_file_option(option_name="packaging-branch", dest="packaging_branch") + branch_group.add_config_file_option(option_name="pq-branch", + dest="pq_branch") branch_group.add_boolean_config_file_option(option_name = "ignore-branch", dest="ignore_branch") branch_group.add_boolean_config_file_option(option_name = "submodules", @@ -440,6 +545,18 @@ def build_parser(name, prefix=None, git_treeish=None): dest="packaging_dir") export_group.add_config_file_option(option_name="spec-file", dest="spec_file") + export_group.add_config_file_option("spec-vcs-tag", dest="spec_vcs_tag") + export_group.add_boolean_config_file_option("patch-export", + dest="patch_export") + export_group.add_option("--git-patch-export-rev", dest="patch_export_rev", + metavar="TREEISH", + help="Export patches from TREEISH") + export_group.add_boolean_config_file_option(option_name="patch-numbers", + dest="patch_numbers") + export_group.add_config_file_option("patch-compress", dest="patch_compress") + export_group.add_config_file_option("patch-squash", dest="patch_squash") + export_group.add_config_file_option("patch-ignore-path", + dest="patch_ignore_path") return parser @@ -467,6 +584,8 @@ def parse_args(argv, prefix, git_treeish=None): (prefix, prefix, prefix)) return None, None, None + options.patch_compress = rpm.string_to_int(options.patch_compress) + return options, args, builder_args @@ -502,16 +621,16 @@ def main(argv): try: init_tmpdir(options.tmp_dir, prefix='buildpackage-rpm_') - tree = get_tree(repo, options.export) - spec = parse_spec(options, repo, treeish=tree) + tree, spec = guess_export_params(repo, options) 
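# A sketch of the new time-based fields available in the packaging tag format
# (added by packaging_tag_time_fields() above). The real timestamp format comes
# from RpmPkgPolicy.tag_timestamp_format; '%Y%m%d' and the tag format string
# below are only assumptions for illustration.
from datetime import datetime

def example_tag(fmt, fields):
    now = datetime.now().strftime('%Y%m%d')
    fields = dict(fields, nowtime=now, nowtimenum=now + '.1')
    return fmt % fields

# example_tag('%(vendor)s/%(upstreamversion)s-%(nowtime)s',
#             {'vendor': 'tizen', 'upstreamversion': '0.7.0'})
# -> e.g. 'tizen/0.7.0-20151027'; if such a tag already exists, the
# %(nowtimenum)s variant appends an increasing '.N' suffix instead.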
Command(options.cleaner, shell=True)() if not options.ignore_new: - ret, out = repo.is_clean() + ret, out = repo.is_clean(options.ignore_untracked) if not ret: gbp.log.err("You have uncommitted changes in your source tree:") gbp.log.err(out) - raise GbpError("Use --git-ignore-new to ignore.") + raise GbpError("Use --git-ignore-new or --git-ignore-untracked " + "to ignore.") if not options.ignore_new and not options.ignore_branch: if branch != options.packaging_branch: @@ -535,6 +654,14 @@ def main(argv): if options.use_mock: setup_mock(options) + # Generate patches, if requested + if options.patch_export and not is_native(repo, options): + if options.patch_export_rev: + patch_tree = get_tree(repo, options.patch_export_rev) + else: + patch_tree = tree + export_patches(repo, spec, patch_tree, options) + # Prepare final export dirs export_dir = makedir(options.export_dir) source_dir = makedir(os.path.join(export_dir, @@ -556,6 +683,15 @@ def main(argv): raise GbpError("Error exporting packaging files: %s" % err) spec.specdir = os.path.abspath(spec_dir) + if options.orig_prefix != 'auto': + orig_prefix_fields = dict(spec.version, + version = spec.upstreamversion, + name=spec.name) + options.orig_prefix = format_str(options.orig_prefix, + orig_prefix_fields) + elif spec.orig_src: + options.orig_prefix = spec.orig_src['prefix'] + # Get/build the orig tarball if is_native(repo, options): if spec.orig_src and not options.no_create_orig: @@ -567,9 +703,8 @@ def main(argv): "compression '%s -%s'" % (spec.orig_src['compression'], options.comp_level)) - orig_prefix = spec.orig_src['prefix'] if not git_archive(repo, spec, source_dir, tree, - orig_prefix, options.comp_level, + options.orig_prefix, options.comp_level, options.with_submodules): raise GbpError("Cannot create source tarball at '%s'" % source_dir) @@ -623,6 +758,10 @@ def main(argv): else: vcs_info = get_vcs_info(repo, tree) + # Put 'VCS:' tag to .spec + spec.set_tag('VCS', None, format_str(options.spec_vcs_tag, vcs_info)) + spec.write_spec_file() + except CommandExecFailed: retval = 1 except GitRepositoryError as err: @@ -637,7 +776,7 @@ def main(argv): gbp.log.err(err) retval = 1 finally: - drop_index() + drop_index(repo) del_tmpdir() if not options.tag_only: diff --git a/gbp/scripts/clone.py b/gbp/scripts/clone.py index 216bf993..e5c98758 100755 --- a/gbp/scripts/clone.py +++ b/gbp/scripts/clone.py @@ -23,10 +23,13 @@ import sys import os, os.path from gbp.config import (GbpOptionParser, GbpOptionGroup) -from gbp.deb.git import DebianGitRepository -from gbp.git import (GitRepository, GitRepositoryError) +from gbp.git import GitRepositoryError from gbp.errors import GbpError import gbp.log +try: + from gbp.deb.git import DebianGitRepository as GitRepository +except ImportError: + from gbp.rpm.git import RpmGitRepository as GitRepository def build_parser(name): @@ -43,7 +46,8 @@ def build_parser(name): branch_group.add_option("--all", action="store_true", dest="all", default=False, help="track all branches, not only debian and upstream") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") - branch_group.add_config_file_option(option_name="debian-branch", dest="debian_branch") + branch_group.add_config_file_option(option_name="debian-branch", dest="packaging_branch") + branch_group.add_config_file_option(option_name="packaging-branch", dest="packaging_branch") branch_group.add_boolean_config_file_option(option_name="pristine-tar", dest="pristine_tar") branch_group.add_option("--depth", action="store", 
dest="depth", default=0, help="git history depth (for creating shallow clones)") @@ -71,6 +75,8 @@ def parse_args (argv): def main(argv): retval = 0 + gbp.log.initialize() + (options, args) = parse_args(argv) if not options: return 1 @@ -90,8 +96,8 @@ def main(argv): pass try: - repo = DebianGitRepository.clone(clone_to, source, options.depth, - auto_name=auto_name,reference=options.reference) + repo = GitRepository.clone(clone_to, source, options.depth, + auto_name=auto_name,reference=options.reference) os.chdir(repo.path) # Reparse the config files of the cloned repository so we pick up the @@ -107,7 +113,7 @@ def main(argv): local != "HEAD": repo.create_branch(local, remote) else: # only track gbp's default branches - branches = [ options.debian_branch, options.upstream_branch ] + branches = [ options.packaging_branch, options.upstream_branch ] if options.pristine_tar: branches += [ repo.pristine_tar_branch ] gbp.log.debug('Will track branches: %s' % branches) @@ -117,7 +123,7 @@ def main(argv): not repo.has_branch(branch): repo.create_branch(branch, remote) - repo.set_branch(options.debian_branch) + repo.set_branch(options.packaging_branch) except GitRepositoryError as err: gbp.log.err("Git command failed: %s" % err) diff --git a/gbp/scripts/clone_bb.py b/gbp/scripts/clone_bb.py new file mode 100755 index 00000000..aedea0eb --- /dev/null +++ b/gbp/scripts/clone_bb.py @@ -0,0 +1,174 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2009,2010 Guido Guenther +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +# inspired by dom-git-checkout +# +"""Clone a package Git repository from a bitbake-based distro""" + +import ConfigParser +import re +import sys +import os, os.path + +from gbp.config import (GbpOptionParser, GbpOptionGroup) +from gbp.git import GitRepositoryError +from gbp.errors import GbpError +import gbp.log +from gbp.rpm.git import RpmGitRepository as GitRepository +from gbp.bb import bb, init_tinfoil, guess_pkg + +# pylint: disable=bad-continuation + + +def guess_remote(tinfoil, source): + """Guess the remote repository URL""" + # Try to determine if a remote URL is referenced + if re.match(r'[a-z]{3,5}://', source) or re.match(r'\S+@\S+', source): + return source, None + + # Get remote repo from recipe + recipe = guess_pkg(tinfoil, source) + appends = tinfoil.cooker.collection.get_file_appends(recipe) + gbp.log.info("Using %s with appends %s" % (recipe, appends)) + pkg_data = bb.cache.Cache.loadDataFull(recipe, appends, tinfoil.config_data) + uri = pkg_data.getVar('GBP_PACKAGING_REPO', True) + if not uri: + raise GbpError("GBP_PACKAGING_REPO not defined in recipe. 
Unable to " + "determine remote repo") + rev = pkg_data.getVar('GBP_PACKAGING_REV', True) + return uri, rev + + +def build_parser(name): + """Create command line argument parser""" + try: + parser = GbpOptionParser(command=os.path.basename(name), prefix='', + usage='%prog [options] repository - clone a ' + 'remote per-package repository') + except ConfigParser.ParsingError as err: + gbp.log.err(err) + return None + + branch_group = GbpOptionGroup(parser, "branch options", + "branch tracking and layout options") + parser.add_option_group(branch_group) + + branch_group.add_option("--all", action="store_true", dest="all", + help="track all branches, not only packaging and upstream") + branch_group.add_config_file_option(option_name="upstream-branch", + dest="upstream_branch") + branch_group.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch") + branch_group.add_option("--depth", action="store", dest="depth", default=0, + help="git history depth (for creating shallow clones)") + + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + help="verbose command execution") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + return parser + + +def parse_args (argv): + """Parse command line arguments""" + parser = build_parser(argv[0]) + if not parser: + return None, None + + (options, args) = parser.parse_args(argv) + gbp.log.setup(options.color, options.verbose, options.color_scheme) + return (options, args) + + +def main(argv): + """Entry point for gbp-clone-bb""" + retval = 0 + + if not bb: + return 1 + + (options, args) = parse_args(argv) + if not options: + return 1 + + if len(args) < 2: + gbp.log.err("Need a package or repository to clone.") + return 1 + + # Determine target dir + clone_to = os.path.curdir + auto_name = False + if len(args) < 3: + if 'BUILDDIR' in os.environ: + clone_to = os.path.join(os.environ['BUILDDIR'], 'devel') + auto_name = True + else: + clone_to = args[2] + + try: + tinfoil = init_tinfoil() + + source, revision = guess_remote(tinfoil, args[1]) + + gbp.log.info("Cloning from %s..." 
% source) + repo = GitRepository.clone(clone_to, source, options.depth, + auto_name=auto_name) + os.chdir(repo.path) + + # Reparse the config files of the cloned repository so we pick up the + # branch information from there: + (options, args) = parse_args(argv) + + # Track all branches: + if options.all: + remotes = repo.get_remote_branches() + for remote in remotes: + local = remote.replace("origin/", "", 1) + if not repo.has_branch(local) and local != "HEAD": + repo.create_branch(local, remote) + else: # only track gbp's default branches + branches = [ options.packaging_branch, options.upstream_branch ] + gbp.log.debug('Will track branches: %s' % branches) + for branch in branches: + remote = 'origin/%s' % branch + if repo.has_branch(remote, remote=True) and \ + not repo.has_branch(branch): + repo.create_branch(branch, remote) + + gbp.log.info("Successfully cloned into %s" % clone_to) + if (revision and repo.rev_parse('HEAD') != + repo.rev_parse('%s^0' % revision)): + gbp.log.info("Checking out revision %s" % revision) + repo.set_branch(revision) + + except GitRepositoryError as err: + gbp.log.err("Git command failed: %s" % err) + retval = 1 + except GbpError as err: + if len(err.__str__()): + gbp.log.err(err) + retval = 1 + + return retval + +if __name__ == '__main__': + sys.exit(main(sys.argv)) + +# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·: diff --git a/gbp/scripts/common/buildpackage.py b/gbp/scripts/common/buildpackage.py index b58f0877..fe08d1b3 100644 --- a/gbp/scripts/common/buildpackage.py +++ b/gbp/scripts/common/buildpackage.py @@ -21,17 +21,22 @@ import os, os.path import pipes import tempfile +import subprocess import shutil +import subprocess + from gbp.command_wrappers import (CatenateTarArchive, CatenateZipArchive) from gbp.errors import GbpError +from gbp.git.repository import GitRepository, GitRepositoryError import gbp.log # when we want to reference the index in a treeish context we call it: index_name = "INDEX" # when we want to reference the working copy in treeish context we call it: -wc_name = "WC" -# index file name used to export working copy -wc_index = ".git/gbp_index" +wc_names = {'WC': {'force': True, 'untracked': True}, + 'WC.TRACKED': {'force': False, 'untracked': False}, + 'WC.UNTRACKED': {'force': False, 'untracked': True}, + 'WC.IGNORED': {'force': True, 'untracked': True}} def sanitize_prefix(prefix): @@ -50,6 +55,27 @@ def sanitize_prefix(prefix): return '/' +def compress(cmd, options, output, input_data=None): + """ + Filter data through a compressor cmd. + + For better performance input_data should feed data in bigger chunks. 
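# A sketch of the source-argument check done by guess_remote() in clone-bb
# above: anything that looks like a URL or a user@host path is cloned as-is,
# otherwise the argument is treated as a recipe/package name and the repository
# URI is taken from the recipe's GBP_PACKAGING_REPO variable.
import re

def looks_like_url(source):
    return bool(re.match(r'[a-z]{3,5}://', source) or re.match(r'\S+@\S+', source))

# looks_like_url('ssh://review.example.com/pkg')  -> True
# looks_like_url('user@host:pkg.git')             -> True
# looks_like_url('zlib')                          -> False (resolved via the recipe)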
+ """ + stdin = subprocess.PIPE if input_data else None + try: + with open(output, 'w') as fobj: + popen = subprocess.Popen([cmd] + options, stdin=stdin, stdout=fobj) + if stdin: + for chunk in input_data: + popen.stdin.write(chunk) + popen.stdin.close() + if popen.wait(): + raise GbpError("Error creating %s: running '%s' failed" % + (output, ' '.join([cmd] + options))) + except (OSError, IOError) as err: + raise GbpError("Error creating %s: %s" % (output, err)) + + def git_archive_submodules(repo, treeish, output, prefix, comp_type, comp_level, comp_opts, format='tar'): """ @@ -72,10 +98,11 @@ def git_archive_submodules(repo, treeish, output, prefix, comp_type, comp_level, # generate each submodule's arhive and append it to the main archive for (subdir, commit) in repo.get_submodules(treeish): tarpath = [subdir, subdir[2:]][subdir.startswith("./")] + subrepo = GitRepository(os.path.join(repo.path, subdir)) gbp.log.debug("Processing submodule %s (%s)" % (subdir, commit[0:8])) - repo.archive(format=format, prefix='%s%s/' % (prefix, tarpath), - output=submodule_archive, treeish=commit, cwd=subdir) + subrepo.archive(format=format, prefix='%s%s/' % (prefix, tarpath), + output=submodule_archive, treeish=commit) if format == 'tar': CatenateTarArchive(main_archive)(submodule_archive) elif format == 'zip': @@ -85,91 +112,89 @@ def git_archive_submodules(repo, treeish, output, prefix, comp_type, comp_level, if comp_type: # Redirect through stdout directly to the correct output file in # order to avoid determining the output filename of the compressor - ret = os.system("%s --stdout -%s %s %s > %s" % - (comp_type, comp_level, comp_opts, main_archive, - output)) - if ret: - raise GbpError("Error creating %s: %d" % (output, ret)) + compress(comp_type, ['--stdout', '-%s' % comp_level] + comp_opts + + [main_archive], output) else: shutil.move(main_archive, output) finally: shutil.rmtree(tempdir) -def git_archive_single(treeish, output, prefix, comp_type, comp_level, comp_opts, format='tar'): +def git_archive_single(repo, treeish, output, prefix, comp_type, comp_level, + comp_opts, format='tar'): """ Create an archive without submodules Exception handling is left to the caller. 
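# A usage sketch for the compress() helper above: stream chunks of data through
# an external compressor straight into the output file (gzip assumed to be
# installed; paths and data are examples only).
compress('gzip', ['--stdout', '-9'], '/tmp/example.txt.gz',
         input_data=['hello ', 'world\n'])
# In git_archive_submodules() above the data comes from a tar file passed as a
# command line argument instead, so input_data is left out there.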
""" prefix = sanitize_prefix(prefix) - pipe = pipes.Template() - pipe.prepend("git archive --format=%s --prefix=%s %s" % (format, prefix, treeish), '.-') if comp_type: - pipe.append('%s -c -%s %s' % (comp_type, comp_level, comp_opts), '--') - ret = pipe.copy('', output) - if ret: - raise GbpError("Error creating %s: %d" % (output, ret)) + cmd = comp_type + opts = ['--stdout', '-%s' % comp_level] + comp_opts + else: + cmd= 'cat' + opts = [] + input_data = repo.archive(format, prefix, None, treeish) + compress(cmd, opts, output, input_data) + +def untar_data(outdir, data): + """Extract tar provided as an iterable""" + popen = subprocess.Popen(['tar', '-C', outdir, '-x'], + stdin=subprocess.PIPE) + for chunk in data: + popen.stdin.write(chunk) + popen.stdin.close() + if popen.wait(): + raise GbpError("Error extracting tar to %s" % outdir) #{ Functions to handle export-dir def dump_tree(repo, export_dir, treeish, with_submodules, recursive=True): - "dump a tree to output_dir" - output_dir = os.path.dirname(export_dir) - prefix = sanitize_prefix(os.path.basename(export_dir)) + """Dump a git tree-ish to output_dir""" + if not os.path.exists(export_dir): + os.makedirs(export_dir) if recursive: - paths = [] + paths = '' else: - paths = ["'%s'" % nam for _mod, typ, _sha, nam in - repo.list_tree(treeish) if typ == 'blob'] - - pipe = pipes.Template() - pipe.prepend('git archive --format=tar --prefix=%s %s -- %s' % - (prefix, treeish, ' '.join(paths)), '.-') - pipe.append('tar -C %s -xf -' % output_dir, '-.') - top = os.path.abspath(os.path.curdir) + paths = [nam for _mod, typ, _sha, nam in repo.list_tree(treeish) if + typ == 'blob'] try: - ret = pipe.copy('', '') - if ret: - raise GbpError("Error in dump_tree archive pipe") - - if recursive and with_submodules: - if repo.has_submodules(): - repo.update_submodules() + data = repo.archive('tar', '', None, treeish, paths) + untar_data(export_dir, data) + if recursive and with_submodules and repo.has_submodules(): + repo.update_submodules() for (subdir, commit) in repo.get_submodules(treeish): - gbp.log.info("Processing submodule %s (%s)" % (subdir, commit[0:8])) - tarpath = [subdir, subdir[2:]][subdir.startswith("./")] - os.chdir(subdir) - pipe = pipes.Template() - pipe.prepend('git archive --format=tar --prefix=%s%s/ %s' % - (prefix, tarpath, commit), '.-') - pipe.append('tar -C %s -xf -' % output_dir, '-.') - ret = pipe.copy('', '') - os.chdir(top) - if ret: - raise GbpError("Error in dump_tree archive pipe in submodule %s" % subdir) - except OSError as err: - gbp.log.err("Error dumping tree to %s: %s" % (output_dir, err[0])) - return False - except GbpError as err: - gbp.log.err(err) + gbp.log.info("Processing submodule %s (%s)" % (subdir, + commit[0:8])) + subrepo = GitRepository(os.path.join(repo.path, subdir)) + prefix = [subdir, subdir[2:]][subdir.startswith("./")] + '/' + data = subrepo.archive('tar', prefix, None, treeish) + untar_data(export_dir, data) + except GitRepositoryError as err: + gbp.log.err("Git error when dumping tree: %s" % err) return False - except Exception as e: - gbp.log.err("Error dumping tree to %s: %s" % (output_dir, e)) - return False - finally: - os.chdir(top) return True -def write_wc(repo, force=True): +def wc_index(repo): + """Get path of the temporary index file used for exporting working copy""" + return os.path.join(repo.git_dir, "gbp_index") + +def write_wc(repo, force=True, untracked=True): """write out the current working copy as a treeish object""" - repo.add_files(repo.path, force=force, index_file=wc_index) - 
tree = repo.write_tree(index_file=wc_index) + clone_index(repo) + repo.add_files(repo.path, force=force, untracked=untracked, index_file=wc_index(repo)) + tree = repo.write_tree(index_file=wc_index(repo)) return tree -def drop_index(): +def drop_index(repo): """drop our custom index""" - if os.path.exists(wc_index): - os.unlink(wc_index) + if os.path.exists(wc_index(repo)): + os.unlink(wc_index(repo)) + +def clone_index(repo): + """Copy the current index file to our custom index file""" + indexfn = os.path.join(repo.git_dir, "index") + if os.path.exists(indexfn): + shutil.copy2(indexfn, wc_index(repo)) diff --git a/gbp/scripts/common/import_orig.py b/gbp/scripts/common/import_orig.py index b2f45bb6..08f68861 100644 --- a/gbp/scripts/common/import_orig.py +++ b/gbp/scripts/common/import_orig.py @@ -23,7 +23,7 @@ import gbp.command_wrappers as gbpc import gbp.log -from gbp.pkg import UpstreamSource +from gbp.pkg import parse_archive_filename from gbp.errors import GbpError from gbp.deb.upstreamsource import DebianUpstreamSource @@ -35,43 +35,6 @@ except ImportError: pass -def orig_needs_repack(upstream_source, options): - """ - Determine if the upstream sources needs to be repacked - - We repack if - 1. we want to filter out files and use pristine tar since we want - to make a filtered tarball available to pristine-tar - 2. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir) - and want to use filters - 3. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir) - and want to use pristine-tar - """ - if ((options.pristine_tar and options.filter_pristine_tar and len(options.filters) > 0)): - return True - elif not upstream_source.is_orig(): - if len(options.filters): - return True - elif options.pristine_tar: - return True - return False - - -def cleanup_tmp_tree(tree): - """remove a tree of temporary files""" - try: - gbpc.RemoveTree(tree)() - except gbpc.CommandExecFailed: - gbp.log.err("Removal of tmptree %s failed." 
% tree) - - -def is_link_target(target, link): - """does symlink link already point to target?""" - if os.path.exists(link): - if os.path.samefile(target, link): - return True - return False - def ask_package_name(default, name_validator_func, err_msg): """ @@ -111,35 +74,6 @@ def ask_package_version(default, ver_validator_func, err_msg): gbp.log.warn("\nNot a valid upstream version: '%s'.\n%s" % (version, err_msg)) -def repacked_tarball_name(source, name, version): - if source.is_orig(): - # Repacked orig tarball needs a different name since there's already - # one with that name - name = os.path.join( - os.path.dirname(source.path), - os.path.basename(source.path).replace(".tar", ".gbp.tar")) - else: - # Repacked sources or other archives get canonical name - name = os.path.join( - os.path.dirname(source.path), - "%s_%s.orig.tar.bz2" % (name, version)) - return name - - -def repack_source(source, name, version, tmpdir, filters): - """Repack the source tree""" - name = repacked_tarball_name(source, name, version) - repacked = source.pack(name, filters) - if source.is_orig(): # the tarball was filtered on unpack - repacked.unpacked = source.unpacked - else: # otherwise unpack the generated tarball get a filtered tree - if tmpdir: - cleanup_tmp_tree(tmpdir) - tmpdir = tempfile.mkdtemp(dir='../') - repacked.unpack(tmpdir, filters) - return (repacked, tmpdir) - - def download_orig(url): """ Download orig tarball from given URL @@ -176,3 +110,127 @@ def download_orig(url): os.unlink(target) return DebianUpstreamSource(target) + + +def prepare_pristine_tar(source, pkg_name, pkg_version, pristine_commit_name, + filters=None, prefix=None, tmpdir=None): + """ + Prepare the upstream sources for pristine-tar import + + @param source: original upstream sources + @type source: C{UpstreamSource} + @param pkg_name: package name + @type pkg_name: C{str} + @param pkg_version: upstream version of the package + @type pkg_version: C{str} + @param pristine_commit_name: archive filename to commit to pristine-tar + @type pristine_commit_name: C{str} or C{None} + @param filters: filter to exclude files + @type filters: C{list} of C{str} or C{None} + @param prefix: prefix (i.e. 
leading directory of files) to use in + pristine-tar, set to C{None} to not mangle orig archive + @type prefix: C{str} or C{None} + @param tmpdir: temporary working dir (cleanup left to caller) + @type tmpdir: C{str} + @return: prepared source archive + @rtype: C{UpstreamSource} + """ + need_repack = False + if source.is_dir(): + if prefix is None: + prefix = '%s-%s' % (pkg_name, pkg_version) + gbp.log.info("Using guessed prefix '%s/' for pristine-tar" % prefix) + need_repack = True + else: + if prefix is not None and prefix == source.prefix: + prefix = None + comp = parse_archive_filename(pristine_commit_name)[2] + if filters or prefix is not None or source.compression != comp: + if not source.unpacked: + unpack_dir = tempfile.mkdtemp(prefix='pristine_unpack_', + dir=tmpdir) + source.unpack(unpack_dir) + need_repack = True + pristine_path = os.path.join(tmpdir, pristine_commit_name) + if need_repack: + gbp.log.debug("Packing '%s' from '%s' for pristine-tar" % + (pristine_path, source.unpacked)) + pristine = source.pack(pristine_path, filters, prefix) + else: + # Just create symlink for mangling the pristine tarball name + os.symlink(source.path, pristine_path) + pristine = source.__class__(pristine_path) + + return pristine + + +def prepare_sources(source, pkg_name, pkg_version, pristine_commit_name, + filters, filter_pristine, prefix, tmpdir): + """ + Prepare upstream sources for importing + + Unpack, filter and repack sources for importing to git and to pristine-tar. + + @param source: original upstream sources + @type source: C{UpstreamSource} + @param pkg_name: package name + @type pkg_name: C{str} + @param pkg_version: upstream version of the package + @type pkg_version: C{str} + @param pristine_commit_name: archive filename to commit to pristine-tar + @type pristine_commit_name: C{str} or C{None} + @param filters: filter to exclude files + @type filters: C{list} of C{str} + @param filter_pristine: filter pristine-tar, too + @type filter_pristine: C{bool} + @param prefix: prefix (i.e. leading directory of files) to use in + pristine-tar, set to C{None} to not mangle orig archive + @type prefix: C{str} or C{None} + @param tmpdir: temporary working dir (cleanup left to caller) + @type tmpdir: C{str} + @return: path to prepared source tree and tarball to commit to pristine-tar + @rtype: C{tuple} of C{str} + """ + pristine = None + # Determine parameters for pristine tar + pristine_filters = filters if filters and filter_pristine else None + pristine_prefix = None + if prefix is not None and prefix != 'auto': + prefix_subst = {'name': pkg_name, + 'version': pkg_version, + 'upstreamversion': pkg_version} + pristine_prefix = prefix % prefix_subst + # Handle unpacked sources, i.e. 
importing a directory + if source.is_dir(): + if pristine_commit_name: + gbp.log.warn('Preparing unpacked sources for pristine-tar') + pristine = prepare_pristine_tar(source, pkg_name, pkg_version, + pristine_commit_name, + pristine_filters, pristine_prefix, + tmpdir) + if filters: + # Re-use sources packed for pristine-tar, if available + if pristine: + packed = pristine + else: + packed_fn = tempfile.mkstemp(prefix="packed_", dir=tmpdir, + suffix='.tar')[1] + gbp.log.debug("Packing '%s' to '%s'" % (source.path, packed_fn)) + packed = source.pack(packed_fn) + unpack_dir = tempfile.mkdtemp(prefix='filtered_', dir=tmpdir) + filtered = packed.unpack(unpack_dir, filters) + else: + filtered = source + # Handle source archives + else: + unpack_dir = tempfile.mkdtemp(prefix='filtered_', dir=tmpdir) + gbp.log.debug("Unpacking '%s' to '%s'" % (source.path, unpack_dir)) + filtered = source.unpack(unpack_dir, filters) + if pristine_commit_name: + pristine = prepare_pristine_tar(source, pkg_name, pkg_version, + pristine_commit_name, + pristine_filters, pristine_prefix, + tmpdir) + pristine_path = pristine.path if pristine else '' + return (filtered.unpacked, pristine_path) + diff --git a/gbp/scripts/common/pq.py b/gbp/scripts/common/pq.py index 5580426b..a5f96564 100644 --- a/gbp/scripts/common/pq.py +++ b/gbp/scripts/common/pq.py @@ -22,6 +22,8 @@ import os import subprocess import datetime +import pwd +import socket import time from email.message import Message from email.header import Header @@ -32,43 +34,129 @@ from gbp.errors import GbpError import gbp.log -PQ_BRANCH_PREFIX = "patch-queue/" +DEFAULT_PQ_BRANCH_NAME = "patch-queue/%(branch)s" -def is_pq_branch(branch): +def pq_branch_match(branch, pq_fmt_str): + """ + Match branch name with pq branch name pattern + + >>> pq_branch_match('patch-queue/foo', 'patch-queue/%(branch)s').groupdict() + {'branch': 'foo'} + >>> pq_branch_match('pq/foo/bar', 'pq/%(branch)s/baz') + >>> pq_branch_match('pq/foo/bar', 'pq/%(branch)s/bar').groupdict() + {'branch': 'foo'} + >>> pq_branch_match('foo/bar/1.0/pq', + ... 'foo/%(branch)s/%(ver)s/pq').groupdict() + {'ver': '1.0', 'branch': 'bar'} + >>> pq_branch_match('foo/bar/1.0/pq', + ... 'foo/%(b)s/%(ver)s/pq').groupdict() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + GbpError: Invalid pq-branch, name format must contain %(branch)s... + + """ + pq_re_str = '^%s$' % re.sub('%\(([a-z_\-]+)\)s', r'(?P<\1>\S+)', pq_fmt_str) + pq_re = re.compile(pq_re_str) + if not 'branch' in pq_re.groupindex: + raise GbpError("Invalid pq-branch, name format must contain %(branch)s " + "keyword for identifying the base branch") + return pq_re.match(branch) + + +def is_pq_branch(branch, options): """ is branch a patch-queue branch? - >>> is_pq_branch("foo") + >>> from optparse import OptionParser + >>> (opts, args) = OptionParser().parse_args([]) + >>> is_pq_branch("foo", opts) + False + >>> is_pq_branch("patch-queue/foo", opts) + True + >>> opts.pq_branch = "%(branch)s/development" + >>> is_pq_branch("foo/development/bar", opts) + False + >>> is_pq_branch("bar/foo/development", opts) + True + >>> opts.pq_branch = "development" + >>> is_pq_branch("development", opts) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + GbpError: Invalid pq-branch, name format must contain %(branch)s... 
+ >>> opts.pq_branch = "my/%(branch)s/pq" + >>> is_pq_branch("my/foo/pqb", opts) + False + >>> is_pq_branch("my/foo/pq", opts) + True + >>> opts.pq_branch = "my/%(branch)s/%(version)s" + >>> is_pq_branch("my/foo", opts) False - >>> is_pq_branch("patch-queue/foo") + >>> is_pq_branch("my/foo/1.0", opts) True """ - return [False, True][branch.startswith(PQ_BRANCH_PREFIX)] + pq_format_str = (options.pq_branch if hasattr(options, 'pq_branch') + else DEFAULT_PQ_BRANCH_NAME) + if pq_branch_match(branch, pq_format_str): + return True + return False -def pq_branch_name(branch): +def pq_branch_name(branch, options, extra_keys=None): """ get the patch queue branch corresponding to branch - >>> pq_branch_name("patch-queue/master") - >>> pq_branch_name("foo") + >>> from optparse import OptionParser + >>> (opts, args) = OptionParser().parse_args([]) + >>> pq_branch_name("patch-queue/master", opts) + >>> pq_branch_name("foo", opts) 'patch-queue/foo' + >>> opts.pq_branch = "%(branch)s/development" + >>> pq_branch_name("foo", opts) + 'foo/development' + >>> opts.pq_branch = "development" + >>> pq_branch_name("foo", opts) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + GbpError: Invalid pq-branch, name format must contain %(branch)s... + >>> opts.pq_branch = "pq/%(branch)s/%(ver)s" + >>> pq_branch_name("foo", opts, {'ver': '1.0'}) + 'pq/foo/1.0' """ - if not is_pq_branch(branch): - return PQ_BRANCH_PREFIX + branch + pq_format_str = (options.pq_branch if hasattr(options, 'pq_branch') + else DEFAULT_PQ_BRANCH_NAME) + format_fields = {'branch': branch} + if extra_keys: + format_fields.update(extra_keys) + if not is_pq_branch(branch, options): + return pq_format_str % format_fields -def pq_branch_base(pq_branch): +def pq_branch_base(pq_branch, options): """ - get the branch corresponding to the given patch queue branch + Get the branch corresponding to the given patch queue branch. - >>> pq_branch_base("patch-queue/master") + >>> from optparse import OptionParser + >>> (opts, args) = OptionParser().parse_args([]) + >>> pq_branch_base("patch-queue/master", opts) 'master' - >>> pq_branch_base("foo") + >>> pq_branch_base("foo", opts) + >>> opts.pq_branch = "my/%(branch)s/development" + >>> pq_branch_base("foo/development", opts) + >>> pq_branch_base("my/foo/development/bar", opts) + >>> pq_branch_base("my/foo/development", opts) + 'foo' + >>> opts.pq_branch = "development" + >>> pq_branch_base("development", opts) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + GbpError: Invalid pq-branch, name format must contain %(branch)s... 
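# A condensed sketch of the pattern matching behind pq_branch_match() and
# friends above: the configured pq-branch format must contain %(branch)s, which
# is turned into a named regex group so the base branch can be recovered from a
# patch-queue branch name.
import re

def base_branch(branch, fmt='patch-queue/%(branch)s'):
    regex = '^%s$' % re.sub(r'%\(([a-z_\-]+)\)s', r'(?P<\1>\S+)', fmt)
    match = re.match(regex, branch)
    return match.group('branch') if match else None

# base_branch('patch-queue/master')                              -> 'master'
# base_branch('my/foo/development', 'my/%(branch)s/development') -> 'foo'
# base_branch('master')                                          -> None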
""" - if is_pq_branch(pq_branch): - return pq_branch[len(PQ_BRANCH_PREFIX):] + pq_format_str = (options.pq_branch if hasattr(options, 'pq_branch') + else DEFAULT_PQ_BRANCH_NAME) + m = pq_branch_match(pq_branch, pq_format_str) + if m: + return m.group('branch') def parse_gbp_commands(info, cmd_tag, noarg_cmds, arg_cmds, filter_cmds=None): @@ -237,7 +325,7 @@ def format_patch(outdir, repo, commit_info, series, numbered=True, def format_diff(outdir, filename, repo, start, end, path_exclude_regex=None): """Create a patch of diff between two repository objects""" - info = {'author': repo.get_author_info()} + info = {'author': get_author(repo)} now = datetime.datetime.now().replace(tzinfo=GitTz(-time.timezone)) info['author'].set_date(now) info['subject'] = "Raw diff %s..%s" % (start, end) @@ -257,6 +345,28 @@ def format_diff(outdir, filename, repo, start, end, path_exclude_regex=None): return None +def get_author(repo): + """Determine author name and email""" + author = GitModifier() + if repo: + author = repo.get_author_info() + + passwd_data = pwd.getpwuid(os.getuid()) + if not author.name: + # On some distros (Ubuntu, at least) the gecos field has it's own + # internal structure of comma-separated fields + author.name = passwd_data.pw_gecos.split(',')[0].strip() + if not author.name: + author.name = passwd_data.pw_name + if not author.email: + if 'EMAIL' in os.environ: + author.email = os.environ['EMAIL'] + else: + author.email = "%s@%s" % (passwd_data.pw_name, socket.getfqdn()) + + return author + + def get_maintainer_from_control(repo): """Get the maintainer from the control file""" control = os.path.join(repo.path, 'debian', 'control') @@ -274,15 +384,15 @@ def get_maintainer_from_control(repo): return GitModifier() -def switch_to_pq_branch(repo, branch): +def switch_to_pq_branch(repo, branch, options, name_keys=None): """ Switch to patch-queue branch if not already there, create it if it doesn't exist yet """ - if is_pq_branch(branch): + if is_pq_branch(branch, options): return - pq_branch = pq_branch_name(branch) + pq_branch = pq_branch_name(branch, options, name_keys) if not repo.has_branch(pq_branch): try: repo.create_branch(pq_branch) @@ -294,8 +404,9 @@ def switch_to_pq_branch(repo, branch): repo.set_branch(pq_branch) -def apply_single_patch(repo, branch, patch, fallback_author, topic=None): - switch_to_pq_branch(repo, branch) +def apply_single_patch(repo, branch, patch, fallback_author, options): + switch_to_pq_branch(repo, branch, options) + topic = None if not hasattr(options, 'topic') else options.topic apply_and_commit_patch(repo, patch, fallback_author, topic) gbp.log.info("Applied %s" % os.path.basename(patch.path)) @@ -327,12 +438,12 @@ def apply_and_commit_patch(repo, patch, fallback_author, topic=None, name=None): repo.update_ref('HEAD', commit, msg="gbp-pq import %s" % patch.path) -def drop_pq(repo, branch): - if is_pq_branch(branch): +def drop_pq(repo, branch, options, name_keys=None): + if is_pq_branch(branch, options): gbp.log.err("On a patch-queue branch, can't drop it.") raise GbpError else: - pq_branch = pq_branch_name(branch) + pq_branch = pq_branch_name(branch, options, name_keys) if repo.has_branch(pq_branch): repo.delete_branch(pq_branch) @@ -341,11 +452,12 @@ def drop_pq(repo, branch): gbp.log.info("No patch queue branch found - doing nothing.") -def switch_pq(repo, current): +def switch_pq(repo, current, options): """Switch to patch-queue branch if on base branch and vice versa""" - if is_pq_branch(current): - base = pq_branch_base(current) + if 
is_pq_branch(current, options): + base = pq_branch_base(current, options) gbp.log.info("Switching to %s" % base) repo.checkout(base) else: - switch_to_pq_branch(repo, current) + switch_to_pq_branch(repo, current, options) + diff --git a/gbp/scripts/config.py b/gbp/scripts/config.py index 3856c829..d720be7b 100755 --- a/gbp/scripts/config.py +++ b/gbp/scripts/config.py @@ -116,6 +116,8 @@ def value_printer(value): def main(argv): retval = 1 + gbp.log.initialize() + (options, args) = parse_args(argv) gbp.log.setup(options.color, options.verbose, options.color_scheme) diff --git a/gbp/scripts/create_remote_repo.py b/gbp/scripts/create_remote_repo.py old mode 100644 new mode 100755 index f0a818a6..9b933e6f --- a/gbp/scripts/create_remote_repo.py +++ b/gbp/scripts/create_remote_repo.py @@ -23,7 +23,6 @@ from six.moves import configparser import sys import os, os.path -from six.moves import urllib import subprocess import tty, termios import re @@ -108,7 +107,14 @@ def parse_url(remote_url, name, pkg, template_dir=None): ... GbpError: URL contains invalid ~username expansion. """ - frags = urllib.parse.urlparse(remote_url) + # Fix for old distros like Debian 7, Ubuntu 12.04 and openSUSE 12.3 with old + # python-six that doesn't have urllib + try: + from six.moves import urllib + frags = urllib.parse.urlparse(remote_url) + except ImportError: + import urlparse + frags = urlparse.urlparse(remote_url) if frags.scheme in ['ssh', 'git+ssh', '']: scheme = frags.scheme else: @@ -250,7 +256,7 @@ def build_parser(name, sections=[]): branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") branch_group.add_config_file_option(option_name="debian-branch", - dest="debian_branch") + dest="packaging_branch") branch_group.add_boolean_config_file_option(option_name="pristine-tar", dest="pristine_tar") branch_group.add_boolean_config_file_option(option_name="track", @@ -308,6 +314,8 @@ def main(argv): changelog = 'debian/changelog' cmd = [] + gbp.log.initialize() + try: options, args = parse_args(argv) except Exception as e: @@ -324,7 +332,7 @@ def main(argv): try: branches = [] - for branch in [ options.debian_branch, options.upstream_branch ]: + for branch in [ options.packaging_branch, options.upstream_branch ]: if repo.has_branch(branch): branches += [ branch ] diff --git a/gbp/scripts/dch.py b/gbp/scripts/dch.py old mode 100644 new mode 100755 index 230d908b..cbdee4d2 --- a/gbp/scripts/dch.py +++ b/gbp/scripts/dch.py @@ -322,9 +322,9 @@ def build_parser(name): parser.add_boolean_config_file_option(option_name = "ignore-branch", dest="ignore_branch") naming_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") - naming_group.add_config_file_option(option_name="debian-branch", dest="debian_branch") + naming_group.add_config_file_option(option_name="debian-branch", dest="packaging_branch") naming_group.add_config_file_option(option_name="upstream-tag", dest="upstream_tag") - naming_group.add_config_file_option(option_name="debian-tag", dest="debian_tag") + naming_group.add_config_file_option(option_name="debian-tag", dest="packaging_tag") naming_group.add_config_file_option(option_name="snapshot-number", dest="snapshot_number", help="expression to determine the next snapshot number, default is '%(snapshot-number)s'") parser.add_config_file_option(option_name="git-log", dest="git_log", @@ -405,6 +405,7 @@ def main(argv): version_change = {} branch = None + gbp.log.initialize() options, args, dch_options, editor_cmd = parse_args(argv) @@ -421,9 
+422,11 @@ def main(argv): if not options.ignore_branch: raise - if options.debian_branch != branch and not options.ignore_branch: - gbp.log.err("You are not on branch '%s' but on '%s'" % (options.debian_branch, branch)) - raise GbpError("Use --ignore-branch to ignore or --debian-branch to set the branch name.") + if options.packaging_branch != branch and not options.ignore_branch: + gbp.log.err("You are not on branch '%s' but on '%s'" % + (options.packaging_branch, branch)) + raise GbpError("Use --ignore-branch to ignore or --debian-branch " + "to set the branch name.") source = DebianSource('.') cp = source.changelog @@ -433,7 +436,7 @@ def main(argv): else: since = '' if options.auto: - since = guess_documented_commit(cp, repo, options.debian_tag) + since = guess_documented_commit(cp, repo, options.packaging_tag) if since: msg = "Continuing from commit '%s'" % since else: @@ -441,7 +444,7 @@ def main(argv): gbp.log.info(msg) found_snapshot_banner = has_snapshot_banner(cp) else: # Fallback: continue from last tag - since = repo.find_version(options.debian_tag, cp['Version']) + since = repo.find_version(options.packaging_tag, cp['Version']) if not since: raise GbpError("Version %s not found" % cp['Version']) diff --git a/gbp/scripts/import_bb.py b/gbp/scripts/import_bb.py new file mode 100755 index 00000000..8f2a2d2c --- /dev/null +++ b/gbp/scripts/import_bb.py @@ -0,0 +1,419 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +"""Import an RPM package in Bitbake format""" + +import ConfigParser +import sys +import os +import shutil + +import gbp.tmpfile as tempfile +import gbp.command_wrappers as gbpc +import gbp.log +from gbp.rpm import RpmUpstreamSource +from gbp.rpm.policy import RpmPkgPolicy +from gbp.rpm.git import RpmGitRepository, GitRepositoryError +from gbp.config import (GbpOptionParserBB, GbpOptionGroup, + no_upstream_branch_msg) +from gbp.errors import GbpError +from gbp.pkg import parse_archive_filename +from gbp.scripts.import_srpm import move_tag_stamp, force_to_branch_head +from gbp.bb import bb, init_tinfoil, pkg_version, guess_pkg + +# pylint: disable=bad-continuation + +NO_PACKAGING_BRANCH_MSG = """ +Repository does not have branch '%s' for meta/packaging files. +You need to reate it or use --packaging-branch to specify it. 
+""" + +class SkipImport(Exception): + """Nothing imported""" + pass + +def set_bare_repo_options(options): + """Modify options for import into a bare repository""" + if options.pristine_tar: + gbp.log.info("Bare repository: setting %s option '--no-pristine-tar'") + options.pristine_tar = False + + +def build_parser(name): + """Create command line parser""" + try: + parser = GbpOptionParserBB(command=os.path.basename(name), + prefix='', + usage='%prog [options] /path/to/package' + '.src.rpm') + except ConfigParser.ParsingError, err: + gbp.log.err(err) + return None + + import_group = GbpOptionGroup(parser, "import options", + "pristine-tar and filtering") + tag_group = GbpOptionGroup(parser, "tag options", + "options related to git tag creation") + branch_group = GbpOptionGroup(parser, "version and branch naming options", + "version number and branch layout options") + + for group in [import_group, branch_group, tag_group ]: + parser.add_option_group(group) + + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + default=False, help="verbose command execution") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") + parser.add_config_file_option(option_name="vendor", action="store", + dest="vendor") + branch_group.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch") + branch_group.add_config_file_option(option_name="upstream-branch", + dest="upstream_branch") + branch_group.add_option("--upstream-vcs-tag", dest="vcs_tag", + help="Upstream VCS tag on top of which to import " + "the orig sources") + branch_group.add_boolean_config_file_option( + option_name="create-missing-branches", + dest="create_missing_branches") + + tag_group.add_boolean_config_file_option(option_name="sign-tags", + dest="sign_tags") + tag_group.add_config_file_option(option_name="keyid", + dest="keyid") + tag_group.add_config_file_option(option_name="packaging-tag", + dest="packaging_tag") + tag_group.add_config_file_option(option_name="upstream-tag", + dest="upstream_tag") + + import_group.add_config_file_option(option_name="filter", + dest="filters", action="append") + import_group.add_boolean_config_file_option(option_name="pristine-tar", + dest="pristine_tar") + import_group.add_option("--allow-same-version", action="store_true", + dest="allow_same_version", default=False, + help="allow to import already imported version") + import_group.add_config_file_option(option_name="meta-dir", + dest="meta_dir") + return parser + +def parse_args(argv): + """Parse commandline arguments""" + parser = build_parser(argv[0]) + if not parser: + return None, None + + (options, args) = parser.parse_args(argv[1:]) + gbp.log.setup(options.color, options.verbose, options.color_scheme) + return options, args + + +def init_repo(path): + """Check and initialize Git repository""" + try: + repo = RpmGitRepository(path) + clean, out = repo.is_clean() + if not clean and not repo.is_empty(): + gbp.log.err("Repository has uncommitted changes, commit " + "these first:") + gbp.log.err(out) + raise GbpError + except GitRepositoryError: + gbp.log.info("No git repository found, creating one in %s" % path) + repo = RpmGitRepository.create(path) + return repo + +def recursive_copy(src, dst): + """Recursive copy, overwriting files and preserving symlinks""" + # Remove existing destinations, if needed + if 
os.path.isfile(dst) or os.path.islink(dst): + os.unlink(dst) + elif (os.path.isfile(src) or os.path.islink(src)) and os.path.isdir(dst): + # Remove dst dir if src is a file + shutil.rmtree(dst) + + try: + if os.path.islink(src): + os.symlink(os.readlink(src), dst) + elif os.path.isdir(src): + if not os.path.exists(dst): + os.makedirs(dst) + for fname in os.listdir(src): + recursive_copy(os.path.join(src, fname), + os.path.join(dst, fname)) + else: + shutil.copy2(src, dst) + except (IOError, OSError) as err: + raise GbpError("Error while copying '%s' to '%s': %s" % (src, dst, err)) + +def guess_upstream_source(pkg_data, remotes): + """Guess the primary upstream source archive.""" + orig = None + name = pkg_data.getVar('PN', True) + + for fetch_data in remotes: + if fetch_data.type == 'git': + orig = fetch_data + else: + path = fetch_data.localpath + fname = os.path.basename(path) + fn_base, archive_fmt, _ = parse_archive_filename(fname) + if fn_base.startswith(name) and archive_fmt: + # Take an archive that starts with pkg name + orig = fetch_data + # otherwise we take the first archive + elif not orig and archive_fmt: + orig = fetch_data + # else don't accept + return orig + +def bb_get_files(pkg_data, tgt_dir, whole_dir=False, download=True): + """Get (local) packaging files""" + uris = (pkg_data.getVar('SRC_URI', True) or "").split() + try: + fetch = bb.fetch2.Fetch(uris, pkg_data) + if download: + gbp.log.info("Fetching sources...") + fetch.download() + except bb.fetch2.BBFetchException as err: + raise GbpError("Failed to fetch packaging files: %s" % err) + + # Copy local files to target directory + bb_dir = os.path.dirname(pkg_data.getVar('FILE', True)) + remote = [] + local = [path for path in pkg_data.getVar('BBINCLUDED', True).split() if + path.startswith(bb_dir) and os.path.exists(path)] + for url in fetch.urls: + path = fetch.localpath(url) + if path.startswith(bb_dir): + if not whole_dir: + gbp.log.debug("Found local meta file '%s'" % path) + local.append(path) + else: + gbp.log.debug("Found remote file '%s'" % path) + remote.append(fetch.ud[url]) + + if whole_dir: + # Simply copy whole meta dir, if requested + recursive_copy(bb_dir, tgt_dir) + else: + for path in local: + relpath = os.path.relpath(path, bb_dir) + subdir = os.path.join(tgt_dir, os.path.dirname(relpath)) + if not os.path.exists(subdir): + os.makedirs(subdir) + shutil.copy2(path, os.path.join(tgt_dir, relpath)) + + return remote + +def import_upstream_archive(repo, pkg_data, fetch_data, dirs, options): + """Import upstream sources from archive""" + # Unpack orig source archive + path = fetch_data.localpath + sources = RpmUpstreamSource(path) + sources = sources.unpack(dirs['origsrc'], options.filters) + + tag_str_fields = dict(pkg_version(pkg_data), vendor=options.vendor.lower()) + tag = repo.version_to_tag(options.upstream_tag, tag_str_fields) + if not repo.has_tag(tag): + gbp.log.info("Tag %s not found, importing upstream sources" % tag) + branch = options.upstream_branch + + msg = "Upstream version %s" % tag_str_fields['upstreamversion'] + if options.vcs_tag: + parents = [repo.rev_parse("%s^{}" % options.vcs_tag)] + else: + parents = None + commit = repo.commit_dir(sources.unpacked, "Imported %s" % msg, + branch, other_parents=parents, + create_missing_branch=options.create_missing_branches) + repo.create_tag(name=tag, msg=msg, commit=commit, + sign=options.sign_tags, keyid=options.keyid) + + if options.pristine_tar: + archive_fmt = parse_archive_filename(path)[1] + if archive_fmt == 'tar': + 
repo.pristine_tar.commit(path, 'refs/heads/%s' % branch) + else: + gbp.log.warn('Ignoring pristine-tar, %s archives ' + 'not supported' % archive_fmt) + return repo.rev_parse('%s^0' % tag) + +def import_upstream_git(repo, fetch_data, options): + """Import upstream sources from Git""" + # Fetch from local cached repo + for branch in fetch_data.branches.values(): + repo.fetch(repo=fetch_data.localpath, refspec=branch) + + commit = fetch_data.revision + repo.update_ref('refs/heads/' + options.upstream_branch, commit) + return commit + +def import_upstream_sources(repo, pkg_data, remotes, dirs, options): + """Import upstream sources to Git""" + fetch_data = guess_upstream_source(pkg_data, remotes) + if fetch_data: + gbp.log.debug("Using upstream source '%s'" % fetch_data.url) + else: + gbp.log.info("No orig source archive imported") + return + + if not repo.has_branch(options.upstream_branch): + if options.create_missing_branches: + gbp.log.info("Will create missing branch '%s'" % + options.upstream_branch) + else: + gbp.log.err(no_upstream_branch_msg % options.upstream_branch + "\n" + "Also check the --create-missing-branches option.") + raise GbpError + + if fetch_data.type == 'git': + return import_upstream_git(repo, fetch_data, options) + else: + return import_upstream_archive(repo, pkg_data, fetch_data, dirs, + options) + + +def main(argv): + """Main function of the gbp import-bb script""" + dirs = dict(top=os.path.abspath(os.curdir)) + ret = 0 + skipped = False + + if not bb: + return 1 + + options, args = parse_args(argv) + + if len(args) == 0 or len(args) > 2: + gbp.log.err("Need to give exactly one package to import. Try --help.") + return 1 + + try: + dirs['tmp_base'] = tempfile.mkdtemp(dir=options.tmp_dir, + prefix='import-bb') + tinfoil = init_tinfoil() + pkg_bb = guess_pkg(tinfoil, args[0]) + dirs['src'] = os.path.abspath(os.path.dirname(pkg_bb)) + gbp.log.info("Importing '%s' from '%s'" % + (os.path.basename(pkg_bb), dirs['src'])) + + pkg_data = bb.cache.Cache.loadDataFull(pkg_bb, [], tinfoil.config_data) + + # Determine target repo dir + target_dir = '' + if len(args) == 2: + target_dir = args[1] + else: + if 'BUILDDIR' in os.environ: + target_dir = os.path.join(os.environ['BUILDDIR'], 'devel') + target_dir = os.path.join(target_dir, pkg_data.getVar('PN', True)) + + # Check the Git repository state + repo = init_repo(target_dir) + if repo.bare: + set_bare_repo_options(options) + if repo.is_empty(): + options.create_missing_branches = True + os.chdir(repo.path) + + # Create more tempdirs + dirs['origsrc'] = tempfile.mkdtemp(dir=dirs['tmp_base'], + prefix='origsrc_') + dirs['packaging_base'] = tempfile.mkdtemp(dir=dirs['tmp_base'], + prefix='packaging_') + dirs['packaging'] = os.path.join(dirs['packaging_base'], + options.meta_dir) + + # Copy (local) packaging files to tmp dir + remote_srcs = bb_get_files(pkg_data, dirs['packaging']) + + version_dict = pkg_version(pkg_data) + tag_str_fields = dict(version_dict, vendor=options.vendor.lower()) + ver_str = RpmPkgPolicy.compose_full_version(version_dict) + + # Check if the same version of the package is already imported + if repo.find_version(options.packaging_tag, tag_str_fields): + gbp.log.warn("Version %s already imported." 
% ver_str) + if options.allow_same_version: + gbp.log.info("Moving tag of version '%s' since import forced" % + ver_str) + move_tag_stamp(repo, options.packaging_tag, tag_str_fields) + else: + raise SkipImport + + # Import upstream sources + import_upstream_sources(repo, pkg_data, remote_srcs, dirs, options) + + # Import packaging files + gbp.log.info("Importing local meta/packaging files") + branch = options.packaging_branch + if not repo.has_branch(branch): + if options.create_missing_branches: + gbp.log.info("Will create missing branch '%s'" % branch) + else: + gbp.log.err(NO_PACKAGING_BRANCH_MSG % branch + "\n" + "Also check the --create-missing-branches " + "option.") + raise GbpError + + tag = repo.version_to_tag(options.packaging_tag, tag_str_fields) + msg = "%s release %s" % (options.vendor, ver_str) + + commit = repo.commit_dir(dirs['packaging_base'], + "Imported %s" % msg, + branch, + create_missing_branch=options.create_missing_branches) + + # Create packaging tag + repo.create_tag(name=tag, + msg=msg, + commit=commit, + sign=options.sign_tags, + keyid=options.keyid) + + force_to_branch_head(repo, options.packaging_branch) + + except KeyboardInterrupt: + ret = 1 + gbp.log.err("Interrupted. Aborting.") + except gbpc.CommandExecFailed: + ret = 1 + except GitRepositoryError as err: + gbp.log.err("Git command failed: %s" % err) + ret = 1 + except GbpError as err: + if len(err.__str__()): + gbp.log.err(err) + ret = 1 + except SkipImport: + skipped = True + finally: + os.chdir(dirs['top']) + gbpc.RemoveTree(dirs['tmp_base'])() + + if not ret and not skipped: + gbp.log.info("Version '%s' imported under '%s'" % + (ver_str, repo.path)) + return ret + +if __name__ == '__main__': + sys.exit(main(sys.argv)) + +# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·: diff --git a/gbp/scripts/import_dsc.py b/gbp/scripts/import_dsc.py old mode 100644 new mode 100755 index bc93bb8d..d846563f --- a/gbp/scripts/import_dsc.py +++ b/gbp/scripts/import_dsc.py @@ -21,7 +21,6 @@ import re import os import shutil -import tempfile import glob import pipes import time @@ -35,6 +34,7 @@ from gbp.config import (GbpOptionParserDebian, GbpOptionGroup, no_upstream_branch_msg) from gbp.errors import GbpError +from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile import gbp.log class SkipImport(Exception): @@ -147,19 +147,19 @@ def apply_debian_patch(repo, unpack_dir, src, options, tag): os.chdir(repo.path) parents = check_parents(repo, - options.debian_branch, + options.packaging_branch, tag) author = get_author_from_changelog(unpack_dir) committer = get_committer_from_author(author, options) commit = repo.commit_dir(unpack_dir, "Imported Debian patch %s" % src.version, - branch = options.debian_branch, + branch = options.packaging_branch, other_parents = parents, author=author, committer=committer) - if not options.skip_debian_tag: - repo.create_tag(repo.version_to_tag(options.debian_tag, src.version), + if not options.skip_packaging_tag: + repo.create_tag(repo.version_to_tag(options.packaging_tag, src.version), msg="Debian release %s" % src.version, commit=commit, sign=options.sign_tags, @@ -227,10 +227,11 @@ def build_parser(name): parser.add_config_file_option(option_name="color", dest="color", type='tristate') parser.add_config_file_option(option_name="color-scheme", dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") parser.add_option("--download", action="store_true", dest="download", default=False, help="download source package") 
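(Aside on the tag handling above: the packaging and upstream tag names are produced by filling the configured tag format with the tag_str_fields dictionary. A minimal sketch of that expansion follows; the format string and field values are made-up examples, not taken from this patch, and repo.version_to_tag() may additionally sanitize the result.)

    # Roughly how a tag format is expanded from tag_str_fields; made-up values.
    tag_format = 'tizen/%(upstreamversion)s-%(vendor)s'
    tag_str_fields = {'upstreamversion': '0.6.22', 'vendor': 'tizen'}
    print(tag_format % tag_str_fields)   # -> tizen/0.6.22-tizen
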
branch_group.add_config_file_option(option_name="debian-branch", - dest="debian_branch") + dest="packaging_branch") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") branch_group.add_boolean_config_file_option(option_name="create-missing-branches", @@ -241,10 +242,10 @@ def build_parser(name): tag_group.add_config_file_option(option_name="keyid", dest="keyid") tag_group.add_config_file_option(option_name="debian-tag", - dest="debian_tag") + dest="packaging_tag") tag_group.add_config_file_option(option_name="upstream-tag", dest="upstream_tag") - tag_group.add_option("--skip-debian-tag",dest="skip_debian_tag", + tag_group.add_option("--skip-debian-tag",dest="skip_packaging_tag", action="store_true", default=False, help="Don't add a tag after importing the Debian patch") @@ -281,11 +282,15 @@ def main(argv): ret = 0 skipped = False + gbp.log.initialize() + options, args = parse_args(argv) if not options: return 1 try: + init_tmpdir(options.tmp_dir, prefix='import-dsc_') + if len(args) != 1: gbp.log.err("Need to give exactly one package to import. Try --help.") raise GbpError @@ -333,19 +338,19 @@ def main(argv): if repo.bare: set_bare_repo_options(options) - dirs['tmp'] = os.path.abspath(tempfile.mkdtemp(dir='..')) + dirs['tmp'] = os.path.abspath(tempfile.mkdtemp()) upstream = DebianUpstreamSource(src.tgz) - upstream.unpack(dirs['tmp'], options.filters) + upstream = upstream.unpack(dirs['tmp'], options.filters) - format = [(options.upstream_tag, "Upstream"), (options.debian_tag, "Debian")][src.native] + format = [(options.upstream_tag, "Upstream"), (options.packaging_tag, "Debian")][src.native] tag = repo.version_to_tag(format[0], src.upstream_version) msg = "%s version %s" % (format[1], src.upstream_version) - if repo.find_version(options.debian_tag, src.version): + if repo.find_version(options.packaging_tag, src.version): gbp.log.warn("Version %s already imported." 
% src.version) if options.allow_same_version: gbp.log.info("Moving tag of version '%s' since import forced" % src.version) - move_tag_stamp(repo, options.debian_tag, src.version) + move_tag_stamp(repo, options.packaging_tag, src.version) else: raise SkipImport @@ -355,7 +360,7 @@ def main(argv): branch = None else: branch = [options.upstream_branch, - options.debian_branch][src.native] + options.packaging_branch][src.native] if not repo.has_branch(branch): if options.create_missing_branches: gbp.log.info("Creating missing branch '%s'" % branch) @@ -377,7 +382,7 @@ def main(argv): author=author, committer=committer) - if not (src.native and options.skip_debian_tag): + if not (src.native and options.skip_packaging_tag): repo.create_tag(name=tag, msg=msg, commit=commit, @@ -388,18 +393,18 @@ def main(argv): repo.create_branch(options.upstream_branch, commit) if options.pristine_tar: repo.pristine_tar.commit(src.tgz, options.upstream_branch) - if (not repo.has_branch(options.debian_branch) + if (not repo.has_branch(options.packaging_branch) and (is_empty or options.create_missing_branches)): - repo.create_branch(options.debian_branch, commit) + repo.create_branch(options.packaging_branch, commit) if not src.native: if src.diff or src.deb_tgz: apply_debian_patch(repo, upstream.unpacked, src, options, tag) else: gbp.log.warn("Didn't find a diff to apply.") - if repo.get_branch() == options.debian_branch or is_empty: + if repo.get_branch() == options.packaging_branch or is_empty: # Update HEAD if we modified the checked out branch - repo.force_head(options.debian_branch, hard=True) + repo.force_head(options.packaging_branch, hard=True) except KeyboardInterrupt: ret = 1 gbp.log.err("Interrupted. Aborting.") @@ -416,6 +421,7 @@ def main(argv): skipped = True finally: os.chdir(dirs['top']) + del_tmpdir() for d in [ 'tmp', 'download' ]: if d in dirs: diff --git a/gbp/scripts/import_dscs.py b/gbp/scripts/import_dscs.py index 1f98cce6..118dd430 100644 --- a/gbp/scripts/import_dscs.py +++ b/gbp/scripts/import_dscs.py @@ -93,6 +93,7 @@ def main(argv): dscs = [] ret = 0 verbose = False + gbp.log.initialize() dsc_cmp = DscCompareVersions() use_debsnap = False diff --git a/gbp/scripts/import_orig.py b/gbp/scripts/import_orig.py index 16573c31..8143102d 100644 --- a/gbp/scripts/import_orig.py +++ b/gbp/scripts/import_orig.py @@ -20,7 +20,6 @@ from six.moves import configparser import os import sys -import tempfile import gbp.command_wrappers as gbpc from gbp.deb import (DebianPkgPolicy, parse_changelog_repo) from gbp.deb.upstreamsource import DebianUpstreamSource @@ -31,48 +30,11 @@ from gbp.errors import GbpError from gbp.format import format_str import gbp.log -from gbp.scripts.common.import_orig import (orig_needs_repack, cleanup_tmp_tree, - ask_package_name, ask_package_version, - repack_source, is_link_target, download_orig) - - -def prepare_pristine_tar(archive, pkg, version): - """ - Prepare the upstream source for pristine tar import. - - This checks if the upstream source is actually a tarball - and creates a symlink from I{archive} - to I{_.orig.tar.} so pristine-tar will - see the correct basename. 
- - @param archive: the upstream source's name - @type archive: C{str} - @param pkg: the source package's name - @type pkg: C{str} - @param version: the upstream version number - @type version: C{str} - @rtype: C{str} - """ - linked = False - if os.path.isdir(archive): - return None - - ext = os.path.splitext(archive)[1] - if ext in ['.tgz', '.tbz2', '.tlz', '.txz' ]: - ext = ".%s" % ext[2:] - - link = "../%s_%s.orig.tar%s" % (pkg, version, ext) - - if os.path.basename(archive) != os.path.basename(link): - try: - if not is_link_target(archive, link): - os.symlink(os.path.abspath(archive), link) - linked = True - except OSError as err: - raise GbpError("Cannot symlink '%s' to '%s': %s" % (archive, link, err[1])) - return (link, linked) - else: - return (archive, linked) +from gbp.pkg import compressor_opts +from gbp.scripts.common.import_orig import (ask_package_name, + ask_package_version, download_orig, + prepare_sources) +from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile def upstream_import_commit_msg(options, version): @@ -93,7 +55,7 @@ def detect_name_and_version(repo, source, options): # Check the changelog file from the repository, in case # we're not on the debian-branch (but upstream, for # example). - cp = parse_changelog_repo(repo, options.debian_branch, 'debian/changelog') + cp = parse_changelog_repo(repo, options.packaging_branch, 'debian/changelog') sourcepackage = cp['Source'] except NoChangeLogError: if options.interactive: @@ -128,21 +90,6 @@ def find_source(use_uscan, args): @return: upstream source filename or None if nothing to import @rtype: string @raise GbpError: raised on all detected errors - - >>> find_source(False, ['too', 'much']) - Traceback (most recent call last): - ... - GbpError: More than one archive specified. Try --help. - >>> find_source(False, []) - Traceback (most recent call last): - ... - GbpError: No archive to import specified. Try --help. - >>> find_source(True, ['tarball']) - Traceback (most recent call last): - ... - GbpError: you can't pass both --uscan and a filename. 
- >>> find_source(False, ['tarball']).path - 'tarball' """ if use_uscan: if args: @@ -189,12 +136,12 @@ def debian_branch_merge(repo, tag, version, options): if cp.has_epoch(): epoch = '%s:' % cp.epoch info = {'version': "%s%s-1" % (epoch, version)} - env = {'GBP_BRANCH': options.debian_branch} + env = {'GBP_BRANCH': options.packaging_branch} gbpc.Command(format_str(options.postimport, info), extra_env=env, shell=True)() def debian_branch_merge_by_replace(repo, tag, version, options): - gbp.log.info("Replacing upstream source on '%s'" % options.debian_branch) + gbp.log.info("Replacing upstream source on '%s'" % options.packaging_branch) tree = [x for x in repo.list_tree("%s^{tree}" % tag) if x[-1] != 'debian'] @@ -202,7 +149,7 @@ def debian_branch_merge_by_replace(repo, tag, version, options): # Get the current debian/ tree on the debian branch try: - deb_sha = [x for x in repo.list_tree("%s^{tree}" % options.debian_branch) + deb_sha = [x for x in repo.list_tree("%s^{tree}" % options.packaging_branch) if x[-1] == 'debian' and x[1] == 'tree'][0][2] tree.append(['040000', 'tree', deb_sha, 'debian']) msg += "\n\nwith Debian dir %s" % deb_sha @@ -210,21 +157,34 @@ def debian_branch_merge_by_replace(repo, tag, version, options): pass # no debian/ dir is fine sha = repo.make_tree(tree) - commit = repo.commit_tree(sha, msg, ["%s^{commit}" % options.debian_branch, + commit = repo.commit_tree(sha, msg, ["%s^{commit}" % options.packaging_branch, "%s^{commit}" % tag]) - repo.update_ref("refs/heads/%s" % options.debian_branch, commit, - msg="gbp: Updating %s after import of %s" % (options.debian_branch, + repo.update_ref("refs/heads/%s" % options.packaging_branch, commit, + msg="gbp: Updating %s after import of %s" % (options.packaging_branch, tag)) repo.force_head(commit, hard=True) def debian_branch_merge_by_merge(repo, tag, version, options): - gbp.log.info("Merging to '%s'" % options.debian_branch) + gbp.log.info("Merging to '%s'" % options.packaging_branch) try: repo.merge(tag) except GitRepositoryError: raise GbpError("Merge failed, please resolve.") - repo.set_branch(options.debian_branch) + repo.set_branch(options.packaging_branch) + + +def pristine_tarball_name(source, pkg_name, pkg_version): + if source.is_tarball(): + if source.compression: + comp_ext = '.' 
+ compressor_opts[source.compression][1] + else: + comp_ext = '' + else: + # Need to repack and/or mangle filename if the archive is not + # pristine-tar-compatible -> we decide to create gz compressed tarball + comp_ext = '.gz' + return '%s_%s.orig.tar%s' % (pkg_name, pkg_version, comp_ext) def set_bare_repo_options(options): @@ -259,13 +219,16 @@ def build_parser(name): branch_group.add_option("-u", "--upstream-version", dest="version", help="Upstream Version") branch_group.add_config_file_option(option_name="debian-branch", - dest="debian_branch") + dest="packaging_branch") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") branch_group.add_config_file_option(option_name="upstream-vcs-tag", dest="vcs_tag", help="Upstream VCS tag add to the merge commit") branch_group.add_boolean_config_file_option(option_name="merge", dest="merge") branch_group.add_config_file_option(option_name="merge-mode", dest="merge_mode") + branch_group.add_boolean_config_file_option( + option_name="create-missing-branches", + dest="create_missing_branches") tag_group.add_boolean_config_file_option(option_name="sign-tags", dest="sign_tags") @@ -292,6 +255,7 @@ def build_parser(name): parser.add_config_file_option(option_name="color", dest="color", type='tristate') parser.add_config_file_option(option_name="color-scheme", dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") # Accepted for compatibility parser.add_option("--no-dch", dest='no_dch', action="store_true", @@ -340,15 +304,17 @@ def parse_args(argv): def main(argv): ret = 0 - tmpdir = '' - pristine_orig = None - linked = False + + gbp.log.initialize() (options, args) = parse_args(argv) if not options: return 1 try: + init_tmpdir(options.tmp_dir, prefix='import-orig_') + tmpdir = tempfile.mkdtemp() + if options.download: source = download_orig(args[0]) else: @@ -366,9 +332,13 @@ def main(argv): is_empty = False if initial_branch else True if not repo.has_branch(options.upstream_branch) and not is_empty: - raise GbpError(no_upstream_branch_msg % options.upstream_branch) + if options.create_missing_branches: + gbp.log.info("Will create missing branch '%s'" % + options.upstream_branch) + else: + raise GbpError(no_upstream_branch_msg % options.upstream_branch) - (sourcepackage, version) = detect_name_and_version(repo, source, options) + (pkg_name, version) = detect_name_and_version(repo, source, options) (clean, out) = repo.is_clean() if not clean and not is_empty: @@ -378,22 +348,16 @@ def main(argv): if repo.bare: set_bare_repo_options(options) - if not source.is_dir(): - tmpdir = tempfile.mkdtemp(dir='../') - source.unpack(tmpdir, options.filters) - gbp.log.debug("Unpacked '%s' to '%s'" % (source.path, source.unpacked)) - - if orig_needs_repack(source, options): - gbp.log.debug("Filter pristine-tar: repacking '%s' from '%s'" % (source.path, source.unpacked)) - (source, tmpdir) = repack_source(source, sourcepackage, version, tmpdir, options.filters) - - (pristine_orig, linked) = prepare_pristine_tar(source.path, - sourcepackage, - version) + # Prepare sources for importing + pristine_name = pristine_tarball_name(source, pkg_name, version) + prepare_pristine = pristine_name if options.pristine_tar else None + unpacked_orig, pristine_orig = prepare_sources( + source, pkg_name, version, prepare_pristine, options.filters, + options.filter_pristine_tar, None, tmpdir) # Don't mess up our repo with git metadata from an upstream tarball try: - if 
os.path.isdir(os.path.join(source.unpacked, '.git/')): + if os.path.isdir(os.path.join(unpacked_orig, '.git/')): raise GbpError("The orig tarball contains .git metadata - giving up.") except OSError: pass @@ -405,7 +369,7 @@ def main(argv): gbp.log.info("Importing '%s' to branch '%s'%s..." % (source.path, upstream_branch, filter_msg)) - gbp.log.info("Source package is %s" % sourcepackage) + gbp.log.info("Source package is %s" % pkg_name) gbp.log.info("Upstream version is %s" % version) import_branch = [ options.upstream_branch, None ][is_empty] @@ -416,17 +380,14 @@ def main(argv): else: parents = None - commit = repo.commit_dir(source.unpacked, - msg=msg, - branch=import_branch, - other_parents=parents, - ) + commit = repo.commit_dir(unpacked_orig, + msg=msg, + branch=import_branch, + other_parents=parents, + create_missing_branch=options.create_missing_branches) - if options.pristine_tar: - if pristine_orig: - repo.pristine_tar.commit(pristine_orig, upstream_branch) - else: - gbp.log.warn("'%s' not an archive, skipping pristine-tar" % source.path) + if options.pristine_tar and pristine_orig: + repo.pristine_tar.commit(pristine_orig, upstream_branch) tag = repo.version_to_tag(options.upstream_tag, version) repo.create_tag(name=tag, @@ -437,8 +398,8 @@ def main(argv): if is_empty: repo.create_branch(options.upstream_branch, rev=commit) repo.force_head(options.upstream_branch, hard=True) - if options.debian_branch != 'master': - repo.rename_branch('master', options.debian_branch) + if options.packaging_branch != 'master': + repo.rename_branch('master', options.packaging_branch) elif options.merge: debian_branch_merge(repo, tag, version, options) @@ -448,19 +409,26 @@ def main(argv): if current_branch in [ options.upstream_branch, repo.pristine_tar_branch]: repo.force_head(current_branch, hard=True) + # Create symlink, if requested + if options.symlink_orig: + if source.is_tarball(): + link = os.path.join('..', pristine_name) + if not (os.path.exists(link) and + os.path.samefile(link, source.path)): + gbp.log.info('Creating symlink to %s' % source.path) + os.symlink(source.path, link) + else: + gbp.log.warn('Orig source not a tarball, not symlinked') + except (gbpc.CommandExecFailed, GitRepositoryError) as err: msg = str(err) or 'Unknown error, please report a bug' raise GbpError("Import of %s failed: %s" % (source.path, msg)) - except GbpError as err: + except (GbpError, GitRepositoryError) as err: if str(err): gbp.log.err(err) ret = 1 - - if pristine_orig and linked and not options.symlink_orig: - os.unlink(pristine_orig) - - if tmpdir: - cleanup_tmp_tree(tmpdir) + finally: + del_tmpdir() if not ret: gbp.log.info("Successfully imported version %s of %s" % (version, source.path)) diff --git a/gbp/scripts/import_orig_rpm.py b/gbp/scripts/import_orig_rpm.py new file mode 100755 index 00000000..751d9d8f --- /dev/null +++ b/gbp/scripts/import_orig_rpm.py @@ -0,0 +1,350 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2006, 2007, 2009, 2011 Guido Guenther +# (C) 2012-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +"""Import a new upstream version into a git repository""" + +import ConfigParser +import os +import sys +import gbp.command_wrappers as gbpc +import re +import string +from gbp.pkg import parse_archive_filename +from gbp.rpm import (RpmUpstreamSource, SpecFile, NoSpecError, guess_spec, + guess_spec_repo) +from gbp.rpm.policy import RpmPkgPolicy +from gbp.rpm.git import (GitRepositoryError, RpmGitRepository) +from gbp.config import GbpOptionParserRpm, GbpOptionGroup, no_upstream_branch_msg +from gbp.errors import GbpError +import gbp.log +from gbp.scripts.common.import_orig import (ask_package_name, + ask_package_version, + prepare_sources) +from gbp.scripts.import_srpm import download_file +from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile + + +def upstream_import_commit_msg(options, version): + return options.import_msg % dict(version=version) + +def find_spec(repo, options): + """Find spec in the working tree or repository""" + try: + preferred_fn = os.path.basename(repo.path) + '.spec' + spec = guess_spec(os.path.join(repo.path, options.packaging_dir), True, + preferred_fn) + except NoSpecError: + try: + # Check the spec file from the repository, in case we're not on the + # packaging-branch (but upstream, for example). + spec = guess_spec_repo(repo, options.packaging_branch, + options.packaging_dir, True, preferred_fn) + except NoSpecError: + spec = None + return spec + +def detect_name_and_version(repo, source, spec, options): + """Determine name and version of the upstream project""" + # Guess defaults for the package name and version from the + # original tarball. + (guessed_package, guessed_version) = source.guess_version() or ('', '') + + # Try to find the source package name + if spec: + sourcepackage = spec.name + else: + if options.interactive: + sourcepackage = ask_package_name(guessed_package, + RpmPkgPolicy.is_valid_packagename, + RpmPkgPolicy.packagename_msg) + else: + if guessed_package: + sourcepackage = guessed_package + else: + raise GbpError, "Couldn't determine upstream package name. Use --interactive." + + # Try to find the version. + if options.version: + version = options.version + else: + if options.interactive: + version = ask_package_version(guessed_version, + RpmPkgPolicy.is_valid_upstreamversion, + RpmPkgPolicy.upstreamversion_msg) + else: + if guessed_version: + version = guessed_version + else: + raise GbpError, "Couldn't determine upstream version. Use '-u' or --interactive." + + return (sourcepackage, version) + + +def find_source(spec, options, args): + """Find the tarball to import + @return: upstream source filename or None if nothing to import + @rtype: string + @raise GbpError: raised on all detected errors + """ + if len(args) > 1: # source specified + raise GbpError("More than one archive specified. Try --help.") + elif len(args) == 0: + if spec and spec.orig_src: + path = spec.orig_src['uri'] + gbp.log.info("Archive file path from spec is used ('%s')" % path) + elif spec: + raise GbpError("No archive to import specified and unable to " + "determine source from spec. Try --help.") + else: + raise GbpError("No archive to import specified and no spec file " + "found. 
Try --help.") + else: + path = args[0] + if re.match(r'[a-z]{1,5}://', path): + path = download_file('..', path) + return RpmUpstreamSource(path) + + +def pristine_tarball_name(source, pkg_name, pkg_version, pristine_name): + old_filename = os.path.basename(source.path) + base_name, _fmt, _comp = parse_archive_filename(old_filename) + if pristine_name != 'auto': + ext = string.replace(old_filename, base_name, '', 1) + return pristine_name % {'name': pkg_name, + 'version': pkg_version, + 'upstreamversion': pkg_version, + 'filename_base': base_name, + 'filename_ext': ext} + # Need to repack and mangle filename if the archive is not + # pristine-tar-compatible -> we decide to create gz compressed tarball + elif not source.is_tarball(): + return "%s.tar.gz" % base_name + return old_filename + + +def set_bare_repo_options(options): + """Modify options for import into a bare repository""" + if options.pristine_tar or options.merge: + gbp.log.info("Bare repository: setting %s%s options" + % (["", " '--no-pristine-tar'"][options.pristine_tar], + ["", " '--no-merge'"][options.merge])) + options.pristine_tar = False + options.merge = False + + +def parse_args(argv): + try: + parser = GbpOptionParserRpm(command=os.path.basename(argv[0]), + prefix='', + usage='%prog [options] /path/to/upstream-version.tar.gz') + except ConfigParser.ParsingError, err: + gbp.log.err(err) + return None, None + + import_group = GbpOptionGroup(parser, "import options", + "pristine-tar and filtering") + tag_group = GbpOptionGroup(parser, "tag options", + "options related to git tag creation") + branch_group = GbpOptionGroup(parser, "version and branch naming options", + "version number and branch layout options") + cmd_group = GbpOptionGroup(parser, "external command options", "how and when to invoke external commands and hooks") + + for group in [import_group, branch_group, tag_group, cmd_group ]: + parser.add_option_group(group) + + branch_group.add_option("-u", "--upstream-version", dest="version", + help="Upstream Version") + branch_group.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch") + branch_group.add_config_file_option(option_name="upstream-branch", + dest="upstream_branch") + branch_group.add_option("--upstream-vcs-tag", dest="vcs_tag", + help="Upstream VCS tag add to the merge commit") + branch_group.add_boolean_config_file_option(option_name="merge", dest="merge") + branch_group.add_config_file_option(option_name="packaging-dir", dest="packaging_dir") + branch_group.add_boolean_config_file_option( + option_name="create-missing-branches", + dest="create_missing_branches") + + tag_group.add_boolean_config_file_option(option_name="sign-tags", + dest="sign_tags") + tag_group.add_config_file_option(option_name="keyid", + dest="keyid") + tag_group.add_config_file_option(option_name="upstream-tag", + dest="upstream_tag") + import_group.add_config_file_option(option_name="filter", + dest="filters", action="append") + import_group.add_boolean_config_file_option(option_name="pristine-tar", + dest="pristine_tar") + import_group.add_boolean_config_file_option(option_name="filter-pristine-tar", + dest="filter_pristine_tar") + import_group.add_config_file_option(option_name="pristine-tarball-name", + dest="pristine_tarball_name") + import_group.add_config_file_option(option_name="orig-prefix", + dest="orig_prefix") + import_group.add_config_file_option(option_name="import-msg", + dest="import_msg") + cmd_group.add_config_file_option(option_name="postimport", dest="postimport") + + 
parser.add_boolean_config_file_option(option_name="interactive", + dest='interactive') + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, + help="verbose command execution") + parser.add_config_file_option(option_name="color", dest="color", type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") + + (options, args) = parser.parse_args(argv[1:]) + gbp.log.setup(options.color, options.verbose, options.color_scheme) + + return options, args + + +def main(argv): + ret = 0 + + (options, args) = parse_args(argv) + if not options: + return 1 + + try: + init_tmpdir(options.tmp_dir, prefix='import-orig-rpm_') + tmpdir = tempfile.mkdtemp() + + try: + repo = RpmGitRepository('.') + except GitRepositoryError: + raise GbpError, "%s is not a git repository" % (os.path.abspath('.')) + + spec = find_spec(repo, options) + source = find_source(spec, options, args) + + # an empty repo has now branches: + initial_branch = repo.get_branch() + is_empty = False if initial_branch else True + + if not repo.has_branch(options.upstream_branch): + if options.create_missing_branches: + gbp.log.info("Will create missing branch '%s'" % + options.upstream_branch) + elif is_empty: + options.create_missing_branches = True + else: + raise GbpError(no_upstream_branch_msg % options.upstream_branch) + + sourcepackage, version = detect_name_and_version(repo, source, spec, + options) + + (clean, out) = repo.is_clean() + if not clean and not is_empty: + gbp.log.err("Repository has uncommitted changes, commit these first: ") + raise GbpError, out + + if repo.bare: + set_bare_repo_options(options) + + # Prepare sources for importing + if options.pristine_tar: + prepare_pristine = pristine_tarball_name(source, sourcepackage, + version, + options.pristine_tarball_name) + else: + prepare_pristine = None + unpacked_orig, pristine_orig = \ + prepare_sources(source, sourcepackage, version, + prepare_pristine, options.filters, + options.filter_pristine_tar, + options.orig_prefix, tmpdir) + + # Don't mess up our repo with git metadata from an upstream tarball + if os.path.isdir(os.path.join(unpacked_orig, '.git/')): + raise GbpError("The orig tarball contains .git metadata - " + "giving up.") + try: + filter_msg = ["", " (filtering out %s)" + % options.filters][len(options.filters) > 0] + gbp.log.info("Importing '%s' to branch '%s'%s..." 
% (source.path, + options.upstream_branch, + filter_msg)) + gbp.log.info("Source package is %s" % sourcepackage) + gbp.log.info("Upstream version is %s" % version) + + msg = upstream_import_commit_msg(options, version) + + if options.vcs_tag: + parents = [repo.rev_parse("%s^{}" % options.vcs_tag)] + else: + parents = None + + commit = repo.commit_dir(unpacked_orig, + msg=msg, + branch=options.upstream_branch, + other_parents=parents, + create_missing_branch=options.create_missing_branches) + if options.pristine_tar and pristine_orig: + gbp.log.info("Pristine-tar: commiting %s" % pristine_orig) + repo.pristine_tar.commit(pristine_orig, options.upstream_branch) + + tag_str_fields = {'upstreamversion': version, 'version': version} + tag = repo.version_to_tag(options.upstream_tag, tag_str_fields) + repo.create_tag(name=tag, + msg="Upstream version %s" % version, + commit=commit, + sign=options.sign_tags, + keyid=options.keyid) + if options.merge: + gbp.log.info("Merging to '%s'" % options.packaging_branch) + if repo.has_branch(options.packaging_branch): + repo.set_branch(options.packaging_branch) + try: + repo.merge(tag) + except GitRepositoryError: + raise GbpError, """Merge failed, please resolve.""" + else: + repo.create_branch(options.packaging_branch, rev=options.upstream_branch) + if repo.get_branch() == options.packaging_branch: + repo.force_head(options.packaging_branch, hard=True) + if options.postimport: + info = { 'upstreamversion': version } + env = { 'GBP_BRANCH': options.packaging_branch } + gbpc.Command(options.postimport % info, extra_env=env, + shell=True)() + # Update working copy and index if we've possibly updated the + # checked out branch + current_branch = repo.get_branch() + if (current_branch == options.upstream_branch or + current_branch == repo.pristine_tar_branch): + repo.force_head(current_branch, hard=True) + except (GitRepositoryError, gbpc.CommandExecFailed): + raise GbpError, "Import of %s failed" % source.path + except GbpError, err: + if len(err.__str__()): + gbp.log.err(err) + ret = 1 + finally: + del_tmpdir() + + if not ret: + gbp.log.info("Successfully imported version %s of %s" % (version, source.path)) + return ret + +if __name__ == "__main__": + sys.exit(main(sys.argv)) + +# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·: diff --git a/gbp/scripts/import_srpm.py b/gbp/scripts/import_srpm.py index 5013ba60..315d8cae 100755 --- a/gbp/scripts/import_srpm.py +++ b/gbp/scripts/import_srpm.py @@ -38,6 +38,8 @@ no_upstream_branch_msg) from gbp.errors import GbpError import gbp.log +from gbp.scripts.pq_rpm import safe_patches, rm_patch_files, get_packager +from gbp.scripts.common.pq import apply_and_commit_patch from gbp.pkg import parse_archive_filename no_packaging_branch_msg = """ @@ -45,10 +47,21 @@ You need to reate it or use --packaging-branch to specify it. """ +PATCH_AUTODELETE_COMMIT_MSG = """ +Autoremove imported patches from packaging + +Removed all imported patches from %s +and patch files from the packaging dir. 
+""" + class SkipImport(Exception): """Nothing imported""" pass +class PatchImportError(Exception): + """Patch import failed""" + pass + def download_file(target_dir, url): """Download a remote file""" @@ -105,6 +118,45 @@ def set_bare_repo_options(options): if options.pristine_tar: gbp.log.info("Bare repository: setting %s option '--no-pristine-tar'") options.pristine_tar = False + if options.patch_import: + gbp.log.info("Bare repository: setting %s option '--no-patch-import')") + options.patch_import = False + + +def import_spec_patches(repo, spec): + """ + Import patches from a spec file to the current branch + """ + queue = spec.patchseries() + if len(queue) == 0: + return + + gbp.log.info("Importing patches to '%s' branch" % repo.get_branch()) + orig_head = repo.rev_parse("HEAD") + packager = get_packager(spec) + + # Put patches in a safe place + queue = safe_patches(queue) + for patch in queue: + gbp.log.debug("Applying %s" % patch.path) + try: + apply_and_commit_patch(repo, patch, packager) + except (GbpError, GitRepositoryError): + repo.force_head(orig_head, hard=True) + raise PatchImportError("Patch(es) didn't apply, you need apply " + "and commit manually") + + # Remove patches from spec and packaging directory + gbp.log.info("Removing imported patch files from spec and packaging dir") + rm_patch_files(spec) + try: + spec.update_patches([], {}) + spec.write_spec_file() + except GbpError: + repo.force_head('HEAD', hard=True) + raise PatchImportError("Unable to update spec file, you need to edit" + "and commit it manually") + repo.commit_all(msg=PATCH_AUTODELETE_COMMIT_MSG % spec.specfile) def force_to_branch_head(repo, branch): @@ -152,6 +204,9 @@ def build_parser(name): dest="packaging_branch") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") + branch_group.add_option("--upstream-vcs-tag", dest="vcs_tag", + help="Upstream VCS tag on top of which to import " + "the orig sources") branch_group.add_boolean_config_file_option( option_name="create-missing-branches", dest="create_missing_branches") @@ -171,6 +226,9 @@ def build_parser(name): dest="packaging_tag") tag_group.add_config_file_option(option_name="upstream-tag", dest="upstream_tag") + tag_group.add_option("--skip-packaging-tag",dest="skip_packaging_tag", + action="store_true", + help="Don't add a tag after importing packaging files") import_group.add_config_file_option(option_name="filter", dest="filters", action="append") @@ -184,6 +242,8 @@ def build_parser(name): dest="author_is_committer") import_group.add_config_file_option(option_name="packaging-dir", dest="packaging_dir") + import_group.add_boolean_config_file_option(option_name="patch-import", + dest="patch_import") return parser def parse_args(argv): @@ -204,6 +264,8 @@ def main(argv): ret = 0 skipped = False + gbp.log.initialize() + options, args = parse_args(argv) if len(args) != 1: @@ -302,22 +364,31 @@ def main(argv): if spec.orig_src: orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename']) sources = RpmUpstreamSource(orig_tarball) - sources.unpack(dirs['origsrc'], options.filters) + sources = sources.unpack(dirs['origsrc'], options.filters) else: sources = None - src_tag_format = options.packaging_tag if options.native \ - else options.upstream_tag - tag_str_fields = dict(spec.version, vendor=options.vendor.lower()) - src_tag = repo.version_to_tag(src_tag_format, tag_str_fields) - ver_str = compose_version_str(spec.version) - - if repo.find_version(options.packaging_tag, tag_str_fields): - 
gbp.log.warn("Version %s already imported." % ver_str) + packaging_tag_str_fields = dict( + spec.version, + version=compose_version_str(spec.version), + vendor=options.vendor.lower()) + if options.native: + src_tag_format = options.packaging_tag + src_tag_str_fields = packaging_tag_str_fields + else: + src_tag_format = options.upstream_tag + src_tag_str_fields = {'version': spec.upstreamversion, + 'upstreamversion': spec.upstreamversion} + src_tag = repo.version_to_tag(src_tag_format, src_tag_str_fields) + + if repo.find_version(options.packaging_tag, packaging_tag_str_fields): + gbp.log.warn("Version %s already imported." % + packaging_tag_str_fields['version']) if options.allow_same_version: gbp.log.info("Moving tag of version '%s' since import forced" % - ver_str) - move_tag_stamp(repo, options.packaging_tag, tag_str_fields) + packaging_tag_str_fields['version']) + move_tag_stamp(repo, options.packaging_tag, + packaging_tag_str_fields) else: raise SkipImport @@ -339,7 +410,7 @@ def main(argv): # Import sources if sources: - src_commit = repo.find_version(src_tag_format, tag_str_fields) + src_commit = repo.find_version(src_tag_format, src_tag_str_fields) if not src_commit: gbp.log.info("Tag %s not found, importing sources" % src_tag) @@ -355,17 +426,23 @@ def main(argv): raise GbpError src_vendor = "Native" if options.native else "Upstream" msg = "%s version %s" % (src_vendor, spec.upstreamversion) + if options.vcs_tag: + parents = [repo.rev_parse("%s^{}" % options.vcs_tag)] + else: + parents = None src_commit = repo.commit_dir(sources.unpacked, "Imported %s" % msg, branch, + other_parents=parents, author=author, committer=committer, create_missing_branch=options.create_missing_branches) - repo.create_tag(name=src_tag, - msg=msg, - commit=src_commit, - sign=options.sign_tags, - keyid=options.keyid) + if not (options.native and options.skip_packaging_tag): + repo.create_tag(name=src_tag, + msg=msg, + commit=src_commit, + sign=options.sign_tags, + keyid=options.keyid) if not options.native: if options.pristine_tar: @@ -394,8 +471,8 @@ def main(argv): "option.") raise GbpError - tag = repo.version_to_tag(options.packaging_tag, tag_str_fields) - msg = "%s release %s" % (options.vendor, ver_str) + msg = "%s release %s" % (options.vendor, + packaging_tag_str_fields['version']) if options.orphan_packaging or not sources: commit = repo.commit_dir(dirs['packaging_base'], @@ -426,13 +503,21 @@ def main(argv): # Import patches on top of the source tree # (only for non-native packages with non-orphan packaging) force_to_branch_head(repo, options.packaging_branch) + if options.patch_import: + spec = SpecFile(os.path.join(repo.path, + options.packaging_dir, spec.specfile)) + import_spec_patches(repo, spec) + commit = options.packaging_branch # Create packaging tag - repo.create_tag(name=tag, - msg=msg, - commit=commit, - sign=options.sign_tags, - keyid=options.keyid) + if not options.skip_packaging_tag: + tag = repo.version_to_tag(options.packaging_tag, + packaging_tag_str_fields) + repo.create_tag(name=tag, + msg=msg, + commit=commit, + sign=options.sign_tags, + keyid=options.keyid) force_to_branch_head(repo, options.packaging_branch) @@ -451,6 +536,9 @@ def main(argv): except NoSpecError as err: gbp.log.err("Failed determine spec file: %s" % err) ret = 1 + except PatchImportError as err: + gbp.log.err(err) + ret = 2 except SkipImport: skipped = True finally: @@ -458,7 +546,8 @@ def main(argv): del_tmpdir() if not ret and not skipped: - gbp.log.info("Version '%s' imported under '%s'" % (ver_str, 
spec.name)) + gbp.log.info("Version '%s' imported under '%s'" % + (packaging_tag_str_fields['version'], spec.name)) return ret if __name__ == '__main__': diff --git a/gbp/scripts/pq.py b/gbp/scripts/pq.py index 5dfe4c20..764578ff 100755 --- a/gbp/scripts/pq.py +++ b/gbp/scripts/pq.py @@ -22,7 +22,6 @@ import os import shutil import sys -import tempfile import re from gbp.config import GbpOptionParserDebian from gbp.git import (GitRepositoryError, GitRepository) @@ -36,6 +35,7 @@ apply_and_commit_patch, switch_pq, drop_pq, get_maintainer_from_control) from gbp.dch import extract_bts_cmds +from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile PATCH_DIR = "debian/patches/" SERIES_FILE = os.path.join(PATCH_DIR,"series") @@ -173,13 +173,13 @@ def commit_patches(repo, branch, patches, options): def export_patches(repo, branch, options): """Export patches from the pq branch into a patch series""" - if is_pq_branch(branch): - base = pq_branch_base(branch) + if is_pq_branch(branch, options): + base = pq_branch_base(branch, options) gbp.log.info("On '%s', switching to '%s'" % (branch, base)) branch = base repo.set_branch(branch) - pq_branch = pq_branch_name(branch) + pq_branch = pq_branch_name(branch, options) try: shutil.rmtree(PATCH_DIR) except OSError as e: @@ -208,7 +208,7 @@ def export_patches(repo, branch, options): gbp.log.info("No patches on '%s' - nothing to do." % pq_branch) if options.drop: - drop_pq(repo, branch) + drop_pq(repo, branch, options) def safe_patches(series): @@ -234,7 +234,7 @@ def safe_patches(series): return (tmpdir, series) -def import_quilt_patches(repo, branch, series, tries, force): +def import_quilt_patches(repo, branch, series, tries, options): """ apply a series of quilt patches in the series file 'series' to branch the patch-queue branch for 'branch' @@ -244,24 +244,24 @@ def import_quilt_patches(repo, branch, series, tries, force): @param series; series file to read patches from @param tries: try that many times to apply the patches going back one commit in the branches history after each failure. - @param force: import the patch series even if the branch already exists + @param options: gbp-pq command options """ tmpdir = None - if is_pq_branch(branch): - if force: - branch = pq_branch_base(branch) - pq_branch = pq_branch_name(branch) + if is_pq_branch(branch, options): + if options.force: + branch = pq_branch_base(branch, options) + pq_branch = pq_branch_name(branch, options) repo.checkout(branch) else: gbp.log.err("Already on a patch-queue branch '%s' - doing nothing." % branch) raise GbpError else: - pq_branch = pq_branch_name(branch) + pq_branch = pq_branch_name(branch, options) if repo.has_branch(pq_branch): - if force: - drop_pq(repo, branch) + if options.force: + drop_pq(repo, branch, options) else: raise GbpError("Patch queue branch '%s'. already exists. Try 'rebase' instead." 
% pq_branch) @@ -309,11 +309,11 @@ def import_quilt_patches(repo, branch, series, tries, force): shutil.rmtree(tmpdir) -def rebase_pq(repo, branch): - if is_pq_branch(branch): - base = pq_branch_base(branch) +def rebase_pq(repo, branch, options): + if is_pq_branch(branch, options): + base = pq_branch_base(branch, options) else: - switch_to_pq_branch(repo, branch) + switch_to_pq_branch(repo, branch, options) base = branch GitCommand("rebase")([base]) @@ -365,6 +365,8 @@ def parse_args(argv): def main(argv): retval = 0 + gbp.log.initialize() + (options, args) = parse_args(argv) if not options: return 1 @@ -395,33 +397,40 @@ def main(argv): gbp.log.err("%s is not a git repository" % (os.path.abspath('.'))) return 1 + if os.path.abspath('.') != repo.path: + gbp.log.warn("Switching to topdir before running commands") + os.chdir(repo.path) + try: + init_tmpdir(options.tmp_dir, prefix='pq_') current = repo.get_branch() if action == "export": export_patches(repo, current, options) elif action == "import": series = SERIES_FILE tries = options.time_machine if (options.time_machine > 0) else 1 - import_quilt_patches(repo, current, series, tries, options.force) + import_quilt_patches(repo, current, series, tries, options) current = repo.get_branch() gbp.log.info("Patches listed in '%s' imported on '%s'" % (series, current)) elif action == "drop": - drop_pq(repo, current) + drop_pq(repo, current, options) elif action == "rebase": - rebase_pq(repo, current) + rebase_pq(repo, current, options) elif action == "apply": patch = Patch(patchfile) maintainer = get_maintainer_from_control(repo) - apply_single_patch(repo, current, patch, maintainer, options.topic) + apply_single_patch(repo, current, patch, maintainer, options) elif action == "switch": - switch_pq(repo, current) + switch_pq(repo, current, options) except CommandExecFailed: retval = 1 except (GbpError, GitRepositoryError) as err: if str(err): gbp.log.err(err) retval = 1 + finally: + del_tmpdir() return retval diff --git a/gbp/scripts/pq_bb.py b/gbp/scripts/pq_bb.py new file mode 100755 index 00000000..68fcd3c9 --- /dev/null +++ b/gbp/scripts/pq_bb.py @@ -0,0 +1,427 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2011 Guido Günther +# (C) 2012-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +"""manage patches in a patch queue""" + +import ConfigParser +import errno +import os +import shutil +import sys + +import gbp.tmpfile as tempfile +from gbp.config import GbpOptionParserBB +from gbp.rpm.git import GitRepositoryError, RpmGitRepository +from gbp.command_wrappers import GitCommand, CommandExecFailed +from gbp.errors import GbpError +import gbp.log +from gbp.patch_series import PatchSeries, Patch +from gbp.rpm import string_to_int +from gbp.scripts.common.pq import (is_pq_branch, pq_branch_name, pq_branch_base, + apply_and_commit_patch, drop_pq) +from gbp.scripts.pq_rpm import (generate_patches, safe_patches, + import_extra_files) +from gbp.bb import bb, init_tinfoil, parse_bb, pkg_version + +# pylint: disable=bad-continuation + +USAGE_STRING = \ +"""%prog [options] action - maintain patches on a patch queue branch +tions: +export Export the patch queue / devel branch associated to the + current branch into a patch series in and update the recipe file +import Create a patch queue / devel branch from recipe file + and patches in current dir. +rebase Switch to patch queue / devel branch associated to the current + branch and rebase against upstream. +drop Drop (delete) the patch queue /devel branch associated to + the current branch. +apply Apply a patch +switch Switch to patch-queue branch and vice versa.""" + + +def rm_patch_files(bbfile): + """Delete the patch files listed in the pkg meta data.""" + unlinked = set() + + # Go through local files + for path in bbfile.localfiles: + if path.endswith('.patch'): + gbp.log.debug("Removing patch '%s'" % path) + unlinked.add(os.path.basename(path)) + try: + os.unlink(path) + except OSError as err: + if err.errno != errno.ENOENT: + raise GbpError("Failed to remove patch: %s" % err) + else: + gbp.log.debug("Patch %s does not exist." % path) + else: + gbp.log.debug("Unlink skipping non-local/non-patch file %s" % path) + uris = (bbfile.getVar('SRC_URI', False) or "").split() + return [uri for uri in uris if os.path.basename(uri) not in unlinked] + + +def update_patch_series(repo, bbfile, start, end, options): + """Export patches to packaging directory and update recipe file""" + squash = options.patch_export_squash_until.split(':', 1) + if len(squash) == 1: + squash.append(None) + else: + squash[1] += '.diff' + + # Unlink old (local) patch files and generate new patches + rm_patch_files(bbfile) + + # Guess patch subdir + bb_dir = os.path.dirname(bbfile.getVar('FILE', True)) + pkg_name = bbfile.getVar('PN', True) + pkg_ver = bbfile.getVar('PV', True) + subdir = pkg_name + '-' + pkg_ver + if not os.path.isdir(os.path.join(bb_dir, subdir)): + if os.path.isdir(os.path.join(bb_dir, pkg_name)): + subdir = pkg_name + elif os.path.isdir(os.path.join(bb_dir, 'files')): + subdir = 'files' + tgt_dir = os.path.join(bb_dir, subdir) + + patches, _commands = generate_patches(repo, start, squash, end, + tgt_dir, options) + # TODO: implement commands processing (e.g. 
topic) + new_uris = ['file://' + patch for patch in patches] + bbfile.substitute_var_val(bbfile.bb_path, 'SRC_URI', r'file://\S+.\.patch', + '') + bbfile.append_var_val(bbfile.bb_path, 'SRC_URI', new_uris) + return patches + +def var_to_str(var, value): + """Create a well formatted string buffer for a variable assignment""" + indent = ' ' * (len(var) + 3) + linebuf = ['%s = "%s \\\n' % (var, value[0])] + for val in value[1:]: + linebuf.append(indent + ' ' + val + '\\\n') + linebuf.append(indent + '"\n') + return linebuf + + +def find_upstream_commit(repo, bbfile, upstream_tag): + """Find commit corresponding upstream version""" + src_rev = bbfile.getVar('SRCREV', True) + if src_rev and src_rev != 'INVALID': + return bbfile.getVar('SRCREV', True) + + # Find tag + upstreamversion = bbfile.getVar('PV', True) + tag_str_fields = {'upstreamversion': upstreamversion, + 'vendor': 'Upstream'} + upstream_commit = repo.find_version(upstream_tag, tag_str_fields) + if not upstream_commit: + raise GbpError("Couldn't find upstream version %s" % upstreamversion) + return upstream_commit + + +def export_patches(cfg, repo, options): + """Export patches from the pq branch into a packaging branch""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + gbp.log.info("On branch '%s', switching to '%s'" % (current, base)) + repo.set_branch(base) + bbfile = parse_bb(cfg, options, repo) + pq_branch = current + else: + bbfile = parse_bb(cfg, options, repo) + pq_branch = pq_branch_name(current, options, pkg_version(bbfile)) + upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag) + + export_treeish = options.export_rev if options.export_rev else pq_branch + + update_patch_series(repo, bbfile, upstream_commit, export_treeish, options) + + bb_dir = os.path.dirname(bbfile.getVar('FILE', True)) + GitCommand('status')(['--', bb_dir]) + + +def bb_to_patch_series(bbfile): + """Get all local patches as a series""" + series = PatchSeries() + for path in bbfile.localfiles: + if path.endswith('.patch'): + series.append(Patch(path)) + return series + + +def import_bb_patches(cfg, repo, options): + """Apply a series of patches in a recipe to branch onto a pq branch""" + current = repo.get_branch() + + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + raise GbpError("Already on a patch-queue branch '%s' - doing " + "nothing." % current) + else: + bbfile = parse_bb(cfg, options, repo) + base = current + upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag) + pq_branch = pq_branch_name(base, options, pkg_version(bbfile)) + + # Create pq-branch + if repo.has_branch(pq_branch) and not options.force: + raise GbpError("Patch-queue branch '%s' already exists. " + "Try 'rebase' instead." 
% pq_branch) + try: + if repo.get_branch() == pq_branch: + repo.force_head(upstream_commit, hard=True) + else: + repo.create_branch(pq_branch, upstream_commit, force=True) + except GitRepositoryError as err: + raise GbpError("Cannot create patch-queue branch '%s': %s" % + (pq_branch, err)) + + # Put patches in a safe place + in_queue = bb_to_patch_series(bbfile) + queue = safe_patches(in_queue, options.tmp_dir) + # Do import + try: + gbp.log.info("Switching to branch '%s'" % pq_branch) + repo.set_branch(pq_branch) + import_extra_files(repo, base, options.import_files) + + if not queue: + return + gbp.log.info("Trying to apply patches from branch '%s' onto '%s'" % + (base, upstream_commit)) + for patch in queue: + gbp.log.debug("Applying %s" % patch.path) + apply_and_commit_patch(repo, patch, fallback_author=None) + except (GbpError, GitRepositoryError) as err: + gbp.log.err('Import failed: %s' % err) + repo.force_head('HEAD', hard=True) + repo.set_branch(base) + repo.delete_branch(pq_branch) + raise + + recipe_fn = os.path.basename(bbfile.getVar('FILE', True)) + gbp.log.info("Patches listed in '%s' imported on '%s'" % (recipe_fn, + pq_branch)) + + +def rebase_pq(cfg, repo, options): + """Rebase pq branch on the correct upstream version""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + bbfile = parse_bb(cfg, options, repo, base) + else: + base = current + bbfile = parse_bb(cfg, options, repo) + upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag) + + switch_to_pq_branch(cfg, repo, base, options) + GitCommand("rebase")([upstream_commit]) + + +def switch_pq(cfg, repo, options): + """Switch to patch-queue branch if on base branch and vice versa""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + gbp.log.info("Switching to branch '%s'" % base) + repo.checkout(base) + else: + switch_to_pq_branch(cfg, repo, current, options) + + +def drop_pq_bb(cfg, repo, options): + """Remove pq branch""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + bbfile = parse_bb(cfg, options, repo, base) + else: + bbfile = parse_bb(cfg, options, repo) + drop_pq(repo, current, options, pkg_version(bbfile)) + + +def switch_to_pq_branch(cfg, repo, branch, options): + """ + Switch to patch-queue branch if not already there, create it if it + doesn't exist yet + """ + if is_pq_branch(branch, options): + return + + bbfile = parse_bb(cfg, options, repo, branch) + pq_branch = pq_branch_name(branch, options, pkg_version(bbfile)) + if not repo.has_branch(pq_branch): + raise GbpError("Branch '%s' does not exist" % pq_branch) + + gbp.log.info("Switching to branch '%s'" % pq_branch) + repo.set_branch(pq_branch) + +def apply_single_patch(cfg, repo, patchfile, options): + """Apply a single patch onto the pq branch""" + current = repo.get_branch() + if not is_pq_branch(current, options): + switch_to_pq_branch(cfg, repo, current, options) + patch = Patch(patchfile) + apply_and_commit_patch(repo, patch, fallback_author=None) + +def opt_split_cb(option, opt_str, value, parser): + """Split option string into a list""" + setattr(parser.values, option.dest, value.split(',')) + +def build_parser(name): + """Create command line argument parser""" + try: + parser = GbpOptionParserBB(command=os.path.basename(name), + prefix='', usage=USAGE_STRING) + except ConfigParser.ParsingError as err: + gbp.log.err(err) + return None + + 
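(Aside on opt_split_cb() above: this optparse callback is what lets list-valued options such as --import-files be passed as a single comma-separated string. A self-contained usage sketch; the option value given here is a made-up example:)

    # Usage sketch for an optparse callback like opt_split_cb() above.
    from optparse import OptionParser

    def opt_split_cb(option, opt_str, value, parser):
        """Split the option string into a list"""
        setattr(parser.values, option.dest, value.split(','))

    parser = OptionParser()
    parser.add_option("--import-files", dest="import_files", type="string",
                      action="callback", callback=opt_split_cb)
    opts, _args = parser.parse_args(["--import-files=a.conf,packaging/b.conf"])
    print(opts.import_files)   # -> ['a.conf', 'packaging/b.conf']
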
parser.add_boolean_config_file_option(option_name="patch-numbers", + dest="patch_numbers") + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + default=False, help="Verbose command execution") + parser.add_option("--force", dest="force", action="store_true", + default=False, + help="In case of import even import if the branch already exists") + parser.add_config_file_option(option_name="vendor", action="store", + dest="vendor") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") + parser.add_config_file_option(option_name="upstream-tag", + dest="upstream_tag") + parser.add_config_file_option(option_name="bb-file", dest="bb_file") + parser.add_config_file_option(option_name="meta-dir", + dest="meta_dir") + parser.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch", + help="Branch the packaging is being maintained on. Only relevant " + "if a invariable/single pq-branch is defined, in which case " + "this is used as the 'base' branch. Default is " + "'%(packaging-branch)s'") + parser.add_config_file_option(option_name="pq-branch", dest="pq_branch") + parser.add_config_file_option(option_name="import-files", + dest="import_files", type="string", action="callback", + callback=opt_split_cb) + parser.add_option("--export-rev", action="store", dest="export_rev", + default="", + help="Export patches from treeish object TREEISH instead of head " + "of patch-queue branch", metavar="TREEISH") + parser.add_config_file_option("patch-export-compress", + dest="patch_export_compress") + parser.add_config_file_option("patch-export-squash-until", + dest="patch_export_squash_until") + parser.add_config_file_option("patch-export-ignore-path", + dest="patch_export_ignore_path") + return parser + +def parse_args(argv): + """Parse command line arguments""" + parser = build_parser(argv[0]) + if not parser: + return None, None + + options, args = parser.parse_args(argv) + gbp.log.setup(options.color, options.verbose, options.color_scheme) + options.patch_export_compress = string_to_int(options.patch_export_compress) + + return options, args + + +def main(argv): + """Main function for the gbp pq-rpm command""" + retval = 0 + + if not bb: + return 1 + + options, args = parse_args(argv) + if not options: + return 1 + + if len(args) < 2: + gbp.log.err("No action given.") + return 1 + else: + action = args[1] + + if args[1] in ["export", "import", "rebase", "drop", "switch"]: + pass + elif args[1] in ["apply"]: + if len(args) != 3: + gbp.log.err("No patch name given.") + return 1 + else: + patchfile = args[2] + else: + gbp.log.err("Unknown action '%s'." 
% args[1]) + return 1 + + try: + repo = RpmGitRepository(os.path.curdir) + except GitRepositoryError: + gbp.log.err("%s is not a git repository" % (os.path.abspath('.'))) + return 1 + + if os.path.abspath('.') != repo.path: + gbp.log.warn("Switching to topdir before running commands") + os.chdir(repo.path) + + try: + # Initialize BitBake + tinfoil = init_tinfoil(config_only=True, tracking=True) + bb_cfg_data = bb.data.createCopy(tinfoil.config_data) + + # Create base temporary directory for this run + options.tmp_dir = tempfile.mkdtemp(dir=options.tmp_dir, + prefix='gbp-pq-bb_') + if action == "export": + export_patches(bb_cfg_data, repo, options) + elif action == "import": + import_bb_patches(bb_cfg_data, repo, options) + elif action == "drop": + drop_pq_bb(bb_cfg_data, repo, options) + elif action == "rebase": + rebase_pq(bb_cfg_data, repo, options) + elif action == "apply": + apply_single_patch(bb_cfg_data, repo, patchfile, options) + elif action == "switch": + switch_pq(bb_cfg_data, repo, options) + except CommandExecFailed: + retval = 1 + except GitRepositoryError as err: + gbp.log.err("Git command failed: %s" % err) + retval = 1 + except GbpError as err: + if len(err.__str__()): + gbp.log.err(err) + retval = 1 + finally: + shutil.rmtree(options.tmp_dir, ignore_errors=True) + + return retval + +if __name__ == '__main__': + sys.exit(main(sys.argv)) + diff --git a/gbp/scripts/pq_rpm.py b/gbp/scripts/pq_rpm.py index 660807a2..99725462 100755 --- a/gbp/scripts/pq_rpm.py +++ b/gbp/scripts/pq_rpm.py @@ -24,26 +24,46 @@ import gzip import os import re +import shutil +import subprocess import sys import gbp.log from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile -from gbp.config import GbpOptionParserRpm +from gbp.config import GbpOptionParserRpm, optparse_split_cb from gbp.rpm.git import GitRepositoryError, RpmGitRepository -from gbp.git.modifier import GitModifier +from gbp.git.modifier import GitModifier, GitTz from gbp.command_wrappers import GitCommand, CommandExecFailed from gbp.errors import GbpError from gbp.patch_series import PatchSeries, Patch from gbp.pkg import parse_archive_filename from gbp.rpm import (SpecFile, NoSpecError, guess_spec, guess_spec_repo, - spec_from_repo) + spec_from_repo, string_to_int) from gbp.scripts.common.pq import (is_pq_branch, pq_branch_name, pq_branch_base, parse_gbp_commands, format_patch, format_diff, - switch_to_pq_branch, apply_single_patch, apply_and_commit_patch, - drop_pq, switch_pq) + apply_and_commit_patch, drop_pq) from gbp.scripts.common.buildpackage import dump_tree +USAGE_STRING = \ +"""%prog [options] action - maintain patches on a patch queue branch +Actions: +export Export the patch queue / devel branch associated to the + current branch into a patch series and update the spec file +import Create a patch queue / devel branch from spec file + and patches in current dir. +rebase Switch to patch queue / devel branch associated to the current + branch and rebase against upstream. +drop Drop (delete) the patch queue / devel branch associated to + the current branch. +apply Apply a patch. +switch Switch to patch-queue branch and vice versa.
+convert [experimental] Convert package from single-branch development + model (packaging and source code changes in the same branch) + into the orphan-packaging plus patch-queue / development branch + development model.""" + + def is_ancestor(repo, parent, child): """Check if commit is ancestor of another""" parent_sha1 = repo.rev_parse("%s^0" % parent) @@ -54,7 +74,25 @@ def is_ancestor(repo, parent, child): merge_base = None return merge_base == parent_sha1 -def generate_patches(repo, start, end, outdir, options): + +def compress_patches(patches, compress_size=0): + """ + Rename and/or compress patches + """ + ret_patches = [] + for patch in patches: + # Compress if patch file is larger than "threshold" value + suffix = '' + if compress_size and os.path.getsize(patch) > compress_size: + gbp.log.debug("Compressing %s" % os.path.basename(patch)) + subprocess.Popen(['gzip', '-n', patch]).communicate() + suffix = '.gz' + + ret_patches.append(os.path.basename(patch) + suffix) + return ret_patches + + +def generate_patches(repo, start, squash, end, outdir, options): """ Generate patch files from git """ @@ -66,12 +104,12 @@ def generate_patches(repo, start, end, outdir, options): raise GbpError('Invalid treeish object %s' % treeish) start_sha1 = repo.rev_parse("%s^0" % start) - try: - end_commit = end - except GitRepositoryError: - # In case of plain tree-ish objects, assume current branch head is the - # last commit + # In case of plain tree-ish objects, assume current branch head is the + # last commit + if repo.get_obj_type(end) == 'tree': end_commit = "HEAD" + else: + end_commit = end end_commit_sha1 = repo.rev_parse("%s^0" % end_commit) start_sha1 = repo.rev_parse("%s^0" % start) @@ -79,6 +117,41 @@ def generate_patches(repo, start, end, outdir, options): if not is_ancestor(repo, start_sha1, end_commit_sha1): raise GbpError("Start commit '%s' not an ancestor of end commit " "'%s'" % (start, end_commit)) + # Squash commits, if requested + if squash[0]: + if squash[0] == 'HEAD': + squash[0] = end_commit + squash_sha1 = repo.rev_parse("%s^0" % squash[0]) + if start_sha1 != squash_sha1: + if not squash_sha1 in repo.get_commits(start, end_commit): + raise GbpError("Given squash point '%s' not in the history " + "of end commit '%s'" % (squash[0], end_commit)) + # Shorten SHA1s + squash_sha1 = repo.rev_parse(squash_sha1, short=7) + start_sha1 = repo.rev_parse(start_sha1, short=7) + gbp.log.info("Squashing commits %s..%s into one monolithic diff" % + (start_sha1, squash_sha1)) + patch_fn = format_diff(outdir, squash[1], repo, + start_sha1, squash_sha1, + options.patch_ignore_path) + if patch_fn: + patches.append(patch_fn) + start = squash_sha1 + # Check for merge commits, yet another squash if merges found + merges = repo.get_commits(start, end_commit, options=['--merges']) + if merges: + # Shorten SHA1s + start_sha1 = repo.rev_parse(start, short=7) + merge_sha1 = repo.rev_parse(merges[0], short=7) + patch_fn = format_diff(outdir, None, repo, start_sha1, merge_sha1, + options.patch_ignore_path) + if patch_fn: + gbp.log.info("Merge commits found! 
Diff between %s..%s written " + "into one monolithic diff" % (start_sha1, merge_sha1)) + patches.append(patch_fn) + start = merge_sha1 + # Check for merge commits, squash if merges found merges = repo.get_commits(start, end_commit, options=['--merges']) if merges: @@ -95,13 +168,21 @@ def generate_patches(repo, start, end, outdir, options): # Generate patches for commit in reversed(repo.get_commits(start, end_commit)): info = repo.get_commit_info(commit) - (cmds, info['body']) = parse_gbp_commands(info, - 'gbp-rpm', - ('ignore'), - ('if', 'ifarch')) + cmds = {} + _cmds, info['body'] = parse_gbp_commands(info, + 'gbp', + ('ignore'), + ('topic')) + cmds.update(_cmds) + _cmds, info['body'] = parse_gbp_commands(info, + 'gbp-rpm', + ('ignore'), + ('if', 'ifarch')) + cmds.update(_cmds) if not 'ignore' in cmds: patch_fn = format_patch(outdir, repo, info, patches, - options.patch_numbers) + options.patch_numbers, + options.patch_ignore_path) if patch_fn: commands[os.path.basename(patch_fn)] = cmds else: @@ -111,10 +192,13 @@ def generate_patches(repo, start, end, outdir, options): if end_commit != end: gbp.log.info("Generating diff file %s..%s" % (end_commit, end)) patch_fn = format_diff(outdir, None, repo, end_commit, end, - options.patch_export_ignore_path) + options.patch_ignore_path) if patch_fn: patches.append(patch_fn) + # Compress + patches = compress_patches(patches, options.patch_compress) + return patches, commands @@ -139,10 +223,16 @@ def update_patch_series(repo, spec, start, end, options): """ Export patches to packaging directory and update spec file accordingly. """ + squash = options.patch_squash.split(':', 1) + if len(squash) == 1: + squash.append(None) + else: + squash[1] += '.diff' + # Unlink old patch files and generate new patches rm_patch_files(spec) - patches, commands = generate_patches(repo, start, end, + patches, commands = generate_patches(repo, start, squash, end, spec.specdir, options) spec.update_patches(patches, commands) spec.write_spec_file() @@ -178,35 +268,42 @@ def parse_spec(options, repo, treeish=None): return spec -def find_upstream_commit(repo, spec, upstream_tag): +def find_upstream_commit(repo, upstreamversion, upstream_tag): """Find commit corresponding upstream version""" - tag_str_fields = {'upstreamversion': spec.upstreamversion, - 'version': spec.upstreamversion} + tag_str_fields = {'upstreamversion': upstreamversion, + 'version': upstreamversion} upstream_commit = repo.find_version(upstream_tag, tag_str_fields) if not upstream_commit: - raise GbpError("Couldn't find upstream version %s" % - spec.upstreamversion) + raise GbpError("Couldn't find upstream version %s" % upstreamversion) return upstream_commit def export_patches(repo, options): """Export patches from the pq branch into a packaging branch""" current = repo.get_branch() - if is_pq_branch(current): - base = pq_branch_base(current) + if is_pq_branch(current, options): + base = pq_branch_base(current, options) gbp.log.info("On branch '%s', switching to '%s'" % (current, base)) repo.set_branch(base) pq_branch = current else: - pq_branch = pq_branch_name(current) + base = current + pq_branch = pq_branch_name(current, options) spec = parse_spec(options, repo) - upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag) - export_treeish = pq_branch + upstream_commit = find_upstream_commit(repo, spec.upstreamversion, + options.upstream_tag) + + export_treeish = options.export_rev if options.export_rev else pq_branch + if not repo.has_treeish(export_treeish): + raise 
GbpError('Invalid treeish object %s' % export_treeish) update_patch_series(repo, spec, upstream_commit, export_treeish, options) GitCommand('status')(['--', spec.specdir]) + if options.drop: + drop_pq(repo, base, options) + def safe_patches(queue): """ @@ -258,6 +355,28 @@ def get_packager(spec): return GitModifier() +def import_extra_files(repo, commitish, files, patch_ignore=True): + """Import branch-specific gbp.conf files to current branch""" + for path in files: + if path: + try: + repo.checkout_files(commitish, path) + except GitRepositoryError: + pass + repo_status = repo.status() + added = repo_status['A '] if 'A ' in repo_status else [] + if added: + gbp.log.info("Importing additional file(s) from branch '%s' into '%s'" % + (commitish, repo.get_branch())) + gbp.log.debug('Adding/commiting %s' % added) + commit_msg = ("Auto-import file(s) from branch '%s':\n %s\n" % + (commitish, ' '.join(added))) + if patch_ignore: + commit_msg += "\nGbp: Ignore" + repo.commit_files(added, msg=commit_msg) + return added + + def import_spec_patches(repo, options): """ apply a series of patches in a spec/packaging dir to branch @@ -268,8 +387,8 @@ def import_spec_patches(repo, options): """ current = repo.get_branch() # Get spec and related information - if is_pq_branch(current): - base = pq_branch_base(current) + if is_pq_branch(current, options): + base = pq_branch_base(current, options) if options.force: spec = parse_spec(options, repo, base) spec_treeish = base @@ -280,9 +399,10 @@ def import_spec_patches(repo, options): spec = parse_spec(options, repo) spec_treeish = None base = current - upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag) + upstream_commit = find_upstream_commit(repo, spec.upstreamversion, + options.upstream_tag) packager = get_packager(spec) - pq_branch = pq_branch_name(base) + pq_branch = pq_branch_name(base, options, spec.version) # Create pq-branch if repo.has_branch(pq_branch) and not options.force: @@ -310,6 +430,7 @@ def import_spec_patches(repo, options): try: gbp.log.info("Switching to branch '%s'" % pq_branch) repo.set_branch(pq_branch) + import_extra_files(repo, base, options.import_files) if not queue: return @@ -330,18 +451,178 @@ def import_spec_patches(repo, options): def rebase_pq(repo, options): """Rebase pq branch on the correct upstream version (from spec file).""" current = repo.get_branch() - if is_pq_branch(current): - base = pq_branch_base(current) + if is_pq_branch(current, options): + base = pq_branch_base(current, options) spec = parse_spec(options, repo, base) else: base = current spec = parse_spec(options, repo) - upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag) + upstream_commit = find_upstream_commit(repo, spec.upstreamversion, + options.upstream_tag) - switch_to_pq_branch(repo, base) + switch_to_pq_branch(repo, base, options) GitCommand("rebase")([upstream_commit]) +def switch_pq(repo, options): + """Switch to patch-queue branch if on base branch and vice versa""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + gbp.log.info("Switching to branch '%s'" % base) + repo.checkout(base) + else: + switch_to_pq_branch(repo, current, options) + + +def drop_pq_rpm(repo, options): + """Remove pq branch""" + current = repo.get_branch() + if is_pq_branch(current, options): + base = pq_branch_base(current, options) + spec = parse_spec(options, repo, base) + else: + spec = parse_spec(options, repo) + drop_pq(repo, current, options, spec.version) + + +def 
switch_to_pq_branch(repo, branch, options): + """ + Switch to patch-queue branch if not already there, create it if it + doesn't exist yet + """ + if is_pq_branch(branch, options): + return + + spec = parse_spec(options, repo, branch) + pq_branch = pq_branch_name(branch, options, spec.version) + if not repo.has_branch(pq_branch): + raise GbpError("Branch '%s' does not exist" % pq_branch) + + gbp.log.info("Switching to branch '%s'" % pq_branch) + repo.set_branch(pq_branch) + + +def apply_single_patch(repo, patchfile, options): + """Apply a single patch onto the pq branch""" + current = repo.get_branch() + if not is_pq_branch(current, options): + switch_to_pq_branch(repo, current, options) + patch = Patch(patchfile) + apply_and_commit_patch(repo, patch, fallback_author=None) + + +def convert_package(repo, options): + """Convert package to orphan-packaging model""" + old_packaging = repo.get_branch() + # Check if we're on pq branch, already + err_msg_base = "Seems you're already using orphan-packaging model - " + if is_pq_branch(old_packaging, options): + raise GbpError(err_msg_base + "you're on patch-queue branch") + # Check if a pq branch already exists + spec = parse_spec(options, repo, treeish=old_packaging) + pq_branch = pq_branch_name(old_packaging, options, spec.version) + if repo.has_branch(pq_branch): + pq_branch = pq_branch_name(old_packaging, options, spec.version) + raise GbpError(err_msg_base + "pq branch %s already exists" % pq_branch) + # Check that the current branch is based on upstream + upstream_commit = find_upstream_commit(repo, spec.upstreamversion, + options.upstream_tag) + if not is_ancestor(repo, upstream_commit, old_packaging): + raise GbpError(err_msg_base + "%s is not based on upstream version %s" % + (old_packaging, spec.upstreamversion)) + # Check new branch + new_branch = old_packaging + "-orphan" + if repo.has_branch(new_branch): + if not options.force: + raise GbpError("Branch '%s' already exists!" % new_branch) + else: + gbp.log.info("Dropping branch '%s'" % new_branch) + repo.delete_branch(new_branch) + + # Determine "history" + if options.retain_history: + # Find first commit that has the spec file and list commits from there + try: + repo.show('%s:%s' % (upstream_commit, spec.specpath)) + history = repo.get_commits(upstream_commit, old_packaging) + except GitRepositoryError: + history_start = repo.get_commits(upstream_commit, old_packaging, + spec.specpath)[-1] + history = repo.get_commits('%s^' % history_start, old_packaging) + else: + history = [repo.rev_parse(old_packaging)] + history.reverse() + + # Do import + gbp.log.info("Importing packaging files from branch '%s' to '%s'" % + (old_packaging, new_branch)) + convert_with_history(repo, upstream_commit, history, new_branch, + spec.specfile, options) + # Copy extra files + import_extra_files(repo, old_packaging, options.import_files, + patch_ignore=False) + + gbp.log.info("Package successfully converted to orphan-packaging.") + gbp.log.info("You're now on the new '%s' packaging branch (the old " + "packaging branch '%s' was left intact)." 
% + (new_branch, old_packaging)) + gbp.log.info("Please check all files and test building the package!") + + +def convert_with_history(repo, upstream, commits, new_branch, spec_fn, options): + """Auto-import packaging files and (auto-generated) patches""" + + # Dump and commit packaging files + packaging_tree = '%s:%s' % (commits[0], options.packaging_dir) + packaging_tmp = tempfile.mkdtemp(prefix='pack_') + dump_packaging_dir = os.path.join(packaging_tmp, options.new_packaging_dir) + dump_tree(repo, dump_packaging_dir, packaging_tree, with_submodules=False, + recursive=False) + + msg = "Auto-import packaging files\n\n" \ + "Imported initial packaging files from commit '%s'" % (commits[0]) + new_tree = repo.create_tree(packaging_tmp) + tip_commit = repo.commit_tree(new_tree, msg, []) + + # Generate initial patches + spec = SpecFile(os.path.join(dump_packaging_dir, spec_fn)) + update_patch_series(repo, spec, upstream, commits[0], options) + # Commit updated packaging files only if something was changed + new_tree = repo.create_tree(packaging_tmp) + if new_tree != repo.rev_parse(tip_commit + ':'): + msg = "Auto-generate patches\n\n" \ + "Generated patches from\n'%s..%s'\n\n" \ + "updating spec file and possibly removing old patches." \ + % (upstream, commits[0]) + tip_commit = repo.commit_tree(new_tree, msg, [tip_commit]) + + # Import rest of the commits + for commit in commits[1:]: + shutil.rmtree(dump_packaging_dir) + packaging_tree = '%s:%s' % (commit, options.packaging_dir) + dump_tree(repo, dump_packaging_dir, packaging_tree, + with_submodules=False, recursive=False) + try: + spec = SpecFile(os.path.join(dump_packaging_dir, spec_fn)) + update_patch_series(repo, spec, upstream, commit, options) + except (NoSpecError, GbpError): + gbp.log.warn("Failed to generate patches from '%s'" % commit) + + new_tree = repo.create_tree(packaging_tmp) + if new_tree == repo.rev_parse(tip_commit + ':'): + gbp.log.info("Skipping commit '%s' which generated no change" % + commit) + else: + info = repo.get_commit_info(commit) + msg = "%s\n\n%sAuto-imported by gbp from '%s'" % (info['subject'], + info['body'], commit) + tip_commit = repo.commit_tree(new_tree, msg, [tip_commit]) + + repo.create_branch(new_branch, tip_commit) + repo.set_branch(new_branch) + + def build_parser(name): """Construct command line parser""" try: @@ -371,6 +652,7 @@ def build_parser(name): parser.add_option("--force", dest="force", action="store_true", default=False, help="In case of import even import if the branch already exists") + parser.add_boolean_config_file_option("drop", dest='drop') parser.add_config_file_option(option_name="color", dest="color", type='tristate') parser.add_config_file_option(option_name="color-scheme", @@ -381,6 +663,27 @@ def build_parser(name): parser.add_config_file_option(option_name="spec-file", dest="spec_file") parser.add_config_file_option(option_name="packaging-dir", dest="packaging_dir") + parser.add_config_file_option(option_name="pq-branch", dest="pq_branch") + parser.add_option("--export-rev", dest="export_rev", + metavar="TREEISH", + help="Export patches from treeish object TREEISH instead of head " + "of patch-queue branch") + parser.add_config_file_option(option_name="import-files", + dest="import_files", type="string", action="callback", + callback=optparse_split_cb) + parser.add_config_file_option("patch-compress", + dest="patch_compress") + parser.add_config_file_option("patch-squash", dest="patch_squash") + parser.add_config_file_option("patch-ignore-path", dest="patch_ignore_path") + 
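As a reading aid for the patch-squash handling added to update_patch_series() earlier in this file, a small standalone sketch (the helper name is made up for illustration) of how a --patch-squash value is split into a commit-ish and an optional diff name:

    def parse_patch_squash(value):
        """Mirror of the squash parsing done in update_patch_series() above."""
        squash = value.split(':', 1)
        if len(squash) == 1:
            # No explicit diff name was given
            squash.append(None)
        else:
            # An explicit name gets the '.diff' suffix appended
            squash[1] += '.diff'
        return squash

    assert parse_patch_squash('v1.2.3') == ['v1.2.3', None]
    assert parse_patch_squash('v1.2.3:baseline') == ['v1.2.3', 'baseline.diff']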
parser.add_option("--new-packaging-dir", + help="Packaging directory in the new packaging branch. Only " + "relevant for the 'convert' action. If not defined, defaults " + "to '--packaging-dir'") + parser.add_option("--retain-history", action="store_true", + help="When doing convert, preserve as much of the git history as " + "possible, i.e. create one commit per commit. Only " + "relevant for the 'convert' action.") + return parser @@ -389,13 +692,20 @@ def parse_args(argv): parser = build_parser(argv[0]) if not parser: return None, None - return parser.parse_args(argv) + + options, args = parser.parse_args(argv) + options.patch_compress = string_to_int(options.patch_compress) + if options.new_packaging_dir is None: + options.new_packaging_dir = options.packaging_dir + return options, args def main(argv): """Main function for the gbp pq-rpm command""" retval = 0 + gbp.log.initialize() + (options, args) = parse_args(argv) if not options: return 1 @@ -426,23 +736,27 @@ def main(argv): gbp.log.err("%s is not a git repository" % (os.path.abspath('.'))) return 1 + if os.path.abspath('.') != repo.path: + gbp.log.warn("Switching to topdir before running commands") + os.chdir(repo.path) + try: # Create base temporary directory for this run init_tmpdir(options.tmp_dir, prefix='pq-rpm_') - current = repo.get_branch() if action == "export": export_patches(repo, options) elif action == "import": import_spec_patches(repo, options) elif action == "drop": - drop_pq(repo, current) + drop_pq_rpm(repo, options) elif action == "rebase": rebase_pq(repo, options) elif action == "apply": - patch = Patch(patchfile) - apply_single_patch(repo, current, patch, fallback_author=None) + apply_single_patch(repo, patchfile, options) elif action == "switch": - switch_pq(repo, current) + switch_pq(repo, options) + elif action == "convert": + convert_package(repo, options) except CommandExecFailed: retval = 1 except GitRepositoryError as err: diff --git a/gbp/scripts/pull.py b/gbp/scripts/pull.py index 67b2ab1c..c348f79b 100755 --- a/gbp/scripts/pull.py +++ b/gbp/scripts/pull.py @@ -27,18 +27,21 @@ from gbp.config import (GbpOptionParser, GbpOptionGroup) from gbp.errors import GbpError from gbp.git import GitRepositoryError -from gbp.deb.git import DebianGitRepository import gbp.log +try: + from gbp.deb.git import DebianGitRepository as GitRepository +except ImportError: + from gbp.rpm.git import RpmGitRepository as GitRepository -def fast_forward_branch(branch, repo, options): +def update_branch(branch, repo, options): """ update branch to its remote branch, fail on non fast forward updates unless --force is given @return: branch updated or already up to date @rtype: boolean """ - update = False + update = None remote = repo.get_merge_branch(branch) if not remote: @@ -52,23 +55,47 @@ def fast_forward_branch(branch, repo, options): return True if can_fast_forward: - update = True + update = 'merge' else: - if options.force: - gbp.log.info("Non-fast forwarding '%s' due to --force" % branch) - update = True + if options.force == 'merge': + gbp.log.info("Non-fast forwarding '%s' due to --force=merge" % branch) + update = 'merge' + elif options.force == 'clean': + gbp.log.info("Checking out clean copy of '%s' due to --force=clean" % branch) + update = 'clean' else: - gbp.log.warn("Skipping non-fast forward of '%s' - use --force" % branch) + gbp.log.warn("Skipping non-fast forward of '%s' - use --force or " + "update manually" % branch) if update: gbp.log.info("Updating '%s'" % branch) if repo.branch == branch: - 
repo.merge(remote) + if update == 'merge': + repo.merge(remote) + elif update == 'clean': + # Have to drop our current branch + tmpbranch = "_gbptmp-"+branch + gbp.log.debug("Checking out '%s' to '%s'" % (remote, tmpbranch)) + repo.create_branch(tmpbranch, remote) + gbp.log.debug("Switching current branch to '%s'" % (tmpbranch)) + repo.set_branch(tmpbranch) + gbp.log.debug("Dropping branch '%s'" % branch) + repo.delete_branch(branch) + gbp.log.info("Renaming branch '%s' to '%s'" % (tmpbranch, branch)) + repo.rename_branch(tmpbranch, branch) else: - sha1 = repo.rev_parse(remote) - repo.update_ref("refs/heads/%s" % branch, sha1, - msg="gbp: forward %s to %s" % (branch, remote)) - return update + if can_fast_forward or (update == 'clean'): + sha1 = repo.rev_parse(remote) + repo.update_ref("refs/heads/%s" % branch, sha1, + msg="gbp: forward %s to %s" % (branch, remote)) + elif update == 'merge': + # Merge other branch, if it cannot be fast-forwarded + current_branch=repo.branch + repo.set_branch(branch) + repo.merge(remote) + repo.set_branch(current_branch) + + return (update != None) def build_parser(name): @@ -82,12 +109,20 @@ def build_parser(name): branch_group = GbpOptionGroup(parser, "branch options", "branch update and layout options") parser.add_option_group(branch_group) branch_group.add_boolean_config_file_option(option_name="ignore-branch", dest="ignore_branch") - branch_group.add_option("--force", action="store_true", dest="force", default=False, - help="force a branch update even if it can't be fast forwarded") + branch_group.add_option("--force", action="store", dest="force", + default=None, + help="force a branch update even if it can't be " + "fast forwarded (valid ACTIONs are 'merge', " + "'clean')", + metavar='ACTION') + branch_group.add_option("--all", action="store_true", default=False, + help="update all remote-tracking branches that " + "have identical name in the remote") branch_group.add_option("--redo-pq", action="store_true", dest="redo_pq", default=False, help="redo the patch queue branch after a pull. 
Warning: this drops the old patch-queue branch") branch_group.add_config_file_option(option_name="upstream-branch", dest="upstream_branch") - branch_group.add_config_file_option(option_name="debian-branch", dest="debian_branch") + branch_group.add_config_file_option(option_name="debian-branch", dest="packaging_branch") + branch_group.add_config_file_option(option_name="packaging-branch", dest="packaging_branch") branch_group.add_boolean_config_file_option(option_name="pristine-tar", dest="pristine_tar") branch_group.add_option("--depth", action="store", dest="depth", default=0, help="git history depth (for deepening shallow clones)") @@ -110,6 +145,8 @@ def main(argv): retval = 0 current = None + gbp.log.initialize() + (options, args) = parse_args(argv) if not options: return 1 @@ -117,13 +154,13 @@ def main(argv): gbp.log.setup(options.color, options.verbose, options.color_scheme) try: - repo = DebianGitRepository(os.path.curdir) + repo = GitRepository(os.path.curdir) except GitRepositoryError: gbp.log.err("%s is not a git repository" % (os.path.abspath('.'))) return 1 try: - branches = [] + branches = set() try: current = repo.get_branch() except GitRepositoryError: @@ -134,12 +171,25 @@ def main(argv): else: raise - for branch in [options.debian_branch, options.upstream_branch]: + for branch in [options.packaging_branch, options.upstream_branch]: if repo.has_branch(branch): - branches += [branch] + branches.add(branch) if repo.has_pristine_tar_branch() and options.pristine_tar: - branches += [repo.pristine_tar_branch] + branches.add(repo.pristine_tar_branch) + + if options.all: + current_remote = repo.get_merge_branch(current) + if current_remote: + fetch_remote = current_remote.split('/')[0] + else: + fetch_remote = 'origin' + for branch in repo.get_local_branches(): + merge_branch = repo.get_merge_branch(branch) + if merge_branch: + rem, rem_br = merge_branch.split('/', 1) + if rem == fetch_remote and branch == rem_br: + branches.add(branch) (ret, out) = repo.is_clean() if not ret: @@ -150,11 +200,11 @@ def main(argv): repo.fetch(depth=options.depth) repo.fetch(depth=options.depth, tags=True) for branch in branches: - if not fast_forward_branch(branch, repo, options): + if not update_branch(branch, repo, options): retval = 2 if options.redo_pq: - repo.set_branch(options.debian_branch) + repo.set_branch(options.packaging_branch) Command("gbp-pq")(["drop"]) Command("gbp-pq")(["import"]) diff --git a/gbp/scripts/rpm_ch.py b/gbp/scripts/rpm_ch.py new file mode 100755 index 00000000..dbeabe80 --- /dev/null +++ b/gbp/scripts/rpm_ch.py @@ -0,0 +1,556 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2007, 2008, 2009, 2010, 2013 Guido Guenther +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +"""Generate RPM changelog entries from git commit messages""" + +import ConfigParser +from datetime import datetime +import os.path +import pwd +import re +import sys +import socket + +import gbp.command_wrappers as gbpc +import gbp.log +from gbp.config import GbpOptionParserRpm, GbpOptionGroup +from gbp.errors import GbpError +from gbp.git.modifier import GitModifier +from gbp.rpm import (guess_spec, NoSpecError, SpecFile, split_version_str, + compose_version_str) +from gbp.rpm.changelog import Changelog, ChangelogParser, ChangelogError +from gbp.rpm.git import GitRepositoryError, RpmGitRepository +from gbp.rpm.policy import RpmPkgPolicy +from gbp.scripts.buildpackage_rpm import (packaging_tag_data, + create_packaging_tag) +from gbp.tmpfile import init_tmpdir, del_tmpdir + + +ChangelogEntryFormatter = RpmPkgPolicy.ChangelogEntryFormatter + + +class ChangelogFile(object): + """Container for changelog file, whether it be a standalone changelog + or a spec file""" + + def __init__(self, file_path): + parser = ChangelogParser(RpmPkgPolicy) + + if os.path.splitext(file_path)[1] == '.spec': + gbp.log.debug("Using spec file '%s' as changelog" % file_path) + self._file = SpecFile(file_path) + self.changelog = parser.raw_parse_string(self._file.get_changelog()) + else: + self._file = os.path.abspath(file_path) + if not os.path.exists(file_path): + gbp.log.info("Changelog '%s' not found, creating new " + "changelog file" % file_path) + self.changelog = Changelog(RpmPkgPolicy) + else: + gbp.log.debug("Using changelog file '%s'" % file_path) + self.changelog = parser.raw_parse_file(self._file) + + # Parse topmost section and try to determine the start commit + if self.changelog.sections: + self.changelog.sections[0] = parser.parse_section( + self.changelog.sections[0]) + + def write(self): + """Write changelog file to disk""" + if isinstance(self._file, SpecFile): + self._file.set_changelog(str(self.changelog)) + self._file.write_spec_file() + else: + with open(self._file, 'w') as fobj: + fobj.write(str(self.changelog)) + + @property + def path(self): + """File path""" + if isinstance(self._file, SpecFile): + return self._file.specpath + else: + return self._file + +def load_customizations(customization_file): + """Load user defined customizations file""" + # Load customization file + if not customization_file: + return + customizations = {} + try: + execfile(customization_file, customizations, customizations) + except Exception as err: + raise GbpError("Failed to load customization file: %s" % err) + + # Set customization classes / functions + global ChangelogEntryFormatter + if 'ChangelogEntryFormatter' in customizations: + ChangelogEntryFormatter = customizations.get('ChangelogEntryFormatter') + + +def determine_editor(options): + """Determine text editor""" + + # Check if we need to spawn an editor + states = ['always'] + if options.release: + states.append('release') + if options.spawn_editor not in states: + return None + + # Determine the correct editor + if options.editor_cmd: + return options.editor_cmd + elif 'EDITOR' in os.environ: + return os.environ['EDITOR'] + else: + return 'vi' + + +def check_repo_state(repo, options): + """Check that the repository is in good state""" + # Check branch + try: + branch = repo.get_branch() + except GitRepositoryError: + branch = None + if options.packaging_branch != branch and not options.ignore_branch: + gbp.log.err("You are not 
on branch '%s' but on '%s'" % + (options.packaging_branch, branch)) + raise GbpError("Use --ignore-branch to ignore or " + "--packaging-branch to set the branch name.") + # Check unstaged changes + if options.commit: + unstaged = [] + status = repo.status() + for group, files in status.iteritems(): + if group != '??' and group[1] != ' ': + unstaged.extend(files) + if unstaged: + gbp.log.error("Unstaged changes in:\n %s" % + '\n '.join(unstaged)) + raise GbpError("Please commit or stage your changes before using " + "the --commit or --tag option") + + +def parse_spec_file(repo, options): + """Find and parse spec file""" + if options.spec_file: + spec_path = os.path.join(repo.path, options.spec_file) + spec = SpecFile(spec_path) + else: + spec = guess_spec(os.path.join(repo.path, options.packaging_dir), + True, os.path.basename(repo.path) + '.spec') + options.packaging_dir = spec.specdir + return spec + + +def parse_changelog_file(repo, spec, options): + """Find and parse changelog file""" + changes_file_name = os.path.splitext(spec.specfile)[0] + '.changes' + changes_file_path = os.path.join(options.packaging_dir, changes_file_name) + + # Determine changelog file path + if options.changelog_file == "SPEC": + changelog_path = spec.specpath + elif options.changelog_file == "CHANGES": + changelog_path = changes_file_path + elif options.changelog_file == 'auto': + if os.path.exists(changes_file_path): + changelog_path = changes_file_path + else: + changelog_path = spec.specpath + else: + changelog_path = os.path.join(repo.path, options.changelog_file) + + return ChangelogFile(changelog_path) + + +def guess_commit(section, repo, options): + """Guess the last commit documented in a changelog header""" + + if not section: + return None + header = section.header + + # Try to parse the fields from the header revision + rev_re = '^%s$' % re.sub(r'%\((\S+?)\)s', r'(?P<\1>\S+)', + options.changelog_revision) + match = re.match(rev_re, header['revision'], re.I) + fields = match.groupdict() if match else {} + + # First, try to find tag-name, if present + if 'tagname' in fields: + gbp.log.debug("Trying to find tagname %s" % fields['tagname']) + try: + return repo.rev_parse("%s^0" % fields['tagname']) + except GitRepositoryError: + gbp.log.warn("Changelog points to tagname '%s' which is not found " + "in the git repository" % fields['tagname']) + + # Next, try to find packaging tag matching the version + tag_str_fields = {'vendor': options.vendor} + if 'version' in fields: + gbp.log.debug("Trying to find packaging tag for version '%s'" % + fields['version']) + full_version = fields['version'] + tag_str_fields.update(split_version_str(full_version)) + elif 'upstreamversion' in fields: + gbp.log.debug("Trying to find packaging tag for version '%s'" % + fields['upstreamversion']) + tag_str_fields['upstreamversion'] = fields['upstreamversion'] + if 'release' in fields: + tag_str_fields['release'] = fields['release'] + commit = repo.find_version(options.packaging_tag, + tag_str_fields) + if commit: + return commit + else: + gbp.log.info("Couldn't find packaging tag for version %s" % + header['revision']) + + # As a last resort we look at the timestamp + timestamp = header['time'].isoformat() + last = repo.get_commits(num=1, options="--until='%s'" % timestamp) + if last: + gbp.log.info("Using commit (%s) before the last changelog timestamp " + "(%s)" % (last, timestamp)) + return last[0] + return None + + +def get_start_commit(changelog, repo, options): + """Get the start commit from which to generate new 
entries""" + if options.all: + since = None + elif options.since: + since = options.since + else: + if changelog.sections: + since = guess_commit(changelog.sections[0], repo, options) + else: + since = None + if not since: + raise GbpError("Couldn't determine starting point from " + "changelog, please use the '--since' or '--all'") + gbp.log.info("Continuing from commit '%s'" % since) + return since + + +def get_author(repo, use_git_config): + """Get author and email from git configuration""" + author = email = None + + if use_git_config: + modifier = repo.get_author_info() + author = modifier.name + email = modifier.email + + passwd_data = pwd.getpwuid(os.getuid()) + if not author: + # On some distros (Ubuntu, at least) the gecos field has it's own + # internal structure of comma-separated fields + author = passwd_data.pw_gecos.split(',')[0].strip() + if not author: + author = passwd_data.pw_name + if not email: + if 'EMAIL' in os.environ: + email = os.environ['EMAIL'] + else: + email = "%s@%s" % (passwd_data.pw_name, socket.getfqdn()) + + return author, email + + +def entries_from_commits(changelog, repo, commits, options): + """Generate a list of formatted changelog entries from a list of commits""" + entries = [] + for commit in commits: + info = repo.get_commit_info(commit) + entry_text = ChangelogEntryFormatter.compose(info, full=options.full, + ignore_re=options.ignore_regex, id_len=options.idlen, + meta_bts=options.meta_bts) + if entry_text: + entries.append(changelog.create_entry(author=info['author'].name, + text=entry_text)) + return entries + + +def entries_from_text(changelog, text, author): + """Generate a list of changelog entries from a string""" + entries = [] + # Use current user as the author for all entries + for line in text.splitlines(): + if line.strip(): + entry_text = "- %s" % line.strip() + entries.append(changelog.create_entry(author=author, + text=entry_text)) + return entries + + +def generate_new_entries(changelog, repo, options, args): + """Generate new entries to be appended to changelog""" + if options.message: + author = get_author(repo, options.git_author)[0] + entries = entries_from_text(changelog, options.message, author) + else: + # Get range of commits from where to generate changes + since = get_start_commit(changelog, repo, options) + if args: + gbp.log.info("Only looking for changes in '%s'" % ", ".join(args)) + commits = repo.get_commits(since=since, until='HEAD', paths=args, + options=options.git_log.split(" ")) + commits.reverse() + if not commits: + gbp.log.info("No changes detected from %s to %s." 
% (since, 'HEAD')) + entries = entries_from_commits(changelog, repo, commits, options) + return entries + + +def update_changelog(changelog, entries, repo, spec, options): + """Update the changelog with a range of commits""" + # Get info for section header + now = datetime.now() + name, email = get_author(repo, options.git_author) + author = None + committer = None + rev_str_fields = dict(spec.version, + version=compose_version_str(spec.version), + vendor=options.vendor) + if options.tag: + # Get fake information for the to-be-created git commit + author = committer = GitModifier(date=now) + tag, msg = packaging_tag_data(repo, 'HEAD', spec.name, spec.version, + options) + else: + tag = repo.describe('HEAD', longfmt=True, always=True) + msg = None + rev_str_fields['tagname'] = tag + + try: + revision = options.changelog_revision % rev_str_fields + except KeyError as err: + raise GbpError("Unable to construct revision field: unknown key " + "%s, only %s are accepted" % (err, rev_str_fields.keys())) + + # Add a new changelog section if new release or an empty changelog + if options.release or not changelog.sections: + top_section = changelog.add_section(time=now, name=name, + email=email, revision=revision) + else: + # Re-use already parsed top section + top_section = changelog.sections[0] + top_section.set_header(time=now, name=name, + email=email, revision=revision) + + # Add new entries to the topmost section + for entry in entries: + top_section.append_entry(entry) + return (tag, msg, author, committer) + +def create_commit_message(spec, options): + """Generate commit message""" + fields = spec.version + fields['version'] = version=compose_version_str(spec.version) + fields['vendor'] = options.vendor + fields['pkg'] = spec.name + try: + return options.commit_msg % fields + except KeyError as err: + raise GbpError("Unknown key %s in commit-msg string, " + "only %s are accepted" % (err, fields.keys())) + +def commit_changelog(repo, changelog, message, author, committer, edit): + """Commit changelog to Git""" + repo.add_files(changelog.path) + repo.commit_staged(message, author_info=author, committer_info=committer, + edit=edit) + + +def build_parser(name): + """Construct command line parser""" + try: + parser = GbpOptionParserRpm(command=os.path.basename(name), + prefix='', usage='%prog [options] paths') + except ConfigParser.ParsingError as err: + gbp.log.error('invalid config file: %s' % err) + return None + + range_grp = GbpOptionGroup(parser, "commit range options", + "which commits to add to the changelog") + format_grp = GbpOptionGroup(parser, "changelog entry formatting", + "how to format the changelog entries") + naming_grp = GbpOptionGroup(parser, "naming", + "branch names, tag formats, directory and file naming") + commit_grp = GbpOptionGroup(parser, "commit", + "automatic committing and tagging") + parser.add_option_group(range_grp) + parser.add_option_group(format_grp) + parser.add_option_group(naming_grp) + parser.add_option_group(commit_grp) + + # Non-grouped options + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + help="verbose command execution") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir") + parser.add_config_file_option(option_name="vendor", action="store", + dest="vendor") + parser.add_config_file_option(option_name="git-log", dest="git_log", + 
help="options to pass to git-log, default is '%(git-log)s'") + parser.add_boolean_config_file_option(option_name="ignore-branch", + dest="ignore_branch") + parser.add_config_file_option(option_name="customizations", + dest="customization_file", + help="Load Python code from CUSTOMIZATION_FILE. At the " + "moment, the only useful thing the code can do is " + "define a custom ChangelogEntryFormatter class.") + # Naming group options + naming_grp.add_config_file_option(option_name="packaging-branch", + dest="packaging_branch") + naming_grp.add_config_file_option(option_name="packaging-tag", + dest="packaging_tag") + naming_grp.add_config_file_option(option_name="packaging-tag-msg", + dest="packaging_tag_msg") + naming_grp.add_config_file_option(option_name="packaging-dir", + dest="packaging_dir") + naming_grp.add_config_file_option(option_name="changelog-file", + dest="changelog_file") + naming_grp.add_config_file_option(option_name="spec-file", dest="spec_file") + # Range group options + range_grp.add_option("-s", "--since", dest="since", + help="commit to start from (e.g. HEAD^^^, release/0.1.2)") + range_grp.add_option("--all", action="store_true", + help="use all commits from the Git history, overrides " + "--since") + # Formatting group options + format_grp.add_config_file_option(option_name="meta-bts", dest="meta_bts") + format_grp.add_option("--no-release", action="store_false", default=True, + dest="release", + help="no release, just update the last changelog section") + format_grp.add_boolean_config_file_option(option_name="git-author", + dest="git_author") + format_grp.add_boolean_config_file_option(option_name="full", dest="full") + format_grp.add_config_file_option(option_name="id-length", dest="idlen", + help="include N digits of the commit id in the changelog " + "entry, default is '%(id-length)s'", + type="int", metavar="N") + format_grp.add_config_file_option(option_name="ignore-regex", + dest="ignore_regex", + help="Ignore lines in commit message matching regex, " + "default is '%(ignore-regex)s'") + format_grp.add_config_file_option(option_name="changelog-revision", + dest="changelog_revision") + format_grp.add_config_file_option(option_name="spawn-editor", + dest="spawn_editor") + format_grp.add_config_file_option(option_name="editor-cmd", + dest="editor_cmd") + format_grp.add_option("-m", '--message', + help="text to use as new changelog entries - git commit " + "messages and the --since are ignored in this case") + # Commit/tag group options + commit_grp.add_option("-c", "--commit", action="store_true", + help="commit changes") + commit_grp.add_config_file_option(option_name="commit-msg", + dest="commit_msg") + commit_grp.add_option("--tag", action="store_true", + help="commit the changes and create a packaging/release" + "tag") + commit_grp.add_option("--retag", action="store_true", + help="Overwrite packaging tag if it already exists") + commit_grp.add_boolean_config_file_option(option_name="sign-tags", + dest="sign_tags") + commit_grp.add_config_file_option(option_name="keyid", dest="keyid") + return parser + +def parse_args(argv): + """Parse command line and config file options""" + parser = build_parser(argv[0]) + if not parser: + return None, None + + options, args = parser.parse_args(argv[1:]) + + if options.tag: + options.commit = True + if not options.changelog_revision: + options.changelog_revision = RpmPkgPolicy.Changelog.header_rev_format + + gbp.log.setup(options.color, options.verbose, options.color_scheme) + + return options, args + +def main(argv): + 
"""Script main function""" + options, args = parse_args(argv) + if not options: + return 1 + + try: + init_tmpdir(options.tmp_dir, prefix='rpm-ch_') + + load_customizations(options.customization_file) + editor_cmd = determine_editor(options) + + repo = RpmGitRepository('.') + check_repo_state(repo, options) + + # Find and parse spec file + spec = parse_spec_file(repo, options) + + # Find and parse changelog file + ch_file = parse_changelog_file(repo, spec, options) + + # Get new entries + entries = generate_new_entries(ch_file.changelog, repo, options, args) + + # Do the actual update + tag, tag_msg, author, committer = update_changelog(ch_file.changelog, + entries, repo, spec, + options) + # Write to file + ch_file.write() + + if editor_cmd and not options.message: + gbpc.Command(editor_cmd, [ch_file.path])() + + if options.commit: + edit = True if editor_cmd else False + msg = create_commit_message(spec, options) + commit_changelog(repo, ch_file, msg, author, committer, edit) + if options.tag: + if options.retag and repo.has_tag(tag): + repo.delete_tag(tag) + repo.create_tag(tag, tag_msg, 'HEAD', options.sign_tags, + options.keyid) + + except (GbpError, GitRepositoryError, ChangelogError, NoSpecError) as err: + if len(err.__str__()): + gbp.log.err(err) + return 1 + finally: + del_tmpdir() + + return 0 + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/gbp/scripts/submit_bb.py b/gbp/scripts/submit_bb.py new file mode 100755 index 00000000..eac0c475 --- /dev/null +++ b/gbp/scripts/submit_bb.py @@ -0,0 +1,138 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2014-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +# +"""Create and push submit tag""" + +import ConfigParser +import os +import sys +from datetime import datetime + +import gbp.log +from gbp.config import GbpOptionParserBB +from gbp.errors import GbpError +from gbp.format import format_str +from gbp.git import GitRepository, GitRepositoryError + +# pylint: disable=bad-continuation + + +def guess_remote(repo, options): + """Guess remote where to push""" + if options.remote: + return options.remote + + remotes = repo.get_remotes() + if not remotes: + raise GbpError("Local repo has no remotes configured. Please add one " + "or use --remote to define the remote where to push.") + elif len(remotes) == 1: + return remotes.keys()[0] + else: + raise GbpError("Local repo has multiple remotes (%s). Don't know which " + "one to choose. Use --remote to define where to push." 
% + ', '.join(remotes.keys())) + + +def build_parser(name): + """Build command line parser""" + usage_str = "%prog [options] - create and push submit tag" + try: + parser = GbpOptionParserBB(command=os.path.basename(name), prefix='', + usage=usage_str) + except ConfigParser.ParsingError as err: + gbp.log.err(err) + return None + + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + help="verbose command execution") + parser.add_config_file_option(option_name="color", dest="color", + type='tristate') + parser.add_config_file_option(option_name="color-scheme", + dest="color_scheme") + parser.add_option("-m", "--message", dest="message", help="tag message") + parser.add_option("-c", "--commit", dest="commit", help="commit to submit", + default='HEAD') + parser.add_option("-r", "--remote", dest="remote", + help="remote where to push") + parser.add_config_file_option(option_name="submit-tag", dest="submit_tag") + parser.add_config_file_option(option_name="target", dest="target") + parser.add_boolean_config_file_option(option_name="sign-tags", + dest="sign_tags") + parser.add_config_file_option(option_name="keyid", dest="keyid") + + return parser + + +def parse_args(argv): + """Parse command line arguments""" + parser = build_parser(argv[0]) + if not parser: + return None, None + options, args = parser.parse_args(argv) + + gbp.log.setup(options.color, options.verbose, options.color_scheme) + + return (options, args) + + +def main(argv): + """Entry point for gbp-submit-bb""" + retval = 0 + + options, _args = parse_args(argv) + if not options: + return 1 + + try: + repo = GitRepository(os.path.curdir) + except GitRepositoryError: + gbp.log.err("The command must be run under a Git repository") + return 1 + + try: + remote = guess_remote(repo, options) + + tag_fields = {'nowtime': datetime.now().strftime('%Y%m%d.%H%M%S'), + 'target': options.target} + tag_name = format_str(options.submit_tag, tag_fields) + + gbp.log.info("Tagging %s" % tag_name) + repo.create_tag(tag_name, msg=options.message, commit=options.commit, + sign=options.sign_tags, keyid=options.keyid, + annotate=True) + + gbp.log.info("Pushing to remote %s" % remote) + try: + repo.push_tag(remote, tag_name) + except GitRepositoryError as err: + gbp.log.err(err) + gbp.log.info("Removing tag %s" % tag_name) + repo.delete_tag(tag_name) + raise GbpError("Git push failed!") + + except (GbpError, GitRepositoryError) as err: + if len(err.__str__()): + gbp.log.err(err) + retval = 1 + + return retval + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) + diff --git a/packaging/PKGBUILD b/packaging/PKGBUILD new file mode 100644 index 00000000..dbe4b00b --- /dev/null +++ b/packaging/PKGBUILD @@ -0,0 +1,83 @@ +pkgbase='git-buildpackage' +pkgname=('git-buildpackage-rpm' 'git-buildpackage-common' 'git-buildpackage-doc') +pkgver=0.6.15 +pkgrel=0 +pkgdesc="Tools from Debian to integrate the package build system with Git" +arch=(any) +url="https://honk.sigxcpu.org/piki/projects/git-buildpackage/" +license=('GPLv2') +conflicts=('git-buildpackage') +provides=('git-buildpackage') +makedepends=('python2-distribute' 'gtk-doc' 'docbook-sgml' 'docbook-utils') +source=(git-buildpackage_${pkgver}.tar.gz) +sha256sums=(SKIP) + +prepare() +{ + cd "$srcdir/$pkgbase-${pkgver}" + for patch_file in $(find ../../ -maxdepth 1 -name '*.patch' |sort) + do + patch -p1 -i $patch_file + done +} + +build() { + cd $srcdir/$pkgbase-$pkgver + WITHOUT_NOSETESTS=1 \ + python2 setup.py build + +# Prepare apidocs + epydoc -n git-buildpackage 
--no-sourcecode -o docs/apidocs/ \ + gbp*.py git*.py gbp/ + +# HTML docs + HAVE_SGML2X=0 make -C docs/ +} + +package_git-buildpackage-common() { + depends=('man-db' + 'python2' + 'git') + optdepends=('pristine-tar: regenerate pristine tarballs') + cd $srcdir/$pkgbase-$pkgver + WITHOUT_NOSETESTS=1 \ + python2 setup.py install \ + --root="$pkgdir" \ + --prefix=/usr \ + -O1 + rm $pkgdir/usr/lib/python*/site-packages/*info -rf + rm $pkgdir/usr/bin/*rpm* + rm $pkgdir/usr/lib/python*/site-packages/gbp/rpm -rf + rm $pkgdir/usr/lib/python*/site-packages/gbp/scripts/*rpm*.py* -rf +} + +package_git-buildpackage-rpm() { + depends=("git-buildpackage-common=$pkgver-$pkgrel" "rpm") + provides=("tizen-gbp-rpm=20140605") + cd $srcdir/$pkgbase-$pkgver + WITHOUT_NOSETESTS=1 \ + python2 setup.py install \ + --root="$pkgdir" \ + --prefix=/usr \ + -O1 + rm $pkgdir/usr/lib/python*/site-packages/*info -rf + rm -rf $pkgdir/etc + find $pkgdir/usr/bin -mindepth 1 -maxdepth 1 ! -name '*rpm*' -delete + find $pkgdir/usr/lib/python*/site-packages/gbp -mindepth 1 -maxdepth 1 -type f -o -type d ! -name rpm -a ! -name scripts |xargs rm -rf + find $pkgdir/usr/lib/python*/site-packages/gbp/scripts -mindepth 1 -maxdepth 1 ! -name '*rpm*.py*' |xargs rm -rf +} + +package_git-buildpackage-doc() { + cd $srcdir/$pkgbase-$pkgver +# Install man pages + mandir=/usr/share/man + install -d ${pkgdir}/${mandir}/man1 ${pkgdir}/${mandir}/man5 + install docs/*.1 ${pkgdir}/${mandir}/man1 + install docs/*.5 ${pkgdir}/${mandir}/man5 + +# Install html documentation + mkdir -p ${pkgdir}/${docdir}/${pkgbase} + cp -r docs/manual-html ${pkgdir}/${docdir}/${pkgbase} + cp -r docs/apidocs ${pkgdir}/${docdir}/${pkgbase} +} +# vim:set ts=2 sw=2 et: diff --git a/packaging/git-buildpackage.changes b/packaging/git-buildpackage.changes new file mode 100644 index 00000000..f048a47e --- /dev/null +++ b/packaging/git-buildpackage.changes @@ -0,0 +1,332 @@ +* Tue Oct 27 2015 Markus Lehtonen debian/0.7.0-159-g5c91d2c +- Rebase on top of upstream version 0.7.0 + +* Tue Feb 17 2015 Markus Lehtonen tizen/0.6.22-20150217 +- log: don't crash getLogger if getting the root logger + +* Fri Feb 06 2015 Markus Lehtonen tizen/0.6.22-20150206 +- Rebase on top of upstream version 0.6.22 +- Experimental support for BitBake + * Introduce import-bb tool. + This is the first tool in an effort of enabling gbp in the BitBake build + environment. Gbp-import-bb is a tool for importing packages from a + BitBake-based "combined" distro repository into individual per-package + Git repositories. + * Introduce pq-bb tool. + This is a tool for managing patch-queues for packages maintained in the + BitBake packaging format (.bb recipes). + * Introduce buildpackage-bb tool. + Initial version of the tool for building BitBake packages from Git. + NOTE: The buildpackage-bb tool itself is able to operate even without an + initialized BitBake build environment although the build likely fails in + this case. However, this makes it possible to export the packaging meta + data, for example. + * Introcude clone-bb tool. + This is a new tool for helping to clone remote per-package Git + repositories when working in BitBake-based "full distro" build + environment. This is useful in the case that individual packages are + actually maintained in per-package Git repositories (like Tizen). That + is, the full distro repository that the developer operates in is + composed of the packaging meta data from the individual per-package + repositories. 
When willing to contribute to a package the developer + would use clone-bb to clone the correct per-package repository and make + his changes there. + NOTE: clone-bb uses GBP_PACKAGING_REPO variable to determine the remote + repository URI. This variable should be defined in the package recipes + in order to make clone-bb usable. + * Introduce submit-bb tool. + This is a Tizen-specific tool for creating and pushing special submit + tags. +- pq-rpm: better error message +- buildpackage-rpm: look for remote upstream branches. + Consider remote branches, too, while looking for the upstream branch + when trying to guess if a package is native or not. +- rpm packaging changes + * enable CentOS 7. + * include python egg-info in -common. +- rpm: suppress stderr when unpacking src.rpm +- UpstreamSource: suppress stderr from tar in _determine_prefix() +- patchseries: strip numbering when guessing subject from filename +- Changes in logging + * don't propagate to ancestor loggers + * don't initialize handlers in GbpLogger init. + Better compatibility with 3rd party modules that have their own logging + initializations. +- Features/fixes in GitRepository + * GitRepository/__git_inout: correctly handle input error + * GitRepository.create_tag: add 'annotate' argument. + For forcing the creation of annotated tags. Causes an editor to be + spawned if no message is given. + +* Thu Aug 28 2014 Markus Lehtonen tizen/0.6.15-20140828 +- Rebase on top of upstream version 0.6.15 +- Changes in buildpackage-rpm + * new 'commitish' keyword for spec vcs tag + * notify: catch RuntimeError when importing pynotify + * fix exit code in case of GitRepositoryError + * always create tarball for native packages + * check submodules from the exported treeish + * fix crash when running prebuild hook + * always run cleaner hook + * add --git-no-build option + * don't crash on invalid vcs format strings + * add '--git-native' option + * remove some outdated/unnecessary exception handling + * get rid of prepare_export_dir() + * handle invalid keys in orig prefix + * internal refactoring and fixes +- Changes in rpm-ch + * implement --commit option + * implement --commit-msg option + * rpm-ch: implement --meta-bts option + * rpm.policy: don't allow leading whitespace in bts meta tags +- docs: manpages for the rpm tools +- Misc. fixes and features + * buildpackage: refactor tarball compression + * gbp-dch: fix handling of the '--meta' option + * import-orig-rpm: download tarball into parent directory + * GitRepository/has_submodules: add treeish argument + * tristate: implement __nonzero__() method +- Logging: + * don't automatically setup gbp-specific logger + * fix auto colorizing for custom streams +- Tests + * multiple fixes and additions in ComponentTestBase class + * testutils: add directories arg to ls_* functions + * add unit tests for buildpackage-rpm +- Packaging + * rpm-packaging: recommend/require unzip + * recommend/require zipmerge + * depend/recommend rpmbuild + * [ Junchun Guan ] Add PKGBUILD to support Arch Linux + * [ Li Jinjing ] rpm packaging: use macro for python binary + +* Wed May 21 2014 Markus Lehtonen tizen/0.6.12-20140521 +- Rebase on top of upstream version 0.6.12 +- Changes in rpm-ch + * add --message cmdline option + For giving the text for new changelog entry/entries, skipping git commit + messages entirely. + * implement '--all' option + If defined, git-rpm-ch uses all commits in the Git history. Also, + '--since' option is omitted.
+ * implement --color-scheme option + * use name (string) as the entry author + * internal refactoring + * fix crash in guessing the start commit +- Changes in pq-rpm + * implement --retain-history option + With this option defined, gbp tries to preserve the history when + converting. That is, for each commit in the old branch create one + corresponding commit in the new orphan packaging branch. This works by + dumping packaging files and updating patches for each commit. However, + empty commits are not generated - these are caused e.g. by changes in + files that are ignored by patch-generation. + NOTE! Only valid for the 'convert' action. + * convert: don't try to commit empty set of patches + * make sure we always get a sane author + * slight unification of log messages + * decrease (default) verbosity a bit + * don't create pq branch when switching +- Changes in buildpackage-rpm + * buildpackage-rpm: fix desktop notifications + * buildpackage-rpm: remove --purge option + * guess/parse spec as early as possible +- Documentation + * Include man pages in RPM packages + * Add -doc subpackage to RPM packaging + * Build deb and rpm html docs in one docbook + * A multitude of small cosmetic fixes +- Other misc. internal changes + * GitRepository: implement create_tree() method + * rpm: suppress some verbosity when updating patches in spec file + * [ Li Jinjing ] Fix missing return value in get_current_branch method + +* Thu Mar 06 2014 Markus Lehtonen tizen/0.6.8-20140306 +- Rebase on top of upstream version 0.6.8 +- Introduce git-rpm-ch tool. + Initial version of the git-rpm-ch tool which is intended for maintaining + RPM changelogs. Supports both spec files and separate "OBS style" + changelog files. + Implement '--tag' command line option (and other related options for + signing) for creating and tagging a release. These correspond to the + tagging options in git-buildpackage-rpm. + The git-buildpackage-rpm tool does not commit anything to git. However, + in rpm-ch the '--tag' option causes the changelog modifications (and, + all other staged changes) to be committed to git before creating the + tag. This makes it possible to create a release and document the + packaging/release tag name in the rpm changelog. +- Changes to gbp-pq-rpm: + * implement 'convert' subcommand. + The new subcommand is intended to be a one-time-callable command for + converting a package to use the "orphan-packaging" development model and + git layout (where packaging files are maintained in a separate branch + and code development in another branch). + * implement '--import-files' command line option. + For defining the packaging file(s) that will be imported into the + development/patch-queue branch. + By default, the local gbp conf files are imported (so that gbp sees the + same settings on the development/pq branch as on the packaging branch). + Files defined with this option will appear as one monolithic commit in + the development/patch-queue branch. + * implement --new-packaging-dir cmdline option. + Used for setting the packaging directory to be used in the new orphan + packaging branch. Defaults to --packaging-dir so that the gbp.conf files + are more likely to work without modification. + * fail gracefully on invalid config files + * support '%(upstreamversion)s' as a replaceable string in the pq branch + name. +- Changes to git-buildpackage-rpm + * patch-export from development branch.
+ Adds git-buildpackage-rpm --patch-export support for the 'orphan + packaging' development model (where packaging files and development sources are kept in separate + branches). + New functionality: + 1. If patch-export is enabled and gbp detects that the current branch + has a development/patch-queue branch, it exports the patches from there, + instead of the tip of the packaging branch. + 2. If gbp detects that the current (or exported) branch is a + development/patch-queue branch, it automatically enables patch-export + and exports packaging files from the base branch (instead of the + development/patch-queue branch). + Also, add a new '--git-patch-export-rev' command line option with which + the user can explicitly set the treeish from which patches are generated + (i.e. HEAD..) + * fix spec file vcs update when doing git-tag-only + * change default export directory. + So that the git working directory is not dirtied by default. +- Changes to git-import-orig-rpm: + * disable --merge by default. + Merging upstream to packaging does not usually make sense with rpm + packages. Rather, it can create confusion and cause inapplicable patches + to be generated. + * implement --create-missing-branches option. + * implement automatic downloading. + Automatically try to download the archive if the archive filename points + to a remote location. + * get archive from spec file. + Try to get archive path/filename from spec file if no file name is given + on the command line. This should make version bumps more + straightforward: just update version number in the spec file and run + 'git-import-orig-rpm'. +- git-import-orig: implement --create-missing-branches option. +- Fixes and new features in GitRepository class + * GitRepository/get_commit_info: correctly handle file renames/copies. + * GitRepository.create_branch: add 'force' option + * GitRepository.archive: add 'paths' option + * GitRepository/git_command: strip stderr output in case of error. + * GitRepository/add_remote_repo: fix the 'tags' argument. + * GitRepository: allow interactive commands. + * GitRepository: fix the 'edit' mode in commit methods. + * GitRepository.commit_files: add committer_info and edit arguments. + * GitRepository.commit_all: add committer_info argument. +- Fixes and new features in internal rpm/spec support + * rpm.SpecFile: add 'specpath' attribute + * rpm: correctly handle 'NoSource' and 'NoPatch' tags in spec. + * rpm: Implement spec_from_repo() helper function + * rpm: track all "section" directives in spec file. + * rpm: add gbp.rpm.changelog module. + This new module contains basic containers and functionality for parsing + and updating rpm changelogs. It is coupled with the rpm packaging policy + class which now has definitions for rpm changelog formatting. + * rpm: support for getting/setting changelog in spec. + SpecFile objects now support reading and writing of the %changelog + section. + * rpm: store full path of spec file sources +- Fix detection of prefix directory in tarballs + gbs import failed to import tarball properly if paths in the archive + were prefixed with "./". + +* Tue Jan 07 2014 Markus Lehtonen 0.6.8 +- Rebase on top of upstream version 0.6.8 + +* Mon Dec 02 2013 Markus Lehtonen 0.6.6 +- Rebase on top of upstream version 0.6.6 +- UpstreamSource: fix archive prefix guessing +- pq-rpm: create diff up to the last merge commit + Before, merge commits broke patch generation, or (most probably) caused + the generated patches to be inapplicable (similar to how + git-format-patch behaves).
+ Now, changes up to the last merge commit are squashed into one diff if + merge commits are found in the revision list from which patches are to + be generated. Individual patches (one per commit) are generated from the + last merge commit up to the exported revision. +- pq-rpm: magic word HEAD for patch-squashing + When given as the squash-point, 'HEAD' translates to the end-commit. + This allows one to configure gbp to always squash all commits into one + monolithic diff. + +* Thu Oct 17 2013 Markus Lehtonen 0.6.3 +- buildpackage: support 'commit' variable for VCS tags +- Clone a repository to a specific directory +- Fix gbs clone failure if repo url is like host:repo.git +- SpecFile.set_tag: change the default place for new tags +- remove 'repo' param to adapt to changes of write_patch_file +- pq: pass commit_info to format_patch() +- pq: pass commit_info to format_patch() +- pq.write_patch_file: drop unused argument +- pq: fix generation of series file +- common/pq: fix length of patch filenames +- buildpackage: refactor prefix handling +- tests: remove unused import in submodule tests +- tests: upper case global variables in submodule tests +- tests: add some docstrings to git submodule tests +- tests: use eq_ and ok_ from nose tools in submodule tests +- tests: sanitize some submodule tests +- pq-rpm: don't overwrite patches in the same series +- pq: don't overwrite patches in the same series +- rpm: support guessing spec file from git treeish +- rpm.SpecFile: support parsing spec as raw text data +- rpm: minor docstring fixes +- rpm: change guess_spec() to return SpecFile object +- GitRepository.list_tree: add 'paths' option +- common/pq: support more flexible pq-branch naming +- pq-rpm: drop unused 'tries' option +- GitRepository: deprecate the get_remote_repos method +- GitRepository.has_remote_repo: use get_remotes method +- git: new class and method for remote repositories +- debian packaging: change to 1.0 source package format +- Version bump, rebase on top of 0.6.3 + * Disable cover-min-percentage setting of nose + * docs: minor sgml syntax fix +- rpm tests: slight modification of import-srpm tests +- ComponentTestBase: use eq_() ok_() from nose.tools +- tests: add tests for git-import-orig-rpm +- ComponentTestBase: keep tmpdir if GBP_TESTS_NOCLEAN is in env +- ComponentTestBase: fix env restore in teardown +- config: restore mangled env in doctests +- import-orig-rpm: remove unneeded try-except +- import-orig-rpm: don't crash on invalid gbp.conf +- ComponentTestBase: more verbose output in case of branch mismatch +- rpm tests: test --filter option of git-import-srpm +- tests.testutils: add ls_zip() helper +- import-orig: rework sourcecode preparation and filtering +- import-orig: new function for filtering/re-packing sources +- buildpackage: fix handling of empty tarball prefix +- rpm: import orig: import readline in the common module +- import orig: import readline in the common module +- UpstreamSource.unpack: return UpstreamSource instance +- UpstreamSource.pack: reuse unpacked dir if no filter is used +- rpm: UpstreamSource: implement prefix guessing +- UpstreamSource: implement prefix guessing +- UpstreamSource: check that the sources exist +- UpstreamSource: store the absolute path +- rpm: UpstreamSource: move version guessing logic to PkgPolicy +- UpstreamSource: move version guessing logic to PkgPolicy +- UpstreamSource.guess_version: don't check for directories +- Fix `fatal: fetch --all does not take a repository argument` + +* Thu Aug 22 2013 Markus Lehtonen
0.6.0git20130530 +- rpm: Improve logging in case of rpmlib error +- Add clean method to GitRepository class +- Add all_remotes option in git fetch command +- Add tags option in git push command +- rpm: print a warning if importing gbp-specific librpm fails +- rpm: implement a wrapper module for rpm lib +- rpm: simplify SpecFile._parse_filtered_spec() +- Fix pylint warnings, follow Markus' suggestion +- rpm: enhance spec parsing error messages +- GitRepository/add_files: consistently update all tracked files +- GitRepository: Add refspec option to fetch method +- GitRepository: Add all option to pull method +- GitRepository: Add force option to push method diff --git a/packaging/git-buildpackage.spec b/packaging/git-buildpackage.spec index 7d0c638b..65f8065b 100644 --- a/packaging/git-buildpackage.spec +++ b/packaging/git-buildpackage.spec @@ -57,14 +57,17 @@ BuildRequires: perl-podlators %if 0%{?do_unittests} BuildRequires: python-coverage +BuildRequires: python-mock BuildRequires: python-nose BuildRequires: git-core BuildRequires: %{man_pkg_name} BuildRequires: %{dpkg_pkg_name} +BuildRequires: devscripts BuildRequires: rpm-build BuildRequires: %{rpm_python_pkg_name} +BuildRequires: pristine-tar BuildRequires: unzip -BuildRequires: libzip +BuildRequires: /usr/bin/zipmerge BuildRequires: gnupg # Missing dep of dpkg in openSUSE %if 0%{?suse_version} @@ -88,10 +91,10 @@ Requires: python-setuptools Requires: python-dateutil %if 0%{?centos_ver} && 0%{?centos_ver} <= 7 Requires: unzip -Requires: libzip +Requires: /usr/bin/zipmerge %else Recommends: unzip -Recommends: libzip +Recommends: /usr/bin/zipmerge Recommends: pristine-tar %endif @@ -105,16 +108,32 @@ Group: Development/Tools/Building Requires: %{name}-common = %{version}-%{release} Requires: rpm Requires: %{rpm_python_pkg_name} -%if 0%{?suse_version} || 0%{?tizen_version:1} +%if 0%{?tizen_version:1} Recommends: rpm-build %else Requires: rpm-build %endif +Provides: tizen-gbp-rpm = 20151027 %description rpm Set of tools from Debian that integrate the package build system with Git. This package contains the tools for building RPM packages. + +%package bb +Summary: Build with BitBake from git +Group: Development/Tools/Building +Requires: %{name}-common = %{version}-%{release} +Requires: %{name}-rpm = %{version}-%{release} +%if 0%{?suse_version} || 0%{?tizen_version:1} +Recommends: bitbake +%endif + +%description bb +Set of tools from Debian that integrate the package build system with Git. +This package contains the tools for building with the BitBake tool.
+ + %if %{with docs} %package doc Summary: Documentation for the git-buildpackage suite @@ -246,9 +265,18 @@ done %{_mandir}/man1/gbp-buildpackage-rpm.1* %{_mandir}/man1/gbp-pq-rpm.1* %{_mandir}/man1/gbp-import-srpm.1* +%{_mandir}/man1/gbp-rpm-ch.1* +%{_mandir}/man1/gbp-import-orig-rpm.1* %endif +%files bb +%defattr(-,root,root,-) +%dir %{python_sitelib}/gbp/bb +%{python_sitelib}/gbp/scripts/*bb*.py* +%{python_sitelib}/gbp/bb/*py* + + %if %{with docs} %files doc %defattr(-,root,root,-) diff --git a/setup.py b/setup.py index bbc0ba8a..d41c2c07 100644 --- a/setup.py +++ b/setup.py @@ -66,7 +66,7 @@ def readme(): packages = find_packages(exclude=['tests', 'tests.*']), data_files = [("/etc/git-buildpackage/", ["gbp.conf"]),], requires = ["six"], - setup_requires=['nose>=0.11.1', 'coverage>=2.85', 'nosexcover>=1.0.7'] if \ + setup_requires=['nose>=0.11.1', 'coverage>=2.85'] if \ os.getenv('WITHOUT_NOSETESTS') is None else [], entry_points = { 'console_scripts': [ 'gbp = gbp.scripts.supercommand:supercommand' ], diff --git a/tests/01_test_help.py b/tests/01_test_help.py index b0bb9a13..828ee3b1 100644 --- a/tests/01_test_help.py +++ b/tests/01_test_help.py @@ -4,7 +4,11 @@ from . import context -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest class TestHelp(unittest.TestCase): """Test help output of gbp commands""" diff --git a/tests/02_test_upstream_source_tar_unpack.py b/tests/02_test_upstream_source_tar_unpack.py index 02b50439..05821448 100644 --- a/tests/02_test_upstream_source_tar_unpack.py +++ b/tests/02_test_upstream_source_tar_unpack.py @@ -6,7 +6,11 @@ import os import tarfile -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest import six diff --git a/tests/04_test_submodules.py b/tests/04_test_submodules.py index 34dc9913..3981e3d9 100644 --- a/tests/04_test_submodules.py +++ b/tests/04_test_submodules.py @@ -138,7 +138,7 @@ def test_create_zip_archives(): contents = ls_zip('with-submodules.zip') ok_('test/test_submodule/testfile' in contents) - git_archive_single('HEAD', 'without-submodules.zip', 'test', + git_archive_single(REPO, 'HEAD', 'without-submodules.zip', 'test', '', '', '', 'zip') contents = ls_zip('without-submodules.zip') ok_('test/test_submodule/testfile' not in contents) diff --git a/tests/05_test_detection.py b/tests/05_test_detection.py index 126a2992..c3f729ba 100644 --- a/tests/05_test_detection.py +++ b/tests/05_test_detection.py @@ -4,7 +4,11 @@ from . 
import context -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest from gbp.scripts import buildpackage from gbp.deb import (DebianPkgPolicy, orig_file) diff --git a/tests/06_test_upstream_source.py b/tests/06_test_upstream_source.py index 3a6f92e8..1a978796 100644 --- a/tests/06_test_upstream_source.py +++ b/tests/06_test_upstream_source.py @@ -7,7 +7,12 @@ import glob import os import tarfile -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest +import tempfile import zipfile from gbp.pkg import UpstreamSource @@ -22,9 +27,11 @@ def test_directory(self): """Upstream source is a directory""" source = UpstreamSource(self.upstream_dir) self.assertEqual(source.is_orig(), False) + self.assertEqual(source.is_tarball(), False) self.assertEqual(source.path, self.upstream_dir) self.assertEqual(source.unpacked, self.upstream_dir) self.assertEqual(source.guess_version(), ('test', '1.0')) + self.assertEqual(source.prefix, 'test-1.0') def tearDown(self): context.teardown() @@ -57,8 +64,12 @@ def test_pack_tar(self): target = self.tmpdir.join("gbp_0.1.tar.bz2") repacked = self.source.pack(target) self.assertEqual(repacked.is_orig(), True) + self.assertEqual(repacked.is_tarball(), True) self.assertEqual(repacked.is_dir(), False) self.assertEqual(repacked.guess_version(), ('gbp', '0.1')) + self.assertEqual(repacked.archive_fmt, 'tar') + self.assertEqual(repacked.compression, 'bzip2') + self.assertEqual(repacked.prefix, 'gbp') self._check_tar(repacked, ["gbp/errors.py", "gbp/__init__.py"]) def test_pack_filtered(self): @@ -66,10 +77,20 @@ def test_pack_filtered(self): target = self.tmpdir.join("gbp_0.1.tar.bz2") repacked = self.source.pack(target, ["__init__.py"]) self.assertEqual(repacked.is_orig(), True) + self.assertEqual(repacked.is_tarball(), True) self.assertEqual(repacked.is_dir(), False) self._check_tar(repacked, ["gbp/errors.py"], ["gbp/__init__.py"]) + def test_pack_mangle_prefix(self): + """Check if mangling prefix works""" + source = UpstreamSource(os.path.abspath("gbp/")) + target = self.tmpdir.join("gbp_0.1.tar.bz2") + repacked = source.pack(target, newprefix="foobar") + self._check_tar(repacked, ["foobar/errors.py", "foobar/__init__.py"]) + repacked2 = source.pack(target, newprefix="") + self._check_tar(repacked2, ["./errors.py", "./__init__.py"]) + class TestZip(unittest.TestCase): """Test if unpacking zip archives works""" @@ -78,7 +99,8 @@ def setUp(self): self.zipfile = self.tmpdir.join("gbp-0.1.zip") z = zipfile.ZipFile(self.zipfile, "w") for f in glob.glob(os.path.join(context.projectdir, "gbp/*.py")): - z.write(f, f, zipfile.ZIP_DEFLATED) + arcname = os.path.relpath(f, context.projectdir) + z.write(f, arcname, zipfile.ZIP_DEFLATED) z.close() def tearDown(self): @@ -87,9 +109,13 @@ def tearDown(self): def test_unpack(self): source = UpstreamSource(self.zipfile) self.assertEqual(source.is_orig(), False) + self.assertEqual(source.is_tarball(), False) self.assertEqual(source.is_dir(), False) self.assertEqual(source.unpacked, None) self.assertEqual(source.guess_version(), ('gbp', '0.1')) + self.assertEqual(source.archive_fmt, 'zip') + self.assertEqual(source.compression, None) + self.assertEqual(source.prefix, 'gbp') source.unpack(str(self.tmpdir)) self.assertNotEqual(source.unpacked, None) diff --git a/tests/08_test_patch.py b/tests/08_test_patch.py index df1d43d4..dd1532e2 100644 --- a/tests/08_test_patch.py +++ b/tests/08_test_patch.py @@ -5,7 +5,11 
@@ from . import context import os -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest from gbp.patch_series import Patch diff --git a/tests/09_test_write_tree.py b/tests/09_test_write_tree.py index bad362c8..a7375253 100644 --- a/tests/09_test_write_tree.py +++ b/tests/09_test_write_tree.py @@ -63,7 +63,7 @@ def test_commit_tree(self): # commit the same tree again using the previous commit as parent self.repo.commit_tree(sha1, "second commit", parents=[commit]) # commit the same tree again using a non existant parent - self.assertRaises(gbp.errors.GbpError, + self.assertRaises(gbp.git.GitRepositoryError, self.repo.commit_tree, sha1, "failed commit", diff --git a/tests/11_test_dch_main.py b/tests/11_test_dch_main.py index 7e2a197d..078df2fb 100644 --- a/tests/11_test_dch_main.py +++ b/tests/11_test_dch_main.py @@ -8,7 +8,11 @@ from gbp.scripts import dch -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest import os import re diff --git a/tests/12_test_deb.py b/tests/12_test_deb.py index d46f987c..2be30976 100644 --- a/tests/12_test_deb.py +++ b/tests/12_test_deb.py @@ -4,7 +4,12 @@ from . import context -import os, tempfile, unittest +import os, tempfile +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest import gbp.deb diff --git a/tests/13_test_gbp_pq.py b/tests/13_test_gbp_pq.py index 8e8d2c3a..6a478818 100644 --- a/tests/13_test_gbp_pq.py +++ b/tests/13_test_gbp_pq.py @@ -19,7 +19,11 @@ from . import testutils import os -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest from gbp.scripts.pq import generate_patches, export_patches import gbp.scripts.common.pq as pq @@ -99,7 +103,8 @@ def test_apply_single_patch(self): patch = gbp.patch_series.Patch(_patch_path('foo.patch')) - pq.apply_single_patch(self.repo, 'master', patch, None) + dummy_opts = object() + pq.apply_single_patch(self.repo, 'master', patch, None, dummy_opts) self.assertIn('foo', self.repo.list_files()) @@ -220,7 +225,7 @@ def test_drop(self): repo = self.repo start = repo.get_branch() pq_branch = os.path.join('patch-queue', start) - pq.switch_pq(repo, start) + pq.switch_pq(repo, start, TestExport.Options) self.assertEqual(repo.get_branch(), pq_branch) export_patches(repo, pq_branch, TestExport.Options) self.assertEqual(repo.get_branch(), start) diff --git a/tests/16_test_supercommand.py b/tests/16_test_supercommand.py index 25529777..71a1fb29 100644 --- a/tests/16_test_supercommand.py +++ b/tests/16_test_supercommand.py @@ -16,7 +16,11 @@ """Test L{gbp} command wrapper""" import sys -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest import gbp.scripts.supercommand class TestSuperCommand(unittest.TestCase): diff --git a/tests/18_test_Config.py b/tests/18_test_Config.py index 306999f3..401435fc 100644 --- a/tests/18_test_Config.py +++ b/tests/18_test_Config.py @@ -1,7 +1,12 @@ # vim: set fileencoding=utf-8 : import os -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest +import sys from gbp.config import GbpOptionParser, GbpOptionGroup from .testutils import GbpLogTester diff --git a/tests/19_test_gbp_scripts_config.py b/tests/19_test_gbp_scripts_config.py index ae845611..8cd62d9d 100644 --- a/tests/19_test_gbp_scripts_config.py +++ 
b/tests/19_test_gbp_scripts_config.py @@ -16,7 +16,12 @@ """Test the L{gbp} config command""" import os -import unittest +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest +import sys import gbp.scripts.config diff --git a/tests/component/__init__.py b/tests/component/__init__.py index 02c4829c..82b4d438 100644 --- a/tests/component/__init__.py +++ b/tests/component/__init__.py @@ -97,6 +97,7 @@ def setUpClass(cls): # Prevent local config files from messing up the tests os.environ['GBP_CONF_FILES'] = '%(top_dir)s/.gbp.conf:' \ '%(top_dir)s/debian/gbp.conf:%(git_dir)s/gbp.conf' + super(ComponentTestBase, cls).init_class() @classmethod def tearDownClass(cls): diff --git a/tests/component/rpm/test_buildpackage_rpm.py b/tests/component/rpm/test_buildpackage_rpm.py index ef8312eb..a4d3b907 100644 --- a/tests/component/rpm/test_buildpackage_rpm.py +++ b/tests/component/rpm/test_buildpackage_rpm.py @@ -30,7 +30,7 @@ from gbp.git import GitRepository from gbp.scripts.buildpackage_rpm import main as gbp_rpm from tests.component.rpm import RpmRepoTestBase, RPM_TEST_DATA_DIR -from tests.testutils import ls_tar, ls_zip +from tests.testutils import ls_dir, ls_tar, ls_zip, capture # Disable "Method could be a function warning" # pylint: disable=R0201 @@ -46,8 +46,9 @@ def mock_gbp(args): """Wrapper for gbp-buildpackage-rpm""" - return gbp_rpm(['arg0', '--git-notify=off'] + args + - ['-ba', '--clean', '--target=noarch', '--nodeps']) + with capture.capture_stderr(): + return gbp_rpm(['arg0', '--git-notify=off'] + args + + ['-ba', '--clean', '--target=noarch', '--nodeps']) def mock_notify(summary, message, notify_opt): """Mock notification system""" @@ -165,11 +166,15 @@ def test_options_ignore(self): with open('untracked-file', 'w') as fobj: fobj.write('this file is not tracked\n') + eq_(mock_gbp([]), 1) + eq_(mock_gbp(['--git-ignore-untracked']), 0) + self.check_rpms('../rpmbuild/RPMS/*') + # Modify tracked file with open('README', 'a') as fobj: fobj.write('new stuff\n') - eq_(mock_gbp([]), 1) + eq_(mock_gbp(['--git-ignore-untracked']), 1) eq_(mock_gbp(['--git-ignore-new']), 0) @mock.patch('gbp.notifications.notify', mock_notify) @@ -194,13 +199,13 @@ def test_option_tmp_dir(self): """Test the --git-tmp-dir option""" self.init_test_repo('gbp-test-native') - eq_(mock_gbp(['--git-tmp-dir=../gbptmp', '--git-builder=true']), 0) + eq_(mock_gbp(['--git-tmp-dir=../gbptmp', '--git-no-build']), 0) ok_(os.path.isdir('../gbptmp')) # Check tmpdir access/creation error os.chmod('../gbptmp', 0) try: - eq_(mock_gbp(['--git-tmp-dir=../gbptmp/foo', '--git-builder=true']), 1) + eq_(mock_gbp(['--git-tmp-dir=../gbptmp/foo', '--git-no-build']), 1) finally: os.chmod('../gbptmp', stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) @@ -237,13 +242,17 @@ def test_tagging(self): ok_(repo.has_tag('rel-tag2')) # Valid tag format string keys - tag_keys = ['upstreamversion', 'release', 'version', 'vendor'] + tag_keys = ['upstreamversion', 'release', 'version', 'vendor', + 'nowtime', 'authortime', 'committime', + 'nowtimenum', 'authortimenum', 'committimenum'] # Should fail if the fag format has invalid keys (foo here) tag_fmt = '_'.join(['%(' + key + ')s' for key in tag_keys + ['foo']]) eq_(mock_gbp(['--git-tag', '--git-packaging-tag=%(foo)s']), 1) # Remove 'foo' and should succeed tag_fmt = '_'.join(['%(' + key + ')s' for key in tag_keys]) eq_(mock_gbp(['--git-tag-only', '--git-packaging-tag=%s' % tag_fmt]), 0) + # New tag with same format should succeed when '*num' keys are present + 
eq_(mock_gbp(['--git-tag-only', '--git-packaging-tag=%s' % tag_fmt]), 0) def test_option_upstream_tree(self): """Test the --git-upstream-tree option""" @@ -293,6 +302,20 @@ def test_option_upstream_tree(self): eq_(mock_gbp(['--git-upstream-tree=invalid-tree']), 2) self._check_log(-1, ".*Invalid treeish object") + def test_option_orig_prefix(self): + """Test the --git-orig-prefix option""" + repo = self.init_test_repo('gbp-test') + + # Building with invalid prefix should fail + eq_(mock_gbp(['--git-orig-prefix=foo']), 1) + ref_files = ['foo/' + path for path in repo.ls_tree('upstream')] + tar_files = ls_tar('../rpmbuild/SOURCES/gbp-test-1.1.tar.bz2', False) + self.check_files(tar_files, ref_files) + + # Test invalid keys + eq_(mock_gbp(['--git-orig-prefix=%(foo)s', '--git-no-build']), 1) + self._check_log(-1, ".*Missing value 'foo' in") + def test_pristine_tar(self): """Test pristine-tar""" repo = self.init_test_repo('gbp-test') @@ -362,8 +385,8 @@ def test_packaging_branch_options(self): # Test building when not on any branch repo.set_branch(repo.rev_parse('HEAD')) - eq_(mock_gbp(['--git-builder=true']), 1) - eq_(mock_gbp(['--git-ignore-branch', '--git-builder=true']), 0) + eq_(mock_gbp(['--git-no-build']), 1) + eq_(mock_gbp(['--git-ignore-branch', '--git-no-build']), 0) def test_option_submodules(self): """Test the --git-submodules option""" @@ -384,14 +407,14 @@ def test_option_submodules(self): # Test the "no" option eq_(mock_gbp(['--git-no-submodules', '--git-upstream-tree=%s' % - upstr_branch, '--git-ignore-new']), 0) + upstr_branch, '--git-ignore-untracked']), 0) tar_files = ls_tar('../rpmbuild/SOURCES/gbp-test-1.1.tar.bz2', False) self.check_files(upstr_files, tar_files) shutil.rmtree('../rpmbuild') # Test the "yes" option eq_(mock_gbp(['--git-submodules', '--git-upstream-tree=%s' % - upstr_branch, '--git-ignore-new']), 0) + upstr_branch, '--git-ignore-untracked']), 0) tar_files = ls_tar('../rpmbuild/SOURCES/gbp-test-1.1.tar.bz2', False) ref_files = upstr_files + ['gbp-test/gbp-test-native.repo/' + path for path in sub_files] @@ -402,7 +425,7 @@ def test_option_submodules(self): shutil.rmtree('gbp-test-native.repo') repo.create('gbp-test-native.repo') eq_(mock_gbp(['--git-submodules', '--git-upstream-tree=%s' % - upstr_branch, '--git-ignore-new']), 2) + upstr_branch, '--git-ignore-untracked']), 2) def test_option_submodules_native(self): """Test the --git-submodules option for native packages""" @@ -428,7 +451,7 @@ def test_option_submodules_native(self): # Test submodule failure shutil.rmtree('gbp-test-native2.repo') repo.create('gbp-test-native2.repo') - eq_(mock_gbp(['--git-submodules', '--git-ignore-new']), 1) + eq_(mock_gbp(['--git-submodules', '--git-ignore-untracked']), 1) def test_option_builder(self): """Test --git-builder option and it's args""" @@ -452,6 +475,7 @@ def test_option_builder(self): args = fobj.read() eq_(args, '--arg1 --arg2 gbp-test-native.spec') + def test_option_cleaner(self): """Test --git-cleaner option""" self.init_test_repo('gbp-test-native') @@ -485,6 +509,11 @@ def test_hook_options(self): eq_(mock_gbp(args + ['--git-tag-only', '--git-packaging-tag=tag1']), 0) self.check_and_rm_file('../hooks', 'cleanerposttag') + # Prebuild is not run when only exporting + eq_(mock_gbp(args + ['--git-no-build']), 0) + self.check_and_rm_file('../hooks', 'cleanerpostexport') + shutil.rmtree('../rpmbuild') + # Export and build scripts are run when not tagging eq_(mock_gbp(args), 0) self.check_and_rm_file('../hooks', 'cleanerpostexportprebuildpostbuild') @@ -518,12 
+547,12 @@ def test_export_failure(self): s_rwx = stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC # Pre-create all files - eq_(mock_gbp(['--git-builder=true']), 0) + eq_(mock_gbp(['--git-no-build']), 0) # Error in exporting packaging files os.chmod('../rpmbuild/SOURCES', 0) try: - eq_(mock_gbp(['--git-builder=true']), 1) + eq_(mock_gbp(['--git-no-build']), 1) finally: os.chmod('../rpmbuild/SOURCES', s_rwx) self._check_log(-1, ".*Error exporting packaging files") @@ -531,7 +560,7 @@ def test_export_failure(self): # Error in creating archive os.chmod('../rpmbuild/SOURCES/gbp-test-native-1.0.zip', 0) try: - eq_(mock_gbp(['--git-builder=true']), 1) + eq_(mock_gbp(['--git-no-build']), 1) finally: os.chmod('../rpmbuild/SOURCES/gbp-test-native-1.0.zip', s_rwx) self._check_log(-1, ".*Error creating ../rpmbuild/SOURCES/.*.zip") @@ -557,7 +586,7 @@ def test_option_export(self): with open('ignored.tmp', 'w') as fobj: fobj.write('ignored') - base_args = ['--git-ignore-new', '--git-builder=true'] + base_args = ['--git-ignore-new', '--git-no-build'] # Test exporting of git index foo_txt_index = repo.show('HEAD:foo.txt') + 'staged' eq_(mock_gbp(base_args + ['--git-export=INDEX']), 0) @@ -566,9 +595,23 @@ def test_option_export(self): ok_(not os.path.exists('../rpmbuild/SOURCES/ignored.tmp')) shutil.rmtree('../rpmbuild') + # Test exporting of working copy (tracked files only) + eq_(mock_gbp(base_args + ['--git-export=WC.TRACKED']), 0) + foo_txt_wc = repo.show('HEAD:foo.txt') + 'staged' + 'unstaged' + self.check_and_rm_file('../rpmbuild/SOURCES/foo.txt', foo_txt_wc) + ok_(not os.path.exists('../rpmbuild/SOURCES/untracked')) + ok_(not os.path.exists('../rpmbuild/SOURCES/ignored.tmp')) + shutil.rmtree('../rpmbuild') + + # Test exporting of working copy (include untracked files) + eq_(mock_gbp(base_args + ['--git-export=WC.UNTRACKED']), 0) + self.check_and_rm_file('../rpmbuild/SOURCES/foo.txt', foo_txt_wc) + self.check_and_rm_file('../rpmbuild/SOURCES/untracked', 'untracked') + ok_(not os.path.exists('../rpmbuild/SOURCES/ignored.tmp')) + shutil.rmtree('../rpmbuild') + # Test exporting of working copy (include all files) eq_(mock_gbp(base_args + ['--git-export=WC']), 0) - foo_txt_wc = repo.show('HEAD:foo.txt') + 'staged' + 'unstaged' self.check_and_rm_file('../rpmbuild/SOURCES/foo.txt', foo_txt_wc) self.check_and_rm_file('../rpmbuild/SOURCES/untracked', 'untracked') self.check_and_rm_file('../rpmbuild/SOURCES/ignored.tmp', 'ignored') @@ -606,3 +649,81 @@ def test_option_packaging_dir(self): eq_(mock_gbp(['--git-packaging-dir=foo', '--git-spec-file=packaging/gbp-test-native.spec']), 0) + def test_option_spec_vcs_tag(self): + """Test the --git-spec-vcs-tag cmdline option""" + repo = self.init_test_repo('gbp-test-native') + + eq_(mock_gbp(['--git-spec-vcs-tag=foobar-%(commit)s']), 0) + sha1 = repo.rev_parse('HEAD') + num_tags = 0 + with open('../rpmbuild/SPECS/gbp-test-native.spec') as fobj: + for line in fobj.readlines(): + if line.startswith('VCS: '): + ok_(re.match(r'VCS:\s+foobar-%s\n$' % sha1, line)) + num_tags += 1 + eq_(num_tags, 1) + + # Test invalid key + eq_(mock_gbp(['--git-spec-vcs-tag=%(invalid-key)s']), 1) + self._check_log(-1, r".*Failed to format %\(invalid-key\)s") + + def test_patch_export_options(self): + """Test patch export options""" + repo = self.init_test_repo('gbp-test2') + + # Test no-patch-export + base_args = ['--git-no-build', '--git-export-specdir=', + '--git-export-sourcedir='] + eq_(mock_gbp(base_args + ['--git-no-patch-export']), 0) + ref_files = repo.ls_tree('HEAD:packaging') + 
ref_files.add('gbp-test2-2.0.tar.gz') + self.check_files(ref_files, ls_dir('../rpmbuild', False)) + shutil.rmtree('../rpmbuild') + + # No patches should be generated if patch-export-rev is upstream version + + # Test patch compression and numbering + eq_(mock_gbp(base_args + ['--git-no-patch-numbers', + '--git-patch-compress=1']), 0) + new_files = ls_dir('../rpmbuild', False) - ref_files + ok_(len(new_files) > 0) + for fname in new_files: + # Patches should start with a letter and be compressed with gz + ok_(re.match(r'^[a-zA-Z]\S*.patch.gz$', fname), fname) + + def test_devel_branch_support(self): + """Test patch-generation from q/development branch""" + repo = self.init_test_repo('gbp-test') + pq_br_fmt = 'pq/%(branch)s' + + # Patch export with no apparent pq branch should fail + eq_(mock_gbp(['--git-patch-export']), 2) + self._check_log(-1, r".*Start commit \S+ not an ancestor of end commit") + + # With a valid pq branch patch export should succeed + eq_(mock_gbp(['--git-patch-export', '--git-pq-branch=%s' % pq_br_fmt]), + 0) + self.check_rpms('../rpmbuild/RPMS/*') + shutil.rmtree('../rpmbuild') + eq_(mock_gbp(['--git-patch-export', '--git-pq-branch=%s' % pq_br_fmt, + '--git-export=master']), 0) + self.check_rpms('../rpmbuild/RPMS/*') + shutil.rmtree('../rpmbuild') + + # With pq branch but with wrong patch-export rev build should fail + eq_(mock_gbp(['--git-patch-export', '--git-pq-branch=%s' % pq_br_fmt, + '--git-patch-export-rev=HEAD']), 2) + self._check_log(-1, r".*Start commit \S+ not an ancestor of end commit") + + # Patch-export should be auto-enabled when on pq branch + pq_br = pq_br_fmt % {'branch': repo.get_branch()} + repo.set_branch(pq_br) + eq_(mock_gbp(['--git-pq-branch=%s' % pq_br_fmt, '--git-ignore-branch']), + 0) + self.check_rpms('../rpmbuild/RPMS/*') + shutil.rmtree('../rpmbuild') + + # Fail when (apparently) on pq branch but no packaging branch found + eq_(mock_gbp(['--git-pq-branch=%s' % pq_br, '--git-ignore-branch', + '--git-packaging-branch=foo']), 1) + diff --git a/tests/component/rpm/test_import_orig_rpm.py b/tests/component/rpm/test_import_orig_rpm.py new file mode 100644 index 00000000..29b0d4b1 --- /dev/null +++ b/tests/component/rpm/test_import_orig_rpm.py @@ -0,0 +1,621 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2012-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details.
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +"""Tests for the git-import-orig-rpm tool""" + +import os +import shutil +import sys +import subprocess +import tarfile +import tempfile +from nose.plugins.skip import SkipTest +from nose.tools import assert_raises, eq_ # pylint: disable=E0611 +from StringIO import StringIO + +from gbp.scripts.import_orig_rpm import main as import_orig_rpm + +from tests.testutils import ls_dir, ls_tar, ls_zip +from tests.component import ComponentTestBase, ComponentTestGitRepository +from tests.component.rpm import RPM_TEST_DATA_DIR + +# Disable "Method could be a function warning" +# pylint: disable=R0201 + +DATA_DIR = os.path.join(RPM_TEST_DATA_DIR, 'orig') + + +DUMMY_SPEC = """ +Name: dummy +Version: 1.0 +Release: 0 +License: GPL-2.0 +Summary: Dummy package +Source: %(source)s + +%%description +Dummy package generated by unit tests +""" + +def mock_import(args, stdin_data="\n\n", cwd=None): + """Wrapper for import-orig-rpm for feeding mock stdin data to it""" + old_cwd = os.path.abspath(os.path.curdir) + if cwd: + os.chdir(cwd) + + # Create stub file with mock data + mock_stdin = StringIO() + mock_stdin.write(stdin_data) + mock_stdin.seek(0) + + # Call import-orig-rpm with mock data + sys.stdin = mock_stdin + ret = import_orig_rpm(['arg0'] + args) + sys.stdin = sys.__stdin__ + mock_stdin.close() + + # Return to original working directory + if cwd: + os.chdir(old_cwd) + return ret + + +class ImportOrigTestBase(ComponentTestBase): + """Base class for all import-orig-rpm unit tests""" + + @classmethod + def setUpClass(cls): + """Class setup, common for all test cases""" + super(ImportOrigTestBase, cls).setUpClass() + + def __init__(self, *args, **kwargs): + super(ImportOrigTestBase, self).__init__(*args, **kwargs) + + def setUp(self): + """Test case setup""" + super(ImportOrigTestBase, self).setUp() + + @classmethod + def check_tree(cls, repo, treeish, filelist): + """Check the contents (list of files) in a git treeish""" + treeish_files = repo.ls_tree(treeish) + ImportOrigTestBase.check_files(treeish_files, filelist) + + +class TestImportOrig(ImportOrigTestBase): + """Basic tests for git-import-orig-rpm""" + + @staticmethod + def _init_repo_with_dummy_packaging(): + """Create a dummy packaging branch with one commit""" + repo = ComponentTestGitRepository.create('.') + shutil.copy2('.git/HEAD', 'foobar') + repo.add_files('.') + repo.commit_all('First commit') + return repo + + @staticmethod + def _create_dummy_spec(path, **kwargs): + """Create a dummy spec file""" + with open(path, 'w') as fobj: + print kwargs + fobj.write(DUMMY_SPEC % kwargs) + + def test_invalid_args(self): + """ + See that import-orig-rpm fails gracefully when called with invalid args + """ + repo = ComponentTestGitRepository.create('.') + origs = [os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2'), + os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2')] + # Test multiple archives + eq_(mock_import([] + origs), 1) + self._check_log(0, 'gbp:error: More than one archive specified') + self._clear_log() + + # Test invalid archive + false_orig = os.path.join(RPM_TEST_DATA_DIR, 'gbp-test-1.0-1.src.rpm') + eq_(mock_import([false_orig], 'foo\n1\n'), 1) + self._check_log(0, "gbp:error: Unsupported archive format") + self._clear_log() + + # Test non-existing archive + eq_(mock_import(['none.tar.bz2'], 'foo\n1\n'), 1) + self._check_log(0, "gbp:error: UpstreamSource: unable to find") + self._clear_log() + + # Check that nothing is in the 
repo + self._check_repo_state(repo, None, []) + + # Test invalid cmdline options + with assert_raises(SystemExit): + mock_import(['--invalid-arg=123']) + + def test_import_outside_repo(self): + """Test importing when not in a git repository""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + # Import should fail + eq_(mock_import([orig]), 1) + self._check_log(0, 'gbp:error: %s is not a git repository' % + os.path.abspath(os.getcwd())) + + def test_invalid_config_file(self): + """Test invalid config file""" + # Create dummy invalid config file and try to import (should fail) + ComponentTestGitRepository.create('.') + with open('.gbp.conf', 'w') as conffd: + conffd.write('foobar\n') + eq_(mock_import(['foo']), 1) + self._check_log(0, 'gbp:error: File contains no section headers.') + + def test_import_tars(self): + """Test importing of tarballs, with and without merging""" + repo = ComponentTestGitRepository.create('.') + # Import first version, with --merge + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(mock_import(['--merge', orig]), 0) + files = ['Makefile', 'README', 'dummy.sh'] + self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + eq_(len(repo.get_commits(until='master')), 1) + eq_(len(repo.get_commits(until='upstream')), 1) + eq_(repo.get_tags(), ['upstream/1.0']) + + # Import second version, don't merge to master branch + orig = os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2') + eq_(mock_import([orig]), 0) + self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + eq_(len(repo.get_commits(until='master')), 1) + eq_(len(repo.get_commits(until='upstream')), 2) + eq_(repo.get_tags(), ['upstream/1.0', 'upstream/1.1']) + # Check that master is based on v1.0 + sha1 = repo.rev_parse("%s^0" % 'upstream/1.0') + eq_(repo.get_merge_base('master', 'upstream'), sha1) + + def test_import_zip(self): + """Test importing of zip archive""" + repo = ComponentTestGitRepository.create('.') + # Import zip; no master branch should be present + orig = os.path.join(DATA_DIR, 'gbp-test-native-1.0.zip') + eq_(mock_import([orig]), 0) + self._check_repo_state(repo, None, ['upstream'], []) + eq_(repo.get_tags(), ['upstream/1.0']) + + def test_import_to_existing(self): + """Test importing to an existing repo""" + # Create new repo and add dummy files + repo = self._init_repo_with_dummy_packaging() + sha1 = repo.rev_parse('HEAD^0') + + # Test missing upstream branch + orig = os.path.join(DATA_DIR, 'gbp-test2-2.0.tar.gz') + eq_(mock_import([orig]), 1) + self._check_log(1, 'Repository does not have branch') + + # Create an orphan, empty 'upstream' branch + tree = repo.write_tree('.git/_empty_index') + commit = repo.commit_tree(tree=tree, msg='Initial upstream', parents=[]) + repo.update_ref("refs/heads/upstream", commit) + + # Test importing to non-clean repo + files = ['foobar'] + self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + shutil.copy2('.git/HEAD', 'foobaz') + self._clear_log() + eq_(mock_import([orig]), 1) + self._check_log(0, 'gbp:error: Repository has uncommitted changes') + os.unlink('foobaz') + + # Create new branch + repo.create_branch('mytopic') + repo.set_branch('mytopic') + + # Finally, import should succeed + eq_(mock_import([orig, '--merge']), 0) + files = ['Makefile', 'README', 'dummy.sh', 'foobar'] + self._check_repo_state(repo, 'master', + ['master', 'mytopic', 'upstream'], files) + eq_(repo.get_tags(), ['upstream/2.0']) + # Our topic branch shouldn't have changed, unlike master + eq_(repo.rev_parse('mytopic^0'),
sha1) + eq_(len(repo.get_commits(until='mytopic')), 1) + # One commit from topic branch, two from upstream, one merge commit + eq_(len(repo.get_commits(until='master')), 4) + + def test_branch_update(self): + """Check that the working copy is kept in sync with branch HEAD""" + repo = ComponentTestGitRepository.create('.') + orig1 = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + orig2 = os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2') + eq_(mock_import(['--merge', orig1]), 0) + repo.set_branch('upstream') + eq_(mock_import([orig2]), 0) + files = ['Makefile', 'README', 'dummy.sh'] + self._check_repo_state(repo, 'upstream', ['master', 'upstream'], files) + eq_(len(repo.get_commits(until='upstream')), 2) + + def test_import_dir(self): + """Test importing of unpacked sources""" + # Unpack sources and create repo + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + subprocess.Popen(['tar', 'xf', orig]) + repo = ComponentTestGitRepository.create('myrepo') + os.chdir('myrepo') + + # Import dir first, fool the version to be 0.9 + eq_(mock_import(['../gbp-test'], 'gbp-test\n0.9\n'), 0) + files = ['Makefile', 'README', 'dummy.sh'] + self.check_tree(repo, 'upstream', files) + self._check_repo_state(repo, None, ['upstream'], []) + + # Import from the tarball and check that the contents are the same + eq_(mock_import([orig]), 0) + eq_(len(repo.diff('upstream/0.9', 'upstream/1.0')), 0) + + def test_basic_filtering(self): + """Basic test for import filter""" + repo = ComponentTestGitRepository.create('.') + orig = os.path.join(DATA_DIR, 'gbp-test-1.1.with_dotgit.tar.bz2') + # Try importing a tarball with git metadata + eq_(mock_import([orig], 'gbp-test\n1.0\n'), 1) + self._check_log(0, 'gbp:error: The orig tarball contains .git') + + # Try filtering out .git directory and shell scripts + eq_(mock_import(['--filter=.git', '--filter=*.sh', '--merge', orig], + 'gbp-test\n1.0\n'), 0) + self._check_repo_state(repo, 'master', ['master', 'upstream']) + eq_(len(repo.get_commits(until='master')), 1) + eq_(len(repo.get_commits(until='upstream')), 1) + eq_(repo.get_tags(), ['upstream/1.0']) + added_files = repo.get_commit_info('upstream')['files']['A'] + eq_(set(added_files), set(['Makefile', 'README'])) + + def test_noninteractive(self): + """Test non-interactive mode""" + repo = ComponentTestGitRepository.create('testrepo') + orig = os.path.join(DATA_DIR, 'gbp-test-native-1.0.zip') + orig_renamed = os.path.join(os.path.abspath('.'), 'foo.zip') + shutil.copy(orig, orig_renamed) + os.chdir('testrepo') + + # Guessing name and version should fail + eq_(mock_import(['--no-interactive', orig_renamed]), 1) + self._check_log(-1, "gbp:error: Couldn't determine upstream package") + + # Guessing from the original archive should succeed + eq_(mock_import(['--no-interactive', '--merge', orig], + stdin_data=''), 0) + files = ['.gbp.conf', 'Makefile', 'README', 'dummy.sh', + 'packaging/gbp-test-native.spec'] + self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + eq_(len(repo.get_commits(until='master')), 1) + + # Import "new" version, this time package name should be taken from spec + eq_(mock_import(['--no-interactive', orig_renamed], stdin_data=''), 1) + self._check_log(-1, "gbp:error: Couldn't determine upstream version") + + def test_option_create_missing(self): + """Test the --create-missing-branches option""" + # Create new repo and add dummy files + repo = self._init_repo_with_dummy_packaging() + + # Test missing upstream branch + orig = os.path.join(DATA_DIR, 'gbp-test2-2.0.tar.gz') +
eq_(mock_import([orig]), 1) + self._check_log(1, 'Repository does not have branch') + + # Try again, with --create-missing-branches + eq_(mock_import(['--create-missing-branches', orig]), 0) + self._check_repo_state(repo, 'master', ['master', 'upstream']) + eq_(len(repo.get_commits(until='upstream')), 1) + + def test_misc_options(self): + """Test various options of git-import-orig-rpm""" + repo = ComponentTestGitRepository.create('.') + # Import one orig with default options to get 'upstream' branch + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(mock_import(['-u0.8', orig]), 0) + + # Import the "native" zip to get packaging files + orig = os.path.join(DATA_DIR, 'gbp-test-native-1.0.zip') + base_args = ['--packaging-branch=pack', '--upstream-branch=orig', + '--upstream-tag=orig/%(upstreamversion)s', '--merge'] + # Fake version to be 0.9 + extra_args = ['-u0.9', '--upstream-vcs-tag=upstream/0.8', orig] + eq_(mock_import(base_args + extra_args), 0) + # Check repository state + files = [] + self._check_repo_state(repo, None, ['pack', 'orig', 'upstream'], files) + eq_(len(repo.get_commits(until='pack')), 2) + # Check tags + tags = repo.get_tags() + eq_(set(tags), set(['upstream/0.8', 'orig/0.9'])) + + # Change to packaging branch and create new commit + repo.set_branch('pack') + shutil.copy2('.git/HEAD', 'my_new_file') + repo.add_files('.') + repo.commit_all('My new commit') + # Import a new version, name should be taken from spec + orig = os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2') + extra_args = ['--packaging-dir=packaging', '--no-interactive', '-u1.1', + orig] + eq_(mock_import(base_args + extra_args, ''), 0) + # Three upstream versions, "my new" commit and one merge commit + eq_(len(repo.get_commits(until='pack')), 5) + tags = repo.get_tags() + eq_(set(tags), set(['upstream/0.8', 'orig/0.9', 'orig/1.1'])) + + def test_import_hooks(self): + """Basic test for postimport hook""" + repo = ComponentTestGitRepository.create('.') + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + + script = ("echo -n branch: $GBP_BRANCH, version: %(upstreamversion)s" + " > hook.txt") + eq_(mock_import(['--postimport', script, '--merge', orig]), 0) + self._check_repo_state(repo, 'master', ['master', 'upstream']) + eq_(repo.get_tags(), ['upstream/1.0']) + with open('hook.txt', 'r') as hookout: + data = hookout.read() + eq_(data, 'branch: master, version: 1.0') + + def test_hook_error(self): + """Test postimport hook failure""" + repo = ComponentTestGitRepository.create('.') + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(mock_import(['--postimport=_notexist.sh', '--merge', orig]), 1) + self._check_log(-2, "gbp:error: '_notexist.sh' failed:") + self._check_log(-1, 'gbp:error: Import of %s failed' % orig) + # Other parts of the import should've succeeded + self._check_repo_state(repo, 'master', ['master', 'upstream']) + + def test_archive_from_spec(self): + """Test taking archive file path from spec file""" + repo = ComponentTestGitRepository.create('.') + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + + # Test non-existent spec file + eq_(mock_import([]), 1) + self._check_log(0, '.*No archive to import specified and no spec file') + + # Test non-existent archive + self._create_dummy_spec('dummy.spec', source='non-existent.tar.gz') + eq_(mock_import([]), 1) + self._check_log(-1, '.*unable to find \S+non-existent.tar.gz') + + # Test failing download + self._create_dummy_spec('dummy.spec', source='foo://bar.tar.gz') + eq_(mock_import([]), 1) + self._check_log(-1,
'.*Download failed') + + # Test existing archive + self._create_dummy_spec('dummy.spec', source=orig) + eq_(mock_import([]), 0) + self._check_repo_state(repo, None, ['upstream'], ['dummy.spec']) + + +class TestPristineTar(ImportOrigTestBase): + """ + Test importing with pristine-tar + + Especially, tests different options for mangling the tarball. We basically + have these mostly independent options: + - filter + - filter-pristine-tar + - pristine-tarball-name + - orig-prefix + And, these options can be used in importing directories and tarballs and zip + files. + """ + + @classmethod + def setUpClass(cls): + """Class setup, common for all test cases""" + if not os.path.exists('/usr/bin/pristine-tar'): + raise SkipTest('Skipping %s:%s as pristine-tar tool is not ' + 'available' % (__name__, cls.__name__)) + super(TestPristineTar, cls).setUpClass() + + def __init__(self, *args, **kwargs): + super(TestPristineTar, self).__init__(*args, **kwargs) + self.repo = None + + def setUp(self): + """Test case setup""" + super(TestPristineTar, self).setUp() + self.repo = ComponentTestGitRepository.create('repo') + + def check_repo(self, current_branch, branches=None, files=None): + """Check the state of repo""" + if branches is None: + # Default branches + branches = ['upstream', 'pristine-tar'] + return self._check_repo_state(self.repo, current_branch, branches, + files) + + def check_tree(self, treeish, filelist): + """Check treeish content""" + return super(TestPristineTar, self).check_tree(self.repo, treeish, + filelist) + + @staticmethod + def unpack_tar(archive): + """Unpack tarball, return directory containing sources""" + tarobj = tarfile.open(archive, 'r') + os.mkdir('unpacked') + tarobj.extractall('unpacked') + tarobj.close() + dirlist = os.listdir('unpacked') + if len(dirlist) == 1: + return os.path.abspath(os.path.join('unpacked', dirlist[0])) + else: + return os.path.abspath('unpacked') + + def mock_import(self, args, stdin_data="\n\n"): + """Import helper for pristine-tar""" + return mock_import(args, stdin_data, self.repo.path) + + def ls_pristine_tar(self, archive): + """List contents of the tarball committed into pristine-tar""" + tmpdir = os.path.abspath(tempfile.mkdtemp(dir='.')) + tarball = os.path.join(tmpdir, archive) + try: + popen = subprocess.Popen(['pristine-tar', 'checkout', tarball], + cwd=self.repo.path) + popen.wait() + if popen.returncode: + raise Exception('Pristine-tar checkout failed!') + return ls_tar(tarball) + finally: + shutil.rmtree(tmpdir) + + def test_basic_import_pristine_tar(self): + """Test importing with pristine-tar""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(self.mock_import(['--pristine-tar', '--merge', orig]), 0) + files = ['Makefile', 'README', 'dummy.sh'] + branches = ['master', 'upstream', 'pristine-tar'] + self.check_repo('master', branches, files) + subject = self.repo.get_commit_info('pristine-tar')['subject'] + eq_(subject, 'pristine-tar data for %s' % os.path.basename(orig)) + self.check_files(ls_tar(orig), + self.ls_pristine_tar('gbp-test-1.0.tar.bz2')) + + def test_rename(self): + """Renaming orig archive""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + args = ['--pristine-tar', '--pristine-tarball-name=my.tgz', orig] + eq_(self.mock_import(args), 0) + self.check_repo(None, None, []) + self.check_files(ls_tar(orig), self.ls_pristine_tar('my.tgz')) + + def test_branch_update(self): + """Check that the working copy is kept in sync with branch HEAD""" + orig1 = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + orig2 
= os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2') + eq_(self.mock_import(['--pristine-tar', orig1]), 0) + self.repo.set_branch('pristine-tar') + eq_(self.mock_import(['--pristine-tar', orig2]), 0) + self.check_repo('pristine-tar', None) + eq_(len(self.repo.get_commits(until='pristine-tar')), 2) + + def test_zip(self): + """Importing zip file""" + orig = os.path.join(DATA_DIR, 'gbp-test-native-1.0.zip') + eq_(self.mock_import(['--pristine-tar', orig]), 0) + files = ['.gbp.conf', 'Makefile', 'README', 'dummy.sh', + 'packaging/gbp-test-native.spec'] + self.check_repo(None, None, []) + self.check_tree('upstream', files) + self.check_files(ls_zip(orig), + self.ls_pristine_tar('gbp-test-native-1.0.tar.gz')) + +#{ Test tarball mangling + def test_nopristinefilter(self): + """Test --no-pristine-tar-filter""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(self.mock_import(['--pristine-tar', '--filter=README', orig]), 0) + self.check_repo(None, None, []) + self.check_tree('upstream', ['Makefile', 'dummy.sh']) + self.check_files(ls_tar(orig), + self.ls_pristine_tar('gbp-test-1.0.tar.bz2')) + + def test_nofilter_prefix(self): + """Test prefix mangling without any filtering""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(self.mock_import(['--pristine-tar', '--orig-prefix=new', orig]), 0) + self.check_repo(None, None, None) + self.check_tree('upstream', ['Makefile', 'dummy.sh', 'README']) + prist_ref = set([fname.replace('gbp-test', 'new') for + fname in ls_tar(orig)]) + self.check_files(prist_ref, + self.ls_pristine_tar('gbp-test-1.0.tar.bz2')) + + def test_nopristinefilter_prefix(self): + """Test --no-pristine-tar-filter with prefix mangling""" + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + args = ['--pristine-tar', '--filter=README', '--orig-prefix=new', orig] + eq_(self.mock_import(args), 0) + self.check_repo(None, None, None) + self.check_tree('upstream', ['Makefile', 'dummy.sh']) + prist_ref = set([fname.replace('gbp-test', 'new') for + fname in ls_tar(orig)]) + self.check_files(prist_ref, + self.ls_pristine_tar('gbp-test-1.0.tar.bz2')) + + def test_filter_prefix_rename(self): + """Test --no-pristine-tar-filter with prefix mangling""" + orig = os.path.join(DATA_DIR, 'gbp-test2-2.0.tar.gz') + args = ['--pristine-tar', '--filter=README', '--orig-prefix=new', + '--pristine-tarball-name=new.tbz2', '--filter-pristine-tar', + orig] + eq_(self.mock_import(args), 0) + self.check_repo(None, None, []) + self.check_tree('upstream', ['Makefile', 'dummy.sh']) + prist_ref = set(['new', 'new/Makefile', 'new/dummy.sh']) + self.check_files(prist_ref, self.ls_pristine_tar('new.tbz2')) + + def test_dir_nopristinefilter(self): + """Test importing directory with --no-pristine-tar-filter""" + orig = self.unpack_tar(os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2')) + args = ['--pristine-tar', '--filter=README', orig] + eq_(self.mock_import(args, 'gbp-test\n1.0\n'), 0) + files = ['Makefile', 'dummy.sh'] + self.check_repo(None, None, []) + self.check_tree('upstream', ['Makefile', 'dummy.sh']) + prist_ref = set(['gbp-test-1.0/%s' % fname for fname in ls_dir(orig)] + + ['gbp-test-1.0']) + self.check_files(prist_ref, self.ls_pristine_tar('gbp-test.tar.gz')) + + def test_dir_filter_prefix(self): + """Test importing directory with prefix mangling""" + orig = self.unpack_tar(os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2')) + args = ['--pristine-tar', '--filter=README', '--filter-pristine-tar', + '--orig-prefix=', '--pristine-tarball-name=my.tgz', orig] + eq_(self.mock_import(args, 
'gbp-test\n1.0\n'), 0) + files = ['Makefile', 'dummy.sh'] + self.check_repo(None, None, []) + self.check_tree('upstream', files) + self.check_files(set(files), self.ls_pristine_tar('my.tgz')) + + +class TestBareRepo(ImportOrigTestBase): + """Test importing to a bare repository""" + + def test_basic_import_to_bare_repo(self): + """Test importing inside bare git repository""" + repo = ComponentTestGitRepository.create('.', bare=True) + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(mock_import([orig]), 0) + self._check_repo_state(repo, None, ['upstream']) + eq_(len(repo.get_commits(until='upstream')), 1) + eq_(repo.get_tags(), ['upstream/1.0']) + + # Import another version + repo.set_branch('upstream') + orig = os.path.join(DATA_DIR, 'gbp-test-1.1.tar.bz2') + eq_(mock_import([orig]), 0) + self._check_repo_state(repo, 'upstream', ['upstream']) + eq_(len(repo.get_commits(until='upstream')), 2) + + def test_pristine_import_to_bare(self): + """Test importing inside bare git repository""" + repo = ComponentTestGitRepository.create('.', bare=True) + orig = os.path.join(DATA_DIR, 'gbp-test-1.0.tar.bz2') + eq_(mock_import([orig]), 0) + # No pristine-tar branch should be present + self._check_repo_state(repo, None, ['upstream']) + diff --git a/tests/component/rpm/test_import_srpm.py b/tests/component/rpm/test_import_srpm.py index 31a18a5f..ad4937b3 100644 --- a/tests/component/rpm/test_import_srpm.py +++ b/tests/component/rpm/test_import_srpm.py @@ -54,11 +54,12 @@ def test_basic_import(self): eq_(mock_import(['--no-pristine-tar', srpm]), 0) # Check repository state repo = GitRepository('gbp-test') - files = {'Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', - 'gbp-test.spec', 'my.patch', 'my2.patch', 'my3.patch'} + files = set(['Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', + 'gbp-test.spec', 'my.patch', 'mydir/myfile.txt']) self._check_repo_state(repo, 'master', ['master', 'upstream'], files) - # Two commits: upstream and packaging files - eq_(len(repo.get_commits()), 2) + # Four commits: upstream, packaging files, one patch and the removal + # of imported patches + eq_(len(repo.get_commits()), 4) def test_basic_import2(self): """Import package with multiple spec files and full url patch""" @@ -66,13 +67,14 @@ def test_basic_import2(self): eq_(mock_import(['--no-pristine-tar', srpm]), 0) # Check repository state repo = GitRepository('gbp-test2') - files = {'Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', + files = set(['Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', 'gbp-test2.spec', 'gbp-test2-alt.spec', 'my.patch', - 'my2.patch', 'my3.patch'} + 'mydir/myfile.txt']) self._check_repo_state(repo, 'master', ['master', 'upstream'], files) - # Two commits: upstream and packaging files - eq_(len(repo.get_commits()), 2) + # Four commits: upstream, packaging files, one patch and the removal + # of imported patches + eq_(len(repo.get_commits()), 4) def test_basic_import_orphan(self): """ @@ -83,8 +85,8 @@ def test_basic_import_orphan(self): eq_(mock_import(['--no-pristine-tar', '--orphan-packaging', srpm]), 0) # Check repository state repo = GitRepository('gbp-test2') - files = {'bar.tar.gz', 'foo.txt', 'gbp-test2.spec', - 'gbp-test2-alt.spec', 'my.patch', 'my2.patch', 'my3.patch'} + files = set(['bar.tar.gz', 'foo.txt', 'gbp-test2.spec', + 'gbp-test2-alt.spec', 'my.patch', 'my2.patch', 'my3.patch']) self._check_repo_state(repo, 'master', ['master', 'upstream'], files) # Only one commit: the packaging files eq_(len(repo.get_commits()), 1) @@ -94,8 
+96,8 @@ def test_basic_native_import(self): srpm = os.path.join(DATA_DIR, 'gbp-test-native-1.0-1.src.rpm') eq_(mock_import(['--native', srpm]), 0) # Check repository state - files = {'.gbp.conf', 'Makefile', 'README', 'dummy.sh', - 'packaging/gbp-test-native.spec'} + files = set(['.gbp.conf', 'Makefile', 'README', 'dummy.sh', + 'packaging/gbp-test-native.spec']) repo = GitRepository('gbp-test-native') self._check_repo_state(repo, 'master', ['master'], files) # Only one commit: the imported source tarball @@ -111,6 +113,20 @@ def test_import_no_orig_src(self): # Only one commit: packaging files eq_(len(repo.get_commits()), 1) + def test_import_compressed_patches(self): + """Test importing of non-native src.rpm with compressed patches""" + srpm = os.path.join(DATA_DIR, 'gbp-test-1.1-2.src.rpm') + eq_(import_srpm.main(['arg0', srpm]), 0) + # Check repository state + repo = GitRepository('gbp-test') + files = set(['Makefile', 'README', 'AUTHORS', 'NEWS', 'bar.tar.gz', + 'dummy.sh', 'foo.txt', 'gbp-test.spec', 'my.patch', + 'mydir/myfile.txt']) + self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + # Six commits: upstream, packaging files, three patches and the removal + # of imported patches + eq_(len(repo.get_commits()), 6) + def test_multiple_versions(self): """Test importing of multiple versions""" srpms = [ os.path.join(DATA_DIR, 'gbp-test-1.0-1.src.rpm'), @@ -119,21 +135,22 @@ def test_multiple_versions(self): eq_(mock_import(['--no-pristine-tar', srpms[0]]), 0) repo = GitRepository('gbp-test') self._check_repo_state(repo, 'master', ['master', 'upstream']) - eq_(len(repo.get_commits()), 2) + eq_(len(repo.get_commits()), 4) # Try to import same version again eq_(mock_import([srpms[1]]), 0) - eq_(len(repo.get_commits()), 2) + eq_(len(repo.get_commits()), 4) eq_(len(repo.get_commits(until='upstream')), 1) - eq_(mock_import(['--no-pristine-tar', '--allow-same-version', srpms[1]]), 0) - # Added new version of packaging - eq_(len(repo.get_commits()), 3) + eq_(mock_import(['--no-pristine-tar', '--allow-same-version', + srpms[1]]), 0) + # Added new version of packaging plus one patch + eq_(len(repo.get_commits()), 7) eq_(len(repo.get_commits(until='upstream')), 1) # Import new version eq_(mock_import(['--no-pristine-tar', srpms[2]]), 0) - files = {'Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', - 'gbp-test.spec', 'my.patch', 'my2.patch', 'my3.patch'} + files = set(['Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt', + 'gbp-test.spec', 'my.patch', 'mydir/myfile.txt']) self._check_repo_state(repo, 'master', ['master', 'upstream'], files) - eq_(len(repo.get_commits()), 5) + eq_(len(repo.get_commits()), 11) eq_(len(repo.get_commits(until='upstream')), 2) # Check number of tags eq_(len(repo.get_tags('upstream/*')), 2) @@ -162,8 +179,9 @@ def test_import_to_existing(self): self._check_log(-1, 'Also check the --create-missing-branches') eq_(mock_import(['--no-pristine-tar', '--create-missing', srpm]), 0) self._check_repo_state(repo, 'master', ['master', 'upstream']) - # Four commits: our initial, upstream and packaging files - eq_(len(repo.get_commits()), 3) + # Five commits: our initial, upstream, packaging files, one patch, + # and the removal of imported patches + eq_(len(repo.get_commits()), 5) # The import should fail because missing packaging-branch srpm = os.path.join(DATA_DIR, 'gbp-test-1.1-1.src.rpm') @@ -178,14 +196,77 @@ def test_filter(self): # Check repository state repo = GitRepository('gbp-test') files = set(['Makefile', 'dummy.sh', 'bar.tar.gz', 
'foo.txt', - 'gbp-test.spec', 'my.patch', 'my2.patch', 'my3.patch']) + 'gbp-test.spec', 'my.patch', 'mydir/myfile.txt']) self._check_repo_state(repo, 'master', ['master', 'upstream'], files) + def test_tagging(self): + """Test tag options of import-srpm""" + srpm = os.path.join(DATA_DIR, 'gbp-test-1.0-1.src.rpm') + + # Invalid packaging tag keywords + eq_(mock_import(['--no-pristine-tar', '--packaging-tag=%(foo)s', srpm]), + 1) + self._check_log(-1, ".*Missing value 'foo' in {'release': '1', " + "'upstreamversion': '1.0', 'version': '1.0-1', " + "'vendor': 'downstream'}") + # Remove upstream tag + repo = GitRepository('gbp-test') + repo.delete_tag('upstream/1.0') + + # Invalid upstream tag keywords + eq_(mock_import(['--no-pristine-tar', '--upstream-tag=%(foo)s', srpm]), + 1) + self._check_log(-1, ".*Missing value 'foo' in " + "{'upstreamversion': '1.0', 'version': '1.0'}") + + # Try with good keywords, with --skip-packaging-tag + eq_(mock_import(['--no-pristine-tar', '--vendor=foo', + '--skip-packaging-tag', + '--packaging-tag=%(vendor)s/%(version)s', + '--upstream-tag=upst/%(version)s', srpm]), 0) + eq_(repo.describe('upstream'), 'upst/1.0') + eq_(len(repo.get_tags()), 1) + + # Re-import, creating packaging tag + eq_(mock_import(['--no-pristine-tar', '--vendor=foo', + '--packaging-tag=%(vendor)s/%(version)s', + '--upstream-tag=upst/%(version)s', srpm]), 0) + eq_(repo.describe('HEAD'), 'foo/1.0-1') + eq_(len(repo.get_tags()), 2) + + def test_tagging_native(self): + """Test tagging of native packages with import-srpm""" + srpm = os.path.join(DATA_DIR, 'gbp-test-native-1.0-1.src.rpm') + + # Invalid packaging tag keywords + eq_(mock_import(['--no-pristine-tar', '--packaging-tag=%(foo)s', + srpm, '--native']), 1) + self._check_log(-1, ".*Missing value 'foo' in {'release': '1', " + "'upstreamversion': '1.0', 'version': '1.0-1', " + "'vendor': 'downstream'}") + + # Try with good keywords, with --skip-packaging-tag. 
+ # Upstream tag format should not matter + eq_(mock_import(['--no-pristine-tar', '--vendor=foo', '--native', + '--skip-packaging-tag', + '--packaging-tag=%(vendor)s/%(version)s', + '--upstream-tag=%(foo)s', srpm]), 0) + repo = GitRepository('gbp-test-native') + eq_(len(repo.get_tags()), 0) + + # Run again, now creating packaging tag + eq_(mock_import(['--no-pristine-tar', '--vendor=foo', '--native', + '--packaging-tag=%(vendor)s/%(version)s', + '--upstream-tag=%(foo)s', srpm]), 0) + eq_(repo.describe('HEAD'), 'foo/1.0-1') + + def test_misc_options(self): """Test various options of git-import-srpm""" srpm = os.path.join(DATA_DIR, 'gbp-test2-2.0-0.src.rpm') eq_(mock_import(['--no-pristine-tar', + '--no-patch-import', '--packaging-branch=pack', '--upstream-branch=orig', '--packaging-dir=packaging', @@ -195,10 +276,10 @@ def test_misc_options(self): srpm]), 0) # Check repository state repo = GitRepository('gbp-test2') - files = {'Makefile', 'README', 'dummy.sh', 'packaging/bar.tar.gz', + files = set(['Makefile', 'README', 'dummy.sh', 'packaging/bar.tar.gz', 'packaging/foo.txt', 'packaging/gbp-test2.spec', 'packaging/gbp-test2-alt.spec', 'packaging/my.patch', - 'packaging/my2.patch', 'packaging/my3.patch'} + 'packaging/my2.patch', 'packaging/my3.patch']) self._check_repo_state(repo, 'pack', ['pack', 'orig'], files) eq_(len(repo.get_commits()), 2) # Check packaging dir @@ -311,8 +392,9 @@ def test_basic_import_pristine_tar(self): repo = GitRepository('gbp-test') self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar']) - # Two commits: upstream and packaging files - eq_(len(repo.get_commits()), 2) + # Four commits: upstream, packaging files, one patch and the removal + # of imported patches + eq_(len(repo.get_commits()), 4) def test_unsupported_archive(self): """Test importing of src.rpm with a zip source archive""" diff --git a/tests/component/rpm/test_pq_rpm.py b/tests/component/rpm/test_pq_rpm.py index 3746fb91..f0dcbe78 100644 --- a/tests/component/rpm/test_pq_rpm.py +++ b/tests/component/rpm/test_pq_rpm.py @@ -74,20 +74,20 @@ def test_invalid_config_file(self): def test_import_export(self): """Basic test for patch import and export""" repo = self.init_test_repo('gbp-test') - branches = repo.get_local_branches() + ['patch-queue/master'] + branches = repo.get_local_branches() + ['development/master'] # Test import eq_(mock_pq(['import']), 0) files = ['AUTHORS', 'dummy.sh', 'Makefile', 'NEWS', 'README', - 'mydir/myfile.txt'] - branches.append('patch-queue/master') - self._check_repo_state(repo, 'patch-queue/master', branches, files) - eq_(repo.get_merge_base('upstream', 'patch-queue/master'), + 'mydir/myfile.txt', '.gbp.conf'] + branches.append('development/master') + self._check_repo_state(repo, 'development/master', branches, files) + eq_(repo.get_merge_base('upstream', 'development/master'), repo.rev_parse('upstream')) ok_(len(repo.get_commits('', 'upstream')) < - len(repo.get_commits('', 'patch-queue/master'))) + len(repo.get_commits('', 'development/master'))) # Test export - eq_(mock_pq(['export', '--upstream-tag', 'upstream/%(version)s']), 0) + eq_(mock_pq(['export']), 0) files = ['.gbp.conf', '.gitignore', 'bar.tar.gz', 'foo.txt', 'gbp-test.spec', '0001-my-gz.patch', '0002-my-bzip2.patch', '0003-my2.patch', 'my.patch'] @@ -102,71 +102,92 @@ def test_import_export(self): def test_import_export2(self): """Another test for import and export""" repo = self.init_test_repo('gbp-test2') - branches = repo.get_local_branches() + ['patch-queue/master-orphan'] + branches = 
repo.get_local_branches() + ['development/master-orphan'] repo.set_branch('master-orphan') # Import eq_(mock_pq(['import']), 0) - files = ['dummy.sh', 'Makefile', 'README', 'mydir/myfile.txt'] - self._check_repo_state(repo, 'patch-queue/master-orphan', branches, + files = ['dummy.sh', 'Makefile', 'README', 'mydir/myfile.txt', + '.gbp.conf'] + self._check_repo_state(repo, 'development/master-orphan', branches, files) - # Test export - eq_(mock_pq(['export', '--upstream-tag', 'upstream/%(version)s', - '--spec-file', 'packaging/gbp-test2.spec']), 0) + # Test export with --drop + branches.remove('development/master-orphan') + eq_(mock_pq(['export', '--drop']), 0) self._check_repo_state(repo, 'master-orphan', branches) eq_(repo.status()[' M'], ['packaging/gbp-test2.spec']) + def test_import_in_subdir(self): + """Test running gbp-rpm-pq from a subdir in the git tree""" + repo = self.init_test_repo('gbp-test2') + repo.set_branch('master-orphan') + branches = repo.get_local_branches() + ['development/master-orphan'] + os.chdir('packaging') + + # Running from subdir should be ok + eq_(mock_pq(['import']), 0) + self._check_repo_state(repo, 'development/master-orphan', branches) + + def test_rebase(self): """Basic test for rebase action""" repo = self.init_test_repo('gbp-test') - repo.rename_branch('pq/master', 'patch-queue/master') - repo.set_branch('patch-queue/master') + repo.rename_branch('pq/master', 'development/master') + repo.set_branch('development/master') branches = repo.get_local_branches() # Make development branch out-of-sync + GitCommand("rebase")(['--onto', 'upstream^', 'upstream']) # Sanity check for our git rebase... - ok_(repo.get_merge_base('patch-queue/master', 'upstream') != + ok_(repo.get_merge_base('development/master', 'upstream') != repo.rev_parse('upstream')) # Do rebase eq_(mock_pq(['rebase']), 0) - self._check_repo_state(repo, 'patch-queue/master', branches) - ok_(repo.get_merge_base('patch-queue/master', 'upstream') == + self._check_repo_state(repo, 'development/master', branches) + ok_(repo.get_merge_base('development/master', 'upstream') == repo.rev_parse('upstream')) # Get to out-of-sync, again, and try rebase from master branch GitCommand("rebase")(['--onto', 'upstream^', 'upstream']) eq_(mock_pq(['switch']), 0) eq_(mock_pq(['rebase']), 0) - self._check_repo_state(repo, 'patch-queue/master', branches) - ok_(repo.get_merge_base('patch-queue/master', 'upstream') == + self._check_repo_state(repo, 'development/master', branches) + ok_(repo.get_merge_base('development/master', 'upstream') == repo.rev_parse('upstream')) def test_switch(self): """Basic test for switch action""" repo = self.init_test_repo('gbp-test') - branches = repo.get_local_branches() + ['patch-queue/master'] - # Switch to non-existent pq-branch should create one + branches = repo.get_local_branches() + ['development/master'] + # Switch to non-existent pq-branch should fail + eq_(mock_pq(['switch']), 1) + self._check_log(-1, ".*Branch 'development/master' does not exist") + + # Import and switch to base branch and back to pq + eq_(mock_pq(['import']), 0) eq_(mock_pq(['switch']), 0) - self._check_repo_state(repo, 'patch-queue/master', branches) + self._check_repo_state(repo, 'master', branches) + eq_(mock_pq(['switch']), 0) + self._check_repo_state(repo, 'development/master', branches) # Switch to base branch and back to pq eq_(mock_pq(['switch']), 0) self._check_repo_state(repo, 'master', branches) eq_(mock_pq(['switch']), 0) - self._check_repo_state(repo, 'patch-queue/master', branches) + 
self._check_repo_state(repo, 'development/master', branches) def test_switch_drop(self): """Basic test for drop action""" repo = self.init_test_repo('gbp-test') - repo.rename_branch('pq/master', 'patch-queue/master') - repo.set_branch('patch-queue/master') + repo.rename_branch('pq/master', 'development/master') + repo.set_branch('development/master') branches = repo.get_local_branches() # Drop pq should fail when on pq branch eq_(mock_pq(['drop']), 1) self._check_log(-1, "gbp:error: On a patch-queue branch, can't drop it") - self._check_repo_state(repo, 'patch-queue/master', branches) + self._check_repo_state(repo, 'development/master', branches) # Switch to master eq_(mock_pq(['switch']), 0) @@ -174,30 +195,28 @@ def test_switch_drop(self): # Drop should succeed when on master branch eq_(mock_pq(['drop']), 0) - branches.remove('patch-queue/master') + branches.remove('development/master') self._check_repo_state(repo, 'master', branches) def test_force_import(self): """Test force import""" repo = self.init_test_repo('gbp-test') pkg_files = repo.list_files() - repo.rename_branch('pq/master', 'patch-queue/master') - repo.set_branch('patch-queue/master') + repo.rename_branch('pq/master', 'development/master') + repo.set_branch('development/master') branches = repo.get_local_branches() pq_files = repo.list_files() # Re-import should fail eq_(mock_pq(['import']), 1) self._check_log(0, "gbp:error: Already on a patch-queue branch") - self._check_repo_state(repo, 'patch-queue/master', branches, pq_files) + self._check_repo_state(repo, 'development/master', branches, pq_files) # Mangle pq branch and try force import on top of that repo.force_head('master', hard=True) - self._check_repo_state(repo, 'patch-queue/master', branches, pkg_files) + self._check_repo_state(repo, 'development/master', branches, pkg_files) eq_(mock_pq(['import', '--force']), 0) - # .gbp.conf won't get imported by pq - pq_files.remove('.gbp.conf') - self._check_repo_state(repo, 'patch-queue/master', branches, pq_files) + self._check_repo_state(repo, 'development/master', branches, pq_files) # Switch back to master eq_(mock_pq(['switch']), 0) @@ -210,27 +229,27 @@ def test_force_import(self): # Force import should succeed eq_(mock_pq(['import', '--force']), 0) - self._check_repo_state(repo, 'patch-queue/master', branches, pq_files) + self._check_repo_state(repo, 'development/master', branches, pq_files) def test_apply(self): """Basic test for apply action""" repo = self.init_test_repo('gbp-test') upstr_files = ['dummy.sh', 'Makefile', 'README'] - branches = repo.get_local_branches() + ['patch-queue/master'] + branches = repo.get_local_branches() + ['development/master'] # No patch given eq_(mock_pq(['apply']), 1) self._check_log(-1, "gbp:error: No patch name given.") # Create a pristine pq-branch - repo.create_branch('patch-queue/master', 'upstream') + repo.create_branch('development/master', 'upstream') # Apply patch with tempfile.NamedTemporaryFile() as tmp_patch: tmp_patch.write(repo.show('master:%s' % 'my.patch')) tmp_patch.file.flush() eq_(mock_pq(['apply', tmp_patch.name]), 0) - self._check_repo_state(repo, 'patch-queue/master', branches, + self._check_repo_state(repo, 'development/master', branches, upstr_files) # Apply another patch, now when already on pq branch @@ -238,13 +257,53 @@ def test_apply(self): tmp_patch.write(repo.show('master:%s' % 'my2.patch')) tmp_patch.file.flush() eq_(mock_pq(['apply', tmp_patch.name]), 0) - self._check_repo_state(repo, 'patch-queue/master', branches, + self._check_repo_state(repo, 
'development/master', branches, upstr_files + ['mydir/myfile.txt']) + def test_convert(self): + """Basic test for convert action""" + repo = self.init_test_repo('gbp-test2') + branches = repo.get_local_branches() + ['master-orphan'] + files = ['packaging/bar.tar.gz', 'packaging/foo.txt', + 'packaging/gbp-test2.spec', 'packaging/gbp-test2-alt.spec', + 'packaging/my.patch', 'packaging/0001-My-addition.patch', + '.gbp.conf'] + # First should fail because 'master-orphan' branch already exists + eq_(mock_pq(['convert']), 1) + self._check_log(-1, "gbp:error: Branch 'master-orphan' already exists") + + # Re-try with force + eq_(mock_pq(['convert', '--force']), 0) + self._check_repo_state(repo, 'master-orphan', branches, files) + + def test_convert_fail(self): + """Tests for convert action error cases""" + repo = self.init_test_repo('gbp-test') + branches = repo.get_local_branches() + + # Already on orphan packaging branch + eq_(mock_pq(['convert']), 1) + self._check_repo_state(repo, 'master', branches) + self._check_log(-1, ".*is not based on upstream version") + + # Create a pq branch and try from there + eq_(mock_pq(['import']), 0) + eq_(mock_pq(['convert']), 1) + self._check_repo_state(repo, 'development/master', + branches + ['development/master']) + self._check_log(-1, ".*you're on patch-queue branch") + + # Switch back to orphan packaging branch and try again + eq_(mock_pq(['switch']), 0) + eq_(mock_pq(['convert']), 1) + self._check_repo_state(repo, 'master', + branches + ['development/master']) + self._check_log(-1, r".*pq branch \S+ already exists") + def test_option_patch_numbers(self): """Test the --patch-numbers cmdline option""" repo = self.init_test_repo('gbp-test') - repo.rename_branch('pq/master', 'patch-queue/master') + repo.rename_branch('pq/master', 'development/master') branches = repo.get_local_branches() # Export eq_(mock_pq(['export', '--no-patch-numbers']), 0) @@ -283,8 +342,7 @@ def test_option_spec_file(self): eq_(mock_pq(['import', '--spec-file=gbp-test.spec']), 0) # Force import on top to test parsing spec from another branch - eq_(mock_pq(['import', '--spec-file=gbp-test.spec', '--force', - '--upstream-tag', 'upstream/%(version)s']), 0) + eq_(mock_pq(['import', '--spec-file=gbp-test.spec', '--force']), 0) # Test with export, too eq_(mock_pq(['export', '--spec-file=foo.spec']), 1) @@ -302,21 +360,127 @@ def test_option_packaging_dir(self): eq_(mock_pq(['import', '--packaging-dir=.']), 0) # Test with export, --spec-file option should override packaging dir - eq_(mock_pq(['export', '--packaging-dir=foo', '--upstream-tag', - 'upstream/%(version)s', + eq_(mock_pq(['export', '--packaging-dir=foo', '--spec-file=gbp-test.spec']), 0) + def test_option_pq_branch(self): + """Test the --pq-branch option""" + repo = self.init_test_repo('gbp-test') + branches = repo.get_local_branches() + + # Invalid branch name + eq_(mock_pq(['import', '--pq-branch=%(branch)s/foo:']), 1) + self._check_log(-1, "gbp:error: Cannot create patch-queue branch") + + # Try all possible keys in pq-branch format string + eq_(mock_pq(['import', + '--pq-branch=dev/%(branch)s/%(upstreamversion)s']), 0) + branches.append('dev/master/1.1') + self._check_repo_state(repo, 'dev/master/1.1', branches) + + # Switch to non-existent packaging branch should fail + eq_(mock_pq(['switch', '--pq-branch=dev/master/1.1']), 1) + self._check_log(-1, "gbp:error: Invalid pq-branch, name format") + self._check_repo_state(repo, 'dev/master/1.1', branches) + + # Switch to existing packaging branch should be ok + 
eq_(mock_pq(['switch', + '--pq-branch=dev/%(branch)s/%(upstreamversion)s']), 0) + self._check_repo_state(repo, 'master', branches) + + def test_option_export_rev(self): + """Test the --export-rev cmdline option""" + repo = self.init_test_repo('gbp-test') + repo.rename_branch('pq/master', 'development/master') + branches = repo.get_local_branches() + + # Export directly from upstream -> no patches expected + eq_(mock_pq(['export', '--export-rev=upstream']), 0) + files = ['.gbp.conf', '.gitignore', 'bar.tar.gz', 'foo.txt', + 'gbp-test.spec', 'my.patch'] + self._check_repo_state(repo, 'master', branches, files) + + # Export another rev + eq_(mock_pq(['export', '--export-rev=development/master~2']), 0) + self._check_repo_state(repo, 'master', branches, + files + ['0001-my-gz.patch']) + + # Export from upstream..master should fail + eq_(mock_pq(['export', '--export-rev=master']), 1) + self._check_log(-1, "gbp:error: Start commit .* not an ancestor of end") + # Export invalid rev should fail + eq_(mock_pq(['export', '--export-rev=foobar']), 1) + self._check_log(-1, "gbp:error: Invalid treeish object foobar") + + # Export plain treeish. Doesn't work in pq (at least) - + # just for testing exception handling here + content = repo.list_tree('development/master') + tree = repo.make_tree(content) + eq_(mock_pq(['export', '--export-rev=%s' % tree]), 1) + self._check_log(-1, "gbp:error: Start commit .* not an ancestor of end") + + def test_option_patch_compress(self): + """Test the --patch-compress cmdline option""" + repo = self.init_test_repo('gbp-test') + repo.rename_branch('pq/master', 'development/master') + branches = repo.get_local_branches() + + # Export, all generated patches should be compressed + eq_(mock_pq(['export', '--patch-compress=1']), 0) + files = ['.gbp.conf', '.gitignore', 'bar.tar.gz', 'foo.txt', + 'gbp-test.spec', '0001-my-gz.patch.gz', + '0002-my-bzip2.patch.gz', '0003-my2.patch.gz', 'my.patch'] + self._check_repo_state(repo, 'master', branches, files) + + def test_option_patch_squash(self): + """Test the --patch-squash cmdline option""" + repo = self.init_test_repo('gbp-test') + repo.rename_branch('pq/master', 'development/master') + repo.set_branch('development/master') + branches = repo.get_local_branches() + + # Non-existent squash point should fail + eq_(mock_pq(['export', '--patch-squash=foo']), 1) + self._check_log(-1, r"gbp:error: Git command failed: revision 'foo\^0'") + + # Invalid squash point should fail + eq_(mock_pq(['export', '--patch-squash=master']), 1) + self._check_log(-1, "gbp:error: Given squash point 'master' not in the " + "history of end commit 'development/master'") + + # Squashing up to the second latest patch -> 1 "normal" patch + squash = 'development/master~1' + eq_(mock_pq(['export', '--patch-squash=%s' % squash]), 0) + squash += ':squash' + eq_(mock_pq(['export', '--patch-squash=%s' % squash]), 0) + files = ['.gbp.conf', '.gitignore', 'bar.tar.gz', 'foo.txt', + 'gbp-test.spec', 'my.patch', 'squash.diff', '0002-my2.patch'] + self._check_repo_state(repo, 'master', branches, files) + + def test_option_patch_ignore_path(self): + """Test the --patch-ignore-path cmdline option""" + repo = self.init_test_repo('gbp-test') + repo.rename_branch('pq/master', 'development/master') + branches = repo.get_local_branches() + + # Export + eq_(mock_pq(['export', '--patch-ignore-path=mydir/.*']), 0) + files = ['.gbp.conf', '.gitignore', 'bar.tar.gz', 'foo.txt', + 'gbp-test.spec', '0001-my-gz.patch', '0002-my-bzip2.patch', + 'my.patch'] + self._check_repo_state(repo, 
'master', branches, files) + def test_export_with_merges(self): """Test exporting pq-branch with merge commits""" repo = self.init_test_repo('gbp-test') - repo.rename_branch('pq/master', 'patch-queue/master') - repo.set_branch('patch-queue/master') + repo.rename_branch('pq/master', 'development/master') + repo.set_branch('development/master') branches = repo.get_local_branches() # Create a merge commit in pq-branch patches = repo.format_patches('HEAD^', 'HEAD', '.') repo.force_head('HEAD^', hard=True) - repo.commit_dir('.', 'Merge with master', 'patch-queue/master', + repo.commit_dir('.', 'Merge with master', 'development/master', ['master']) merge_rev = repo.rev_parse('HEAD', short=7) eq_(mock_pq(['apply', patches[0]]), 0) @@ -330,6 +494,61 @@ def test_export_with_merges(self): '%s-to-%s.diff' % (upstr_rev, merge_rev), '0002-my2.patch'] self._check_repo_state(repo, 'master', branches, files) + def test_option_import_files(self): + """Test the --import-files cmdline option""" + repo = self.init_test_repo('gbp-test') + + # Import with default settings (should import gbp conf files) + branches = repo.get_local_branches() + ['development/master'] + eq_(mock_pq(['import']), 0) + self._check_repo_state(repo, 'development/master', branches) + ok_('.gbp.conf' in repo.list_files()) + + # Re-import with user-defined files + eq_(mock_pq(['import', '--force', '--import-files', + 'foo.txt,my.patch']), 0) + self._check_repo_state(repo, 'development/master', branches) + ok_('foo.txt' in repo.list_files()) + ok_('my.patch' in repo.list_files()) + + # Drop and re-import with no files + eq_(mock_pq(['switch']), 0) + eq_(mock_pq(['drop']), 0) + eq_(mock_pq(['import', '--import-files=']), 0) + self._check_repo_state(repo, 'development/master', branches) + ok_('debian/gbp.conf' not in repo.list_files()) + ok_('.gbp.conf' not in repo.list_files()) + + def test_option_new_packaging_dir(self): + """Test the --new-packaging-dir cmdline option""" + repo = self.init_test_repo('gbp-test2') + branches = repo.get_local_branches() + ['master-orphan'] + files = ['rpm/bar.tar.gz', 'rpm/foo.txt', 'rpm/gbp-test2.spec', + 'rpm/gbp-test2-alt.spec', 'rpm/my.patch', + 'rpm/0001-My-addition.patch'] + # Drop already-existing master-orphan branch + repo.delete_branch('master-orphan') + # Try convert + eq_(mock_pq(['convert', '--import-files=', + '--new-packaging-dir=rpm']), 0) + self._check_repo_state(repo, 'master-orphan', branches, files) + + def test_option_retain_history(self): + """Test the --retain-history cmdline option""" + repo = self.init_test_repo('gbp-test2') + branches = repo.get_local_branches() + ['master-orphan'] + files = ['packaging/bar.tar.gz', 'packaging/foo.txt', + 'packaging/gbp-test2.spec', 'packaging/gbp-test2-alt.spec', + 'packaging/my.patch', 'packaging/0001-My-addition.patch', + '.gbp.conf'] + # Drop pre-existing master-orphan branch + repo.delete_branch('master-orphan') + + # Convert with history + eq_(mock_pq(['convert', '--retain-history']), 0) + self._check_repo_state(repo, 'master-orphan', branches, files) + eq_(len(repo.get_commits('', 'master-orphan')), 7) + def test_import_unapplicable_patch(self): """Test import when a patch does not apply""" repo = self.init_test_repo('gbp-test') @@ -338,7 +557,7 @@ def test_import_unapplicable_patch(self): with open('my2.patch', 'w') as patch_file: patch_file.write('-this-does\n+not-apply\n') eq_(mock_pq(['import']), 1) - self._check_log(-2, "(" + self._check_log(-1, "(" "Aborting|" "Please, commit your changes or stash them|" "gbp:error: Import failed.* You 
have local changes" @@ -349,6 +568,6 @@ def test_import_unapplicable_patch(self): repo.add_files(['my2.patch'], force=True) repo.commit_files(['my2.patch'], msg="Mangle patch") eq_(mock_pq(['import']), 1) - self._check_log(-2, "gbp:error: Import failed: Error running git apply") + self._check_log(-1, "gbp:error: Import failed: Error running git apply") self._check_repo_state(repo, 'master', branches) diff --git a/tests/component/rpm/test_rpm_ch.py b/tests/component/rpm/test_rpm_ch.py new file mode 100644 index 00000000..28d32c3f --- /dev/null +++ b/tests/component/rpm/test_rpm_ch.py @@ -0,0 +1,454 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2013-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +"""Tests for the git-rpm-ch tool""" + +import os +import re +from nose.tools import assert_raises, eq_, ok_ # pylint: disable=E0611 + +from gbp.scripts.rpm_ch import main as rpm_ch +from gbp.git import GitRepository + +from tests.component.rpm import RpmRepoTestBase + +# Disable "Method could be a function warning" +# pylint: disable=R0201 + + +def mock_ch(args): + """Wrapper for git-rpm-ch""" + + return rpm_ch(['arg0', '--packaging-branch=master', + '--spawn-editor=never'] + args) + +class TestRpmCh(RpmRepoTestBase): + """Basic tests for git-rpm-ch""" + + def setUp(self): + """Test case setup""" + super(TestRpmCh, self).setUp() + # Set environment so that commits succeed without git config + os.environ['GIT_AUTHOR_NAME'] = 'My Name' + os.environ['GIT_COMMITTER_NAME'] = 'My Name' + os.environ['EMAIL'] = 'me@example.com' + + @staticmethod + def read_file(filename): + """Read file to a list""" + with open(filename) as fobj: + return fobj.readlines() + + def test_invalid_args(self): + """See that git-rpm-ch fails gracefully when called with invalid args""" + GitRepository.create('.') + + with assert_raises(SystemExit): + mock_ch(['--invalid-opt']) + + def test_import_outside_repo(self): + """Run git-rpm-ch when not in a git repository""" + eq_(mock_ch([]), 1) + self._check_log(0, 'gbp:error: No Git repository at ') + + def test_invalid_config_file(self): + """Test invalid config file""" + # Create dummy invalid config file and run git-rpm-ch + GitRepository.create('.') + with open('.gbp.conf', 'w') as conffd: + conffd.write('foobar\n') + eq_(mock_ch([]), 1) + self._check_log(0, 'gbp:error: invalid config file: File contains no ' + 'section headers.') + + def test_update_spec_changelog(self): + """Test updating changelog in spec""" + repo = self.init_test_repo('gbp-test') + eq_(mock_ch([]), 0) + eq_(repo.status(), {' M': ['gbp-test.spec']}) + + def test_update_changes_file(self): + """Test updating a separate changes file""" + repo = self.init_test_repo('gbp-test-native') + eq_(mock_ch([]), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test-native.changes']}) + + def test_create_spec_changelog(self): + """Test creating changelog in spec file""" + repo = self.init_test_repo('gbp-test2') + orig_content = 
self.read_file('packaging/gbp-test2.spec') + + # Fails if no starting point is given + eq_(mock_ch([]), 1) + self._check_log(-1, "gbp:error: Couldn't determine starting point") + + # Give starting point + eq_(mock_ch(['--since=HEAD^']), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test2.spec']}) + content = self.read_file('packaging/gbp-test2.spec') + # Should contain 4 lines (%changelog, header, 1 entry and an empty line) + eq_(len(content), len(orig_content) + 4) + + def test_create_changes_file(self): + """Test creating a separate changes file""" + repo = self.init_test_repo('gbp-test2') + + # Fails if no starting point is given + eq_(mock_ch(['--changelog-file=CHANGES']), 1) + self._check_log(-1, "gbp:error: Couldn't determine starting point") + + # Give starting point + eq_(mock_ch(['--since=HEAD^', '--changelog-file=CHANGES']), 0) + eq_(repo.status(), {'??': ['packaging/gbp-test2.changes']}) + content = self.read_file('packaging/gbp-test2.changes') + # Should contain 3 lines (header, 1 entry and an empty line) + eq_(len(content), 3) + + def test_option_all(self): + """Test the --all cmdline option""" + repo = self.init_test_repo('gbp-test2') + + eq_(mock_ch(['--changelog-file=CHANGES', '--all']), 0) + content = self.read_file('packaging/gbp-test2.changes') + # Should contain N+2 lines (header, N commits and an empty line) + commit_cnt = len(repo.get_commits(since=None, until='master')) + eq_(len(content), commit_cnt + 2) + + def test_option_changelog_file(self): + """Test the --changelog-file cmdline option""" + repo = self.init_test_repo('gbp-test-native') + + # Guess changelog file + eq_(mock_ch(['--changelog-file=CHANGES']), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test-native.changes']}) + + # Use spec file as changelog + eq_(mock_ch(['--changelog-file=SPEC', '--since=HEAD^']), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test-native.changes', + 'packaging/gbp-test-native.spec']}) + + # Arbitrary name + eq_(mock_ch(['--changelog-file=foo.changes', '--since=HEAD^']), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test-native.changes', + 'packaging/gbp-test-native.spec'], + '??': ['foo.changes']}) + + def test_option_spec_file(self): + """Test the --spec-file cmdline option""" + repo = self.init_test_repo('gbp-test2') + + eq_(mock_ch(['--spec-file=foo.spec']), 1) + self._check_log(-1, "gbp:error: Unable to read spec file") + + eq_(mock_ch(['--spec-file=']), 1) + self._check_log(-1, "gbp:error: Multiple spec files found") + + eq_(mock_ch(['--spec-file=packaging/gbp-test2.spec', '--since=HEAD^']), + 0) + eq_(repo.status(), {' M': ['packaging/gbp-test2.spec']}) + + def test_option_packaging_dir(self): + """Test the --packaging-dir cmdline option""" + repo = self.init_test_repo('gbp-test-native') + + eq_(mock_ch(['--packaging-dir=foo']), 1) + self._check_log(-1, "gbp:error: No spec file found") + + # Packaging dir should be taken from spec file if it is defined + eq_(mock_ch(['--packaging-dir', 'foo', '--spec-file', + 'packaging/gbp-test-native.spec']), 0) + eq_(repo.status(), {' M': ['packaging/gbp-test-native.changes']}) + + def test_branch_options(self): + """Test the --packaging-branch and --ignore-branch cmdline options""" + self.init_test_repo('gbp-test-native') + + eq_(mock_ch(['--packaging-branch=foo']), 1) + self._check_log(-2, "gbp:error: You are not on branch 'foo'") + + eq_(mock_ch(['--packaging-branch=foo', '--ignore-branch']), 0) + + def test_option_meta_bts(self): + """Test parsing of the bts meta tags""" + repo = self.init_test_repo('gbp-test-native') + + # 
Create a dummy commit that references bts + with open('new-file', 'w') as fobj: + fobj.write('foobar\n') + repo.add_files('new-file') + repo.commit_all('Fix\n\nCloses: #123\nFixes: #456\n Fixes: #789') + + eq_(mock_ch(['--since=HEAD^']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + # rpm-ch shouldn't have picked the ref with leading whitespace + eq_(content[1], '- Fix (Closes: #123) (Fixes: #456)\n') + + # Check the --meta-bts option + eq_(mock_ch(['--since=HEAD^', '--meta-bts=Fixes']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + eq_(content[1], '- Fix (Fixes: #456)\n') + + def test_option_no_release(self): + """Test the --no-release cmdline option""" + self.init_test_repo('gbp-test-native') + orig_content = self.read_file('packaging/gbp-test-native.changes') + + eq_(mock_ch(['--no-release']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + # Only one line (entry) added + eq_(len(content), len(orig_content) + 1) + + def test_author(self): + """Test determining the author name/email""" + repo = self.init_test_repo('gbp-test-native') + + # Test taking email address from env + os.environ['EMAIL'] = 'user@host.com' + eq_(mock_ch([]), 0) + header = self.read_file('packaging/gbp-test-native.changes')[0] + ok_(re.match(r'.+ .+', header)) + + # Missing git config setting should not cause a failure + del os.environ['EMAIL'] + del os.environ['GIT_AUTHOR_NAME'] + os.environ['GIT_CONFIG_NOSYSTEM'] = '1' + os.environ['HOME'] = os.path.abspath('.') + eq_(mock_ch(['--git-author', '--since=HEAD^1']), 0) + + # Test the --git-author option + with open(os.path.join(repo.git_dir, 'config'), 'a') as fobj: + fobj.write('[user]\n name=John Doe\n email=jd@host.com\n') + eq_(mock_ch(['--git-author', '--since=HEAD^']), 0) + header = self.read_file('packaging/gbp-test-native.changes')[0] + ok_(re.match(r'.+ John Doe .+', header), header) + + def test_option_full(self): + """Test the --full cmdline option""" + repo = self.init_test_repo('gbp-test-native') + orig_content = self.read_file('packaging/gbp-test-native.changes') + + eq_(mock_ch(['--full', '--since=HEAD^']), 0) + commit_msg_body = repo.get_commit_info('HEAD')['body'] + full_msg = [line for line in commit_msg_body.splitlines() if line] + content = self.read_file('packaging/gbp-test-native.changes') + # New lines: header, 1 entry "header", entry "body" from commit message + # and one empty line + eq_(len(content), len(orig_content) + 3 + len(full_msg)) + + def test_option_ignore_regex(self): + """Test the --ignore-regex cmdline option""" + repo = self.init_test_repo('gbp-test-native') + orig_content = self.read_file('packaging/gbp-test-native.changes') + + eq_(mock_ch(['--full', '--since', 'HEAD^', '--ignore-regex', + 'Signed-off-by:.*']), 0) + commit_msg_body = repo.get_commit_info('HEAD')['body'] + full_msg = [line for line in commit_msg_body.splitlines() if + (line and not line.startswith('Signed-off-by:'))] + content = self.read_file('packaging/gbp-test-native.changes') + # New lines: header, 1 entry "header", filtered entry "body" from + # commit message and one empty line + eq_(len(content), len(orig_content) + 3 + len(full_msg)) + + def test_option_id_len(self): + """Test the --id-len cmdline option""" + repo = self.init_test_repo('gbp-test-native') + + eq_(mock_ch(['--id-len=10']), 0) + commit_id = repo.rev_parse('HEAD', 10) + content = self.read_file('packaging/gbp-test-native.changes') + ok_(content[1].startswith('- [%s] ' % commit_id)) + + def 
test_option_changelog_revision(self): + """Test the --changelog-revision cmdline option""" + self.init_test_repo('gbp-test-native') + + # Test invalid format (unknown field) + eq_(mock_ch(['--changelog-revision=%(unknown_field)s']), 1) + self._check_log(-1, 'gbp:error: Unable to construct revision field') + + # Test acceptable format + eq_(mock_ch(['--changelog-revision=foobar']), 0) + header = self.read_file('packaging/gbp-test-native.changes')[0] + ok_(re.match(r'.+ foobar$', header)) + + def test_option_commit(self): + """Test the --commit cmdline option""" + repo = self.init_test_repo('gbp-test') + + # Check unclean repo + with open('untracked-file', 'w') as fobj: + fobj.write('this file is not tracked\n') + with open('foo.txt', 'a') as fobj: + fobj.write('new stuff\n') + + # Unstaged file (foo.txt) -> failure + eq_(mock_ch(['--commit', '--since=HEAD^']), 1) + self._check_log(-1, 'gbp:error: Please commit or stage your changes') + + # Add file, update and commit, untracked file should be ignored + repo.add_files('foo.txt') + sha = repo.rev_parse('HEAD') + eq_(mock_ch(['--commit', '--since=HEAD^']), 0) + eq_(sha, repo.rev_parse('HEAD^')) + eq_(repo.get_commit_info('HEAD')['files'], + {'M': ['foo.txt', 'gbp-test.spec']}) + + def test_option_commit_msg(self): + """Test the --commit-msg cmdline option""" + repo = self.init_test_repo('gbp-test2') + + eq_(mock_ch(['--commit', '--since=HEAD^', '--commit-msg=Foo']), 0) + eq_(repo.get_commit_info('HEAD')['subject'], 'Foo') + + # Unknown key in format string causes failure + eq_(mock_ch(['--commit', '--since=HEAD^', '--commit-msg=%(foo)s']), 1) + self._check_log(-1, "gbp:error: Unknown key 'foo' in commit-msg string") + + def test_tagging(self): + """Test committing/tagging""" + repo = self.init_test_repo('gbp-test-native') + + # Update and commit+tag + eq_(mock_ch(['--tag', '--packaging-tag=new-tag', '--since=HEAD^']), 0) + ok_(repo.has_tag('new-tag')) + sha = repo.rev_parse('HEAD') + eq_(sha, repo.rev_parse('new-tag^0')) + + # Should fail if the tag already exists + eq_(mock_ch(['--tag', '--packaging-tag=new-tag', '--since=HEAD^']), 1) + + # Update and commit+tag + eq_(mock_ch(['--tag', '--packaging-tag=new-tag', '--since=HEAD^', + '--retag']), 0) + ok_(repo.has_tag('new-tag')) + sha2 = repo.rev_parse('HEAD') + ok_(sha2 != sha) + eq_(sha2, repo.rev_parse('new-tag^0')) + + def test_tagging2(self): + """Test committing/tagging spec file""" + repo = self.init_test_repo('gbp-test2') + + # Check unclean repo + with open('untracked-file', 'w') as fobj: + fobj.write('this file is not tracked\n') + with open('README', 'a') as fobj: + fobj.write('some new content\n') + + # Unstaged file (README) -> failure + eq_(mock_ch(['--tag', '--packaging-tag=new-tag', '--since=HEAD^']), 1) + self._check_log(-1, 'gbp:error: Please commit or stage your changes') + + # Add file, update and commit+tag, untracked file should be ignored + repo.add_files('README') + eq_(mock_ch(['--tag', '--packaging-tag=new-tag', '--since=HEAD^']), 0) + ok_(repo.has_tag('new-tag')) + sha = repo.rev_parse('HEAD') + eq_(sha, repo.rev_parse('new-tag^0')) + + def test_option_editor_cmd(self): + """Test the --editor-cmd and --spawn-editor cmdline options""" + repo = self.init_test_repo('gbp-test-native') + eq_(mock_ch(['--spawn-editor=release', '--editor-cmd=rm']), 0) + eq_(repo.status(), {' D': ['packaging/gbp-test-native.changes']}) + + repo.force_head('HEAD', hard=True) + ok_(repo.is_clean()) + + os.environ['EDITOR'] = 'rm' + eq_(mock_ch(['--spawn-editor=always', '--editor-cmd=']), + 0) + + def 
test_option_message(self): + """Test the --message cmdline option""" + self.init_test_repo('gbp-test-native') + orig_content = self.read_file('packaging/gbp-test-native.changes') + + eq_(mock_ch(['--message', 'my entry\nanother entry']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + # Added header, two entries and a blank line + eq_(len(content), len(orig_content) + 4) + eq_(content[2], '- another entry\n') + + def test_user_customizations(self): + """Test the user customizations""" + repo = self.init_test_repo('gbp-test-native') + + # Non-existent customization file + eq_(mock_ch(['--customizations=customizations.py']), 1) + + # Create user customizations file + with open('customizations.py', 'w') as fobj: + fobj.write("class ChangelogEntryFormatter(object):\n") + fobj.write(" @classmethod\n") + fobj.write(" def compose(cls, commit_info, **kwargs):\n") + fobj.write(" return ['- %s' % commit_info['id']]\n") + + eq_(mock_ch(['--customizations=customizations.py']), 0) + entry = self.read_file('packaging/gbp-test-native.changes')[1] + sha = repo.rev_parse('HEAD') + eq_(entry, '- %s\n' % sha) + + def test_paths(self): + """Test tracking of certain paths only""" + repo = self.init_test_repo('gbp-test-native') + orig_content = self.read_file('packaging/gbp-test-native.changes') + + # Add new commit with known content + with open('new-file.txt', 'w') as fobj: + fobj.write('this is new content\n') + repo.add_files('new-file.txt') + repo.commit_staged('Add new file') + + # Only track a non-existent file + eq_(mock_ch(['--since=HEAD^', 'non-existent-path']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + # New lines: header and one empty line, no entries + eq_(len(content), len(orig_content) + 2) + + # Track existing file + repo.force_head('HEAD', hard=True) + eq_(mock_ch(['--since=HEAD^', 'new-file.txt']), 0) + content = self.read_file('packaging/gbp-test-native.changes') + # New lines: header, one entry line and one empty line + eq_(len(content), len(orig_content) + 3) + + def test_commit_guessing(self): + """Basic tests for guessing the starting point""" + repo = self.init_test_repo('gbp-test-native') + + # Check 'tagname' that is not found + eq_(mock_ch(['--changelog-revision=%(tagname)s']), 0) + self._check_log(0, 'gbp:warning: Changelog points to tagname') + + # Check 'upstreamversion' and 'release' fields + repo.force_head('HEAD', hard=True) + eq_(mock_ch(['--changelog-revision=%(upstreamversion)s-%(release)s']), + 0) + + def test_commit_guessing_fail(self): + """Test for failure of start commit guessing""" + repo = self.init_test_repo('gbp-test-native') + + # Add "very old" header to changelog + with open('packaging/gbp-test-native.changes', 'w') as ch_fp: + ch_fp.write('* Sat Jan 01 2000 User 123\n- foo\n') + # rpm-ch should fail by not being able to find any commits before the + # last changelog section + eq_(mock_ch([]), 1) + self._check_log(-1, "gbp:error: Couldn't determine starting point") + diff --git a/tests/test_GitRepository.py b/tests/test_GitRepository.py index fed8b02d..1a79d527 100644 --- a/tests/test_GitRepository.py +++ b/tests/test_GitRepository.py @@ -68,6 +68,7 @@ def test_add_files(): - L{gbp.git.GitRepository.add_files} - L{gbp.git.GitRepository.commit_all} - L{gbp.git.GitRepository.is_clean} + - L{gbp.git.GitRepository.status} Properties tested: - L{gbp.git.GitRepository.head} @@ -80,7 +81,12 @@ def test_add_files(): False >>> repo.is_clean(ignore_untracked=True)[0] True + >>> repo.add_files('testfile', force=True, 
untracked=False) + >>> repo.status().items() + [('??', ['testfile'])] >>> repo.add_files(repo.path, force=True) + >>> repo.status().items() + [('A ', ['testfile'])] >>> repo.commit_all(msg="foo") >>> repo.is_clean()[0] True @@ -590,8 +596,9 @@ def test_clone(): >>> clone.get_merge_branch('bar') # None if no merge branch exists >>> clone.get_local_branches() ['bar', 'foo', 'master'] - >>> clone.get_remote_repos() - ['origin'] + >>> remotes = clone.get_remote_repos() + >>> {'origin': [repo_dir, repo_dir]} == remotes + True >>> clone.has_remote_repo('origin') True >>> clone.has_branch('origin/master', remote=True) @@ -696,7 +703,7 @@ def test_create_bare(): >>> bare = gbp.git.GitRepository.create(bare_dir, bare=True, description="msg") >>> bare.path == bare_dir True - >>> bare.git_dir[:-1] == bare_dir + >>> bare.git_dir == bare_dir True >>> type(bare) == gbp.git.GitRepository True @@ -717,7 +724,7 @@ def test_nonexistant(): >>> bare = gbp.git.GitRepository("/does/not/exist") Traceback (most recent call last): ... - GitRepositoryError: No Git repository at '/does/not/exist' + GitRepositoryError: No Git repository at '/does/not/exist' (or any parent dir) """ def test_create_noperm(): diff --git a/tests/test_PristineTar.py b/tests/test_PristineTar.py index 150dbcfb..addfa2ae 100644 --- a/tests/test_PristineTar.py +++ b/tests/test_PristineTar.py @@ -72,7 +72,7 @@ def test_create_tarball(): >>> import gbp.deb.git >>> repo = gbp.deb.git.DebianGitRepository(repo_dir) - >>> repo.archive('tar', 'upstream/', '../upstream_1.0.orig.tar', 'upstream') + >>> repo.archive('tar', 'upstream/', '%s/../upstream_1.0.orig.tar' % repo_dir, 'upstream') >>> gbp.command_wrappers.Command('gzip', [ '-n', '%s/../upstream_1.0.orig.tar' % repo_dir])() """ diff --git a/tests/test_import_orig.py b/tests/test_import_orig.py new file mode 100644 index 00000000..32ee82e1 --- /dev/null +++ b/tests/test_import_orig.py @@ -0,0 +1,194 @@ +# vim: set fileencoding=utf-8 : +# +# (C) 2013-2015 Intel Corporation +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, please see +# +"""Test import-orig functions""" +from . 
import context + +import glob +import os +import tarfile +import tempfile +# Try unittest2 for CentOS +try: + import unittest2 as unittest +except ImportError: + import unittest + +from gbp.errors import GbpError +from gbp.pkg import UpstreamSource +from gbp.scripts.common.import_orig import prepare_sources +from gbp.scripts.import_orig import find_source +from tests.testutils import ls_dir, ls_tar + + +class TestImportOrigBase(unittest.TestCase): + """Base class for handling context""" + @classmethod + def setup_class(cls): + """Class set-up, run only once""" + cls._tmpdir = str(context.new_tmpdir(__name__)) + + @classmethod + def teardown_class(cls): + """Class teardown, run only once""" + context.teardown() + + +class TestFindSource(TestImportOrigBase): + """Test the Debian-specific find_source() function""" + + def test_failure(self): + """Test failure modes""" + with self.assertRaisesRegexp(GbpError, + "More than one archive specified"): + find_source(False, ['too', 'much']) + + with self.assertRaisesRegexp(GbpError, + "No archive to import specified"): + find_source(False, []) + + with self.assertRaisesRegexp(GbpError, + "you can't pass both --uscan and a filename"): + find_source(True, ['tarball']) + + def test_success(self): + """Successfully get source archive""" + tar_fn = 'tarball.tar' + # Create dummy (empty) tarball + tarfile.open(tar_fn, 'w' ).close() + self.assertEqual(os.path.abspath(tar_fn), + find_source(False, [tar_fn]).path) + + +class TestPrepareSources(TestImportOrigBase): + """Test the prepare_sources() function""" + test_pkg_name = 'test' + test_pkg_ver = '1.0' + + @staticmethod + def _create_test_sources(destdir): + """Create dummy source archives""" + destdir = os.path.abspath(destdir) + origs = {} + + # "Normall" gzipped tarball + archive_fn = os.path.join(destdir, 'test-1.0.tar.gz') + src_dir = os.path.join(context.projectdir, 'gbp') + tarobj = tarfile.open(archive_fn, mode='w:gz') + for fname in (glob.glob('%s/*.py' % src_dir) + + glob.glob('%s/pkg/*.py' % src_dir)): + arcname = 'test-1.0/' + os.path.relpath(fname, src_dir) + tarobj.add(fname, arcname=arcname) + tarobj.close() + origs['tar'] = archive_fn + + # Unpacked sources + tarobj = tarfile.open(origs['tar'], 'r') + tarobj.extractall(destdir) + tarobj.close() + origs['dir'] = os.path.join(destdir,'test-1.0') + return origs + + @classmethod + def setup_class(cls): + """Class set-up, run only once""" + super(TestPrepareSources, cls).setup_class() + # Different source archives + cls._origs = cls._create_test_sources(cls._tmpdir) + + def test_dir(self): + """Basic test for unpacked sources, no filtering etc""" + tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='dir_basic_') + source = UpstreamSource(self._origs['dir']) + orig, prist = prepare_sources(source, 'test', '1.0', None, + None, False, None, tmpdir) + self.assertEqual(ls_dir(self._origs['dir']), ls_dir(orig)) + self.assertEqual(prist, '') + + def test_dir_filter(self): + """Test filtering of unpacked sources""" + tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='dir_filter_') + source = UpstreamSource(self._origs['dir']) + orig, prist = prepare_sources(source, 'test', '1.0', None, + ['pkg'], False, None, tmpdir) + orig_filt_ref = set([fname for fname in ls_dir(self._origs['dir']) + if not fname.startswith('pkg')]) + self.assertEqual(orig_filt_ref, ls_dir(orig)) + self.assertEqual(prist, '') + + def test_dir_pristine_nofilter(self): + """Test filtering of unpacked sources, not filtering pristine-tar""" + tmpdir = tempfile.mkdtemp(dir=self._tmpdir, 
+        source = UpstreamSource(self._origs['dir'])
+        orig, prist = prepare_sources(source, 'test', '1.0', 'test.tar.gz',
+                                      ['pkg'], False, None, tmpdir)
+        src_ls = ls_dir(self._origs['dir'])
+        orig_filt_ref = set([fname for fname in src_ls
+                             if not fname.startswith('pkg')])
+        prist_ref = set(['test-1.0/%s' % fname for fname in src_ls] +
+                        ['test-1.0'])
+        self.assertEqual(orig_filt_ref, ls_dir(orig))
+        self.assertEqual(prist_ref, ls_tar(prist))
+
+    def test_dir_pristine_filter(self):
+        """Test filtering pristine-tar and mangling prefix"""
+        tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='dir_filter3_')
+        source = UpstreamSource(self._origs['dir'])
+        orig, prist = prepare_sources(source, 'test', '1.0', 'test.tar.gz',
+                                      ['pkg'], True, 'newpref', tmpdir)
+        src_ls = ls_dir(self._origs['dir'])
+        orig_filt_ref = set([fname for fname in src_ls
+                             if not fname.startswith('pkg')])
+        prist_ref = set(['newpref/%s' % fname for fname in orig_filt_ref] +
+                        ['newpref'])
+        self.assertEqual(orig_filt_ref, ls_dir(orig))
+        self.assertEqual(prist_ref, ls_tar(prist))
+
+    def test_tar(self):
+        """Basic test for tarball sources, with pristine-tar"""
+        tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='tar_basic_')
+        source = UpstreamSource(self._origs['tar'])
+        orig, prist = prepare_sources(source, 'test', '1.0', 'test.tgz',
+                                      None, False, 'test-1.0', tmpdir)
+        src_ls = ls_tar(self._origs['tar'])
+        orig_ref = set([fname.replace('test-1.0/', '') for fname in src_ls
+                        if fname != 'test-1.0'])
+        self.assertEqual(orig_ref, ls_dir(orig))
+        self.assertEqual(src_ls, ls_tar(prist))
+
+    def test_tar_pristine_prefix(self):
+        """Test tarball import with prefix mangling"""
+        tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='tar_prefix_')
+        source = UpstreamSource(self._origs['tar'])
+        _orig, prist = prepare_sources(source, 'test', '1.0', 'test.tgz',
+                                       None, False, 'np', tmpdir)
+        src_ls = ls_tar(self._origs['tar'])
+        prist_ref = set([fname.replace('test-1.0', 'np') for fname in src_ls])
+        self.assertEqual(prist_ref, ls_tar(prist))
+
+    def test_tar_filter_pristine_prefix(self):
+        """Filter tarball, mangle pristine-tar prefix, don't filter it"""
+        tmpdir = tempfile.mkdtemp(dir=self._tmpdir, prefix='tar_filter_')
+        source = UpstreamSource(self._origs['tar'])
+        orig, prist = prepare_sources(source, 'test', '1.0', 'test.tgz',
+                                      ['pkg'], False, 'newp', tmpdir)
+        src_ls = ls_tar(self._origs['tar'])
+        orig_ref = set([fname.replace('test-1.0/', '') for fname in src_ls
+                        if fname != 'test-1.0' and not fname.startswith('test-1.0/pkg')])
+        prist_ref = set([fname.replace('test-1.0', 'newp') for fname in src_ls])
+        self.assertEqual(orig_ref, ls_dir(orig))
+        self.assertEqual(prist_ref, ls_tar(prist))
+
diff --git a/tests/test_rpm_changelog.py b/tests/test_rpm_changelog.py
new file mode 100644
index 00000000..b31824ab
--- /dev/null
+++ b/tests/test_rpm_changelog.py
@@ -0,0 +1,226 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2014-2015 Intel Corporation
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, please see
+# <http://www.gnu.org/licenses/>
+"""Test RPM changelog classes and parsing"""
+
+from datetime import datetime
+from nose.tools import assert_raises, eq_, ok_ # pylint: disable=E0611
+from tempfile import NamedTemporaryFile
+
+from gbp.rpm.changelog import _ChangelogHeader, _ChangelogEntry
+from gbp.rpm.changelog import _ChangelogSection, Changelog
+from gbp.rpm.changelog import ChangelogParser, ChangelogError
+from gbp.rpm.policy import RpmPkgPolicy
+
+
+class TestChangelogHeader(object):
+    """Test the _ChangelogHeader class"""
+
+    def test_str_format(self):
+        """Basic test for header"""
+        time = datetime(2014, 01, 29, 12, 13, 14)
+        header = _ChangelogHeader(RpmPkgPolicy, time, name="John Doe",
+                                  email="user@host.com", revision="1")
+        eq_(str(header), "* Wed Jan 29 2014 John Doe <user@host.com> 1\n")
+
+    def test_str_format_err(self):
+        """Test missing properties"""
+        time = datetime(2014, 01, 29, 12, 13, 14)
+        header = _ChangelogHeader(RpmPkgPolicy, time, name="John", revision="1")
+        with assert_raises(ChangelogError):
+            str(header)
+
+    def test_container(self):
+        """Test the container methods of the class"""
+        header = _ChangelogHeader(RpmPkgPolicy, datetime(2014, 1, 1), name="N",
+                                  revision="1")
+        # Test __getitem__()
+        eq_(header['name'], "N")
+        eq_(header['email'], None)
+        # Test __contains__()
+        ok_('name' in header)
+        ok_('foo' not in header)
+
+
+class TestChangelogEntry(object):
+    """Test the _ChangelogEntry class"""
+
+    def test_str_format(self):
+        """Basic test"""
+        entry = _ChangelogEntry(RpmPkgPolicy, author="John Doe",
+                                text="- foo\n bar")
+        eq_(str(entry), "- foo\n bar\n")
+
+
+class TestChangelogSection(object):
+    """Test the _ChangelogSection class"""
+
+    def setup(self):
+        """Initialize test"""
+        time = datetime(2014, 01, 29, 12, 13, 14)
+        self.default_sect = _ChangelogSection(RpmPkgPolicy, time, name="J. D.",
+                                              email="u@h", revision="1")
+        entry = _ChangelogEntry(RpmPkgPolicy, "J. D.", "- my change")
+        self.default_sect.entries = [entry]
+
+    def test_str_format(self):
+        """Basic test"""
+        section = self.default_sect
+        eq_(str(section), "* Wed Jan 29 2014 J. D. <u@h> 1\n- my change\n\n")
+
+    def test_append_entry(self):
+        """Test the append_entry() method"""
+        section = self.default_sect
+        entry = _ChangelogEntry(RpmPkgPolicy, author="",
+                                text="- another\n change")
+        new_entry = section.append_entry(entry)
+        eq_(str(section), "* Wed Jan 29 2014 J. D. <u@h> 1\n- my change\n"
+                          "- another\n change\n\n")
+        eq_(new_entry, section.entries[-1])
+
+    def test_set_header(self):
+        """Test set_header() method"""
+        section = self.default_sect
+        time = datetime(2014, 01, 30)
+        section.set_header(time=time, name="Jane", email="u@h", revision="1.1")
+        eq_(str(section), "* Thu Jan 30 2014 Jane <u@h> 1.1\n- my change\n\n")
+
+
+class TestChangelogParser(object):
+    """Test the default changelog parser"""
+
+    cl_default_style = """\
+* Wed Jan 29 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+- Version bump
+- Drop foo.patch
+
+* Tue Jan 28 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.2
+- Update to 0.2
+
+* Mon Jan 27 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.1
+- Initial version
+"""
+    cl_with_authors = """\
+* Wed Jan 29 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+[Markus Lehtonen]
+- Version bump
+[John Doe]
+- Bug fix
+"""
+    # Invalid timestamp / name
+    cl_broken_header_1 = """\
+* Wed Jan 29 2014Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+- Version bump
+"""
+    # Whitespace before the asterisk in the header
+    cl_broken_header_2 = """\
+ * Wed Jan 29 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+- Version bump
+"""
+    # Invalid timestamp
+    cl_broken_header_3 = """\
+* Wed Jan 32 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+- Version bump
+"""
+    # Missing email
+    cl_broken_header_4 = """\
+* Wed Jan 29 2014 Markus Lehtonen 0.3-1
+- Version bump
+"""
+    # Garbage before section header
+    cl_broken_header_5 = """\
+---garbage---
+* Wed Jan 29 2014 Markus Lehtonen <markus.lehtonen@linux.intel.com> 0.3-1
+- Version bump
+"""
+
+    parser = ChangelogParser(RpmPkgPolicy)
+
+    def test_parse_changelog(self):
+        """Basic tests for successful parsing"""
+        # Raw parsing of changelog
+        changelog = self.parser.raw_parse_string(self.cl_default_style)
+        eq_(len(changelog.sections), 3)
+
+        # Check that re-creating the changelog doesn't mangle it
+        eq_(str(changelog), self.cl_default_style)
+
+        # Parse and check section
+        section = self.parser.parse_section(changelog.sections[0])
+
+        eq_(section.header['time'], datetime(2014, 1, 29))
+        eq_(section.header['name'], "Markus Lehtonen")
+        eq_(section.header['email'], "markus.lehtonen@linux.intel.com")
+        eq_(section.header['revision'], "0.3-1")
+
+        # Check that re-creating the section doesn't mangle it
+        eq_(str(section), changelog.sections[0])
+
+    def test_parse_authors(self):
+        """Test parsing of authors from changelog entries"""
+        section = self.parser.parse_section(self.cl_with_authors)
+        eq_(section.entries[0].author, "Markus Lehtonen")
+        eq_(section.entries[1].author, "John Doe")
+
+    def test_parse_changelog_file(self):
+        """Basic tests for parsing a file"""
+        # Create file and parse it
+        tmpfile = NamedTemporaryFile()
+        tmpfile.write(self.cl_default_style)
+        tmpfile.file.flush()
+        changelog = self.parser.raw_parse_file(tmpfile.name)
+        # Check parsing results
+        eq_(len(changelog.sections), 3)
+        eq_(str(changelog), self.cl_default_style)
+        # Cleanup
+        tmpfile.close()
+
+    def test_parse_section_fail(self):
+        """Basic tests for failures of changelog section parsing"""
+        with assert_raises(ChangelogError):
+            self.parser.parse_section(self.cl_broken_header_1)
+
+        with assert_raises(ChangelogError):
+            self.parser.parse_section(self.cl_broken_header_2)
+
+        with assert_raises(ChangelogError):
+            self.parser.parse_section(self.cl_broken_header_3)
+
+        with assert_raises(ChangelogError):
+            self.parser.parse_section(self.cl_broken_header_4)
+
+    def test_parse_changelog_fail(self):
+        """Basic tests for changelog parsing failures"""
+        with assert_raises(ChangelogError):
+            self.parser.raw_parse_string(self.cl_broken_header_5)
+
+
+class TestChangelog(object):
+    """Unit tests for the Changelog class"""
+
+    def basic_test(self):
+        """Test basic initialization"""
+        changelog = Changelog(RpmPkgPolicy)
+        eq_(str(changelog), "")
+
+    def test_add_section(self):
+        """Test the add_section() method"""
+        changelog = Changelog(RpmPkgPolicy)
+        time = datetime(2014, 01, 30)
+        new_section = changelog.add_section(time=time, name="Jane Doe",
+                                            email="j@doe.com", revision="1.2")
+        eq_(str(changelog), "* Thu Jan 30 2014 Jane Doe <j@doe.com> 1.2\n\n")
+        eq_(new_section, changelog.sections[0])
diff --git a/tests/testutils/debiangittestrepo.py b/tests/testutils/debiangittestrepo.py
index 61456f1e..cec45fbe 100644
--- a/tests/testutils/debiangittestrepo.py
+++ b/tests/testutils/debiangittestrepo.py
@@ -3,7 +3,11 @@ from .. import context
 
 import os
-import unittest
+# Try unittest2 for CentOS
+try:
+    import unittest2 as unittest
+except ImportError:
+    import unittest
 
 import gbp.deb.git
diff --git a/tests/testutils/gbplogtester.py b/tests/testutils/gbplogtester.py
index 0c951992..9182eed5 100644
--- a/tests/testutils/gbplogtester.py
+++ b/tests/testutils/gbplogtester.py
@@ -10,6 +10,12 @@ class GbpLogTester(object):
     """
     Helper class for tests that need to capture logging output
     """
+    @classmethod
+    def init_class(cls):
+        """Test class setup"""
+        # Initialize gbp logging module
+        gbp.log.initialize()
+
     def __init__(self):
         """Object initialization"""
         self._log = None