Home Home > GIT Browse > SLE12-SP3-AZURE
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMichal Suchanek <msuchanek@suse.de>2019-01-16 11:14:37 +0100
committerMichal Suchanek <msuchanek@suse.de>2019-01-16 11:14:37 +0100
commit67e7ae95e7780590054db8b1b019772f6363f672 (patch)
tree81fb554de0f98f54caadd498944825323c735c04
parent896d0f8108f9c58ec97735ed5c06242c819cd3c9 (diff)
parent70993c128b6fbbe5e47a9757e14d60ffa7843668 (diff)
Merge branch 'scripts' into cve/linux-4.4
-rw-r--r--README37
-rw-r--r--scripts/README.scripts4
-rwxr-xr-xscripts/bs-upload-kernel7
-rwxr-xr-xscripts/bugzilla-cli18
-rwxr-xr-xscripts/bugzilla-create189
-rwxr-xr-xscripts/bugzilla-resolve24
-rw-r--r--scripts/bugzilla/__init__.py39
-rw-r--r--scripts/bugzilla/_cli.py1181
-rw-r--r--scripts/bugzilla/apiversion.py11
-rw-r--r--scripts/bugzilla/base.py1856
-rw-r--r--scripts/bugzilla/bug.py450
-rw-r--r--scripts/bugzilla/oldclasses.py23
-rw-r--r--scripts/bugzilla/rhbugzilla.py352
-rw-r--r--scripts/bugzilla/transport.py196
-rwxr-xr-xscripts/check-patch-dirs73
-rwxr-xr-xscripts/check-patchfmt13
-rwxr-xr-xscripts/cvs-wd-timestamp2
-rwxr-xr-x[-rw-r--r--]scripts/git-pre-commit13
-rw-r--r--scripts/git_sort/README.md362
-rwxr-xr-xscripts/git_sort/check_missing_fixes.sh63
-rw-r--r--scripts/git_sort/clean_conflicts.awk47
-rwxr-xr-xscripts/git_sort/clean_header.sh384
-rw-r--r--scripts/git_sort/exc.py30
-rwxr-xr-xscripts/git_sort/git_sort.py734
-rw-r--r--scripts/git_sort/lib.py550
-rw-r--r--scripts/git_sort/lib.sh181
-rw-r--r--scripts/git_sort/lib_from.sh39
-rw-r--r--scripts/git_sort/lib_tag.sh510
-rwxr-xr-xscripts/git_sort/merge_tool.py149
-rw-r--r--scripts/git_sort/patch.py123
-rw-r--r--scripts/git_sort/patch_body.awk28
-rw-r--r--scripts/git_sort/patch_header.awk29
-rwxr-xr-xscripts/git_sort/pre-commit.sh97
-rwxr-xr-xscripts/git_sort/qcp.py138
-rwxr-xr-xscripts/git_sort/qdupcheck.py76
-rwxr-xr-xscripts/git_sort/qgoto.py62
-rw-r--r--scripts/git_sort/quilt-mode.sh412
-rw-r--r--scripts/git_sort/quiltrc.qf11
-rwxr-xr-xscripts/git_sort/refs_in_series.sh103
-rwxr-xr-xscripts/git_sort/sequence-insert.py51
-rwxr-xr-xscripts/git_sort/series_conf.py169
-rwxr-xr-xscripts/git_sort/series_insert.py143
-rwxr-xr-xscripts/git_sort/series_sort.py149
-rw-r--r--scripts/git_sort/tests/__init__.py0
-rw-r--r--scripts/git_sort/tests/opensuse-15.0/Dockerfile25
-rw-r--r--scripts/git_sort/tests/opensuse-42.3/Dockerfile25
-rw-r--r--scripts/git_sort/tests/opensuse-tumbleweed/Dockerfile22
-rwxr-xr-xscripts/git_sort/tests/run_all.sh27
-rw-r--r--scripts/git_sort/tests/sle12-sp2/Dockerfile24
-rw-r--r--scripts/git_sort/tests/sle12-sp3/Dockerfile25
-rw-r--r--scripts/git_sort/tests/sle15/Dockerfile25
-rw-r--r--scripts/git_sort/tests/support.py122
-rwxr-xr-xscripts/git_sort/tests/test_git_sort.py388
-rwxr-xr-xscripts/git_sort/tests/test_quilt_mode.py726
-rwxr-xr-xscripts/git_sort/tests/test_series_insert.py118
-rwxr-xr-xscripts/git_sort/tests/test_series_sort.py931
-rwxr-xr-xscripts/git_sort/update_clone.py116
-rwxr-xr-xscripts/gitlog2changes17
-rwxr-xr-xscripts/install-git-hooks6
-rwxr-xr-xscripts/ld-version.sh11
-rw-r--r--scripts/lib/SUSE/MyBS.pm23
-rwxr-xr-xscripts/linux_git.sh16
-rwxr-xr-xscripts/log2165
-rwxr-xr-xscripts/osc_wrapper26
-rw-r--r--scripts/patch-tag-template2
-rwxr-xr-xscripts/patch-tags-from-git8
-rwxr-xr-xscripts/python/check-patchhdr26
-rwxr-xr-xscripts/python/suse_git/header.py12
-rw-r--r--scripts/python/suse_git/patch.py4
-rw-r--r--scripts/python/test-all.sh2
-rwxr-xr-xscripts/python/tests/test_header.py321
-rwxr-xr-xscripts/run_oldconfig.sh44
-rwxr-xr-xscripts/sequence-patch.sh72
-rwxr-xr-xscripts/series2git4
l---------scripts/series_sort.py1
-rwxr-xr-xscripts/stableids170
-rwxr-xr-xscripts/stableup229
-rwxr-xr-xscripts/tar-up.sh10
-rw-r--r--scripts/tests/lib.py9
-rwxr-xr-xscripts/tests/test_linux_git.py53
-rwxr-xr-xscripts/tests/test_log2.py571
-rw-r--r--scripts/wd-functions.sh16
82 files changed, 13062 insertions, 448 deletions
diff --git a/README b/README
index 00cb1062ab..d7a29d1739 100644
--- a/README
+++ b/README
@@ -105,13 +105,13 @@ what the patch does, who wrote it, and who inside SUSE/Novell we'll
Patch-mainline: Never, <reason>
- * The patch should include a References: tag that identifies the
+ * The patch should include a References: tag that identifies the
Bugzilla bug number, FATE entry, etc. where the patch is discussed.
Please prefix bugzilla.novell.com bug numbers with bnc# and fate
feature numbers with fate#. Have a look at
http://en.opensuse.org/openSUSE:Packaging_Patches_guidelines#Current_set_of_abbreviations
for a full list of abbreviations.
-
+
* The patch header may (and often, should) include a more extensive
description of what the patch does, why, and how. The idea is to
allow others to quickly identify what each patch is about, and to
@@ -136,24 +136,39 @@ Example patch header:
| From: Peter Leckie <pleckie@sgi.com>
| References: SGI:PV986789 bnc#482148
| Subject: Clean up dquot pincount code
- | Patch-mainline: 2.6.28
- |
+ | Patch-mainline: v2.6.28
+ |
| Clean up dquot pincount code.
- |
+ |
| This is a code cleanup and optimization that removes a per mount point
| spinlock from the quota code and cleans up the code.
- |
+ |
| The patch changes the pincount from being an int protected by a spinlock
| to an atomic_t allowing the pincount to be manipulated without holding
| the spinlock.
- |
+ |
| This cleanup also protects against random wakup's of both the aild and
| xfssyncd by reevaluating the pincount after been woken. Two latter patches
| will address the Spurious wakeups.
- |
+ |
| Signed-off-by: Peter Leckie <pleckie@sgi.com>
| Acked-by: Jan Kara <jack@suse.cz>
+Patch sorting
+=============
+
+Patches added to the "sorted patches" section of series.conf must be sorted
+according to the upstream order of the commit that they backport.
+
+After you've added a patch file to one of the patches.* directory and added the
+required tags described in the section "Patch Headers", run
+
+ $ ./scripts/git_sort/series_insert.py <patch>
+
+to insert an entry for a new patch file to the sorted section of series.conf.
+
+For more information, please read "scripts/git_sort/README.md".
+
Before You Commit -- Things To Check
====================================
@@ -220,10 +235,10 @@ example:
| -------------------------------------------------------------------
| Wed Dec 1 18:29:44 CET 2004 - agruen@suse.de
- |
+ |
| - patches.fixes/serialize-dgram-read.diff: Serialize dgram read
| using semaphore just like stream (#48427).
- |
+ |
There is a simple helper script for creating changelog entries in this
format (/work/src/bin/vc).
@@ -281,7 +296,7 @@ During kernel builds, two things related to the kernel ABI happen:
are added, but all previous symbols are still available: in this
case, all modules will continue to load into the new kernel just
fine.
-
+
If a reference symsets file (/boot/symsets-* in kernel-$FLAVOR
packages) for the particular architecture and flavor is available, we
check which of the symbol sets in the reference file can still be
diff --git a/scripts/README.scripts b/scripts/README.scripts
index 99be006f54..0b497dcd0c 100644
--- a/scripts/README.scripts
+++ b/scripts/README.scripts
@@ -59,6 +59,10 @@ install-git-hooks
lib
- Perl module path
+linux-git.sh
+ - Determine the location of the mainline linux git repository to use as a
+ reference by other scripts.
+
list-symsets
- Helper installed by install-git-hooks. Only needed in old branches
that have symsets.
diff --git a/scripts/bs-upload-kernel b/scripts/bs-upload-kernel
index 12a80d912e..b592cc85a7 100755
--- a/scripts/bs-upload-kernel
+++ b/scripts/bs-upload-kernel
@@ -51,7 +51,7 @@ my ($dir, $project, $package) = @ARGV[0..2];
$project =~ s/\//:/g;
if (!$package) {
($package = $dir) =~ s:/+\.?$::;
- $package =~ s:.*/::;
+ $package =~ s@.*/@@;
if (! -e "$dir/$package.spec") {
opendir(my $dh, $dir) or die;
my @specfiles = sort grep { /\.spec$/ } readdir($dh);
@@ -99,10 +99,7 @@ if (!@limit_packages && !$BS_SUFFIX) {
}
my @add_packages = ();
-if ($repos{""} !~ /^SUSE:SLE-9\b/) {
- push(@add_packages, "icecream");
-}
-my @macros = ("%opensuse_bs 1", "%is_kotd 1");
+my @macros = ("%is_kotd 1");
if ($ignore_kabi) {
push(@macros, "%ignore_kabi_badness 1");
}
diff --git a/scripts/bugzilla-cli b/scripts/bugzilla-cli
new file mode 100755
index 0000000000..c06c04b6dc
--- /dev/null
+++ b/scripts/bugzilla-cli
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+
+# This is a small wrapper script to simplify running the 'bugzilla'
+# cli tool from a git checkout
+
+import sys
+import os
+import os.path
+scriptdir = os.path.dirname(sys.argv[0])
+if scriptdir[0] != '/':
+ scriptdir = "{}/{}/bugzilla".format(os.getcwd(), scriptdir)
+sys.path.insert(0, scriptdir)
+
+from bugzilla import _cli
+if _cli.DEFAULT_BZ != "https://apibugzilla.suse.com/xmlrpc.cgi":
+ raise RuntimeError("Use of this script requires the SUSE version of python-bugzilla.")
+
+_cli.main()
diff --git a/scripts/bugzilla-create b/scripts/bugzilla-create
new file mode 100755
index 0000000000..6036c60cd9
--- /dev/null
+++ b/scripts/bugzilla-create
@@ -0,0 +1,189 @@
+#!/bin/bash
+
+URL="https://apibugzilla.suse.com/xmlrpc.cgi"
+COMPONENT="Kernel"
+COMMENT="This is an automated report for a proactive fix, documented below."
+
+handle_one_patch() {
+ FILE="$1"
+ SUBJ=$(formail -x Subject < "${FILE}")
+ BODY="$(formail -I "" < "${FILE}" | awk '/^---/ { exit; }; { print; } ')"
+
+ # Use the last available version
+ ${BUGZILLA} info -v "${PRODUCT}" > ${tmpdir}/version
+
+ if [ $? -ne 0 ]; then
+ echo "Could not get versions for ${PRODUCT}." >&2
+ echo "This sometimes produces weird connection aborted errors." >&2
+ cat ${tmpdir}/version >&2
+ exit 1
+ fi
+
+ VERSION=$(grep -v unspecified ${tmpdir}/version | tail -1)
+
+ ${BUGZILLA} new -p "${PRODUCT}" -c "${COMPONENT}" -a "${EMAIL}" \
+ ${QA_EMAIL} -t "${SUBJ}" -v "${VERSION}" \
+ --comment "${COMMENT}" -s "CONFIRMED" \
+ --no-refresh --ids > ${tmpdir}/bugid
+
+ if [ $? -ne 0 ]; then
+ echo "Bug creation failed for "${FILE}"." >&2
+ cat ${tmpdir}/bugid >&2
+ exit 1
+ fi
+
+ read BUGID < ${tmpdir}/bugid
+
+ # If we didn't get just a bug number, we got an error
+ if [ -z "${BUGID}" -o "${BUGID}" != "${BUGID%[^0-9]}" ]; then
+ echo "Bug creation failed for ${FILE}; Errors follow." >&2
+ cat ${tmpdir}/bugid >&2
+ exit 1
+ fi
+
+ OLDREFS=$(grep "^References:" "${FILE}"|sed -e 's#References: *##')
+ if [ -n "${OLDREFS}" ]; then
+ OLDREFS="${OLDREFS} "
+ fi
+
+ REFERENCE="References: ${OLDREFS}bsc#${BUGID}"
+ if [ -n "${MORE_REFERENCES}" ]; then
+ REFERENCE="${REFERENCE} ${MORE_REFERENCES}"
+ fi
+
+ # Update the references header before uploading
+ FILENAME="$(basename "${FILE}")"
+ grep -v "^References:" "${FILE}" | formail -f -I "${REFERENCE}" \
+ > "${tmpdir}/${FILENAME}"
+ if [ $? -eq 0 ]; then
+ mv "${tmpdir}/${FILENAME}" "${FILE}"
+ fi
+
+ if [ "${SUBJ}" = "${SUBJ/\[PATCH}" ]; then
+ SUBJ="[PATCH] ${SUBJ}"
+ fi
+
+ ${BUGZILLA} attach --file="${FILE}" -d "${SUBJ}" -t "text/x-patch" \
+ --comment "${BODY}" ${BUGID}
+
+ echo "Filed report ${BUGID} for ${FILE}"
+}
+
+usage () {
+cat <<END
+usage: $(basename $0) [options...] PATCH [PATCH ...]
+Options:
+ -e | --email <address>
+ * Email address to which this report will be assigned
+ -p | --product <product>
+ * Bugzilla product to fail this report against
+ -d | --debug
+ * Enable debugging output (also assigns QA contact as you)
+ -r | --reference
+ * Any additional references to assign to this report,
+ e.g. FATE#123456
+ -h | --help
+ * Print this message
+
+Notes:
+ * If no email address specified, the username component of git-config
+ user.email will be used with @suse.com appended.
+ * If no product is specified, the \$BUGZILLA_PRODUCT variable in
+ rpm/config.sh will be used, if present.
+ * If the patch already contains references, they will be preserved with
+ the bugzilla ID created and any additional references specified with
+ --reference appended. Otherwise, a new References tag will be created.
+ * The patch will be updated with new references prior to upload.
+ * Use of this script requires a ~/.bugzillarc with the following contents:
+ [apibugzilla.suse.com]
+ user = <bugzilla username>
+ password = <bugzilla password>
+ authtype = basic
+END
+ exit $1
+}
+
+# Defaults
+EMAIL="$(git config user.email)"
+if [ "${EMAIL}" != "${EMAIL%%*@suse.com}" ]; then
+ EMAIL="${EMAIL%%@*}@suse.com"
+fi
+
+. $(dirname $0)/../rpm/config.sh
+PRODUCT="${BUGZILLA_PRODUCT}"
+
+OPTS="$(getopt -o e:p:dr:h --long email:,product:,debug,reference: -n 'bugzilla-upload' -- "$@")"
+
+if [ $? -ne 0 ]; then
+ echo "" >&2
+ usage 1
+fi
+
+eval set -- "${OPTS}"
+
+while true; do
+ case "$1" in
+ -e | --email)
+ EMAIL="$2" ; shift 2 ;;
+ -p | --product)
+ PRODUCT="$2" ; shift 2 ;;
+ -d | --debug)
+ DEBUG=true; shift ;;
+ -r | --reference)
+ MORE_REFERENCES="${MORE_REFERENCES} $2" ; shift 2 ;;
+ -h | --help)
+ usage 0 ;;
+ --)
+ shift ; break ;;
+ *)
+ break ;;
+ esac
+done
+
+if [ "$#" -eq 0 ]; then
+ echo "" >&2
+ usage 1
+fi
+
+DEBUG=true
+
+if [ -z "${PRODUCT}" ]; then
+ cat <<-END >&2
+ ERROR: No product specified.
+
+ Product may be specified via -p or rpm/config.sh:\$BUGZILLA_PRODUCT
+ END
+ exit 1
+fi
+
+if [ -z "${EMAIL}" ]; then
+ cat <<-END >&2
+ ERROR: No assignee email address specified.
+
+ Email may be specified via -e or git-config user.email
+ END
+fi
+
+# Don't spam QA while testing
+if [ -n "${DEBUG}" ]; then
+ QA_EMAIL="-q ${EMAIL}"
+fi
+
+DIR="$(realpath "$(dirname "$0")")"
+if [ ! -e "${DIR}/bugzilla-cli" ]; then
+ echo "Missing ${DIR}/bugzilla-cli" >&2
+ exit 1
+fi
+
+BUGZILLA="${DIR}/bugzilla-cli --bugzilla ${URL} --ensure-logged-in"
+
+cleanup () {
+ rm -rf ${tmpdir}
+}
+
+trap cleanup EXIT
+tmpdir=$(mktemp -d /tmp/bugzilla.XXXXXX)
+
+for patch in "$@"; do
+ handle_one_patch "${patch}"
+done
diff --git a/scripts/bugzilla-resolve b/scripts/bugzilla-resolve
new file mode 100755
index 0000000000..7fd62374ec
--- /dev/null
+++ b/scripts/bugzilla-resolve
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+URL="https://apibugzilla.suse.com/xmlrpc.cgi"
+
+resolve_one() {
+ ${BUGZILLA} modify -l "Automated update: This patch was committed to the kernel git repository. Closing as FIXED." -k FIXED $1
+}
+
+DIR="$(realpath $(dirname $0))"
+if [ ! -e "${DIR}/bugzilla-cli" ]; then
+ echo "Missing ${DIR}/bugzilla-cli" >&2
+ exit 1
+fi
+
+BUGZILLA="${DIR}/bugzilla-cli --bugzilla ${URL} --ensure-logged-in"
+
+if [ "$#" -eq 0 ]; then
+ echo "usage: $(basename $0) <bug id> [bug ids ...]" >&2
+ exit 1
+fi
+
+for bugid in "$@"; do
+ resolve_one ${bugid}
+done
diff --git a/scripts/bugzilla/__init__.py b/scripts/bugzilla/__init__.py
new file mode 100644
index 0000000000..74f5514856
--- /dev/null
+++ b/scripts/bugzilla/__init__.py
@@ -0,0 +1,39 @@
+# python-bugzilla - a Python interface to bugzilla using xmlrpclib.
+#
+# Copyright (C) 2007, 2008 Red Hat Inc.
+# Author: Will Woods <wwoods@redhat.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from .apiversion import version, __version__
+from .base import Bugzilla
+from .transport import BugzillaError
+from .rhbugzilla import RHBugzilla
+from .oldclasses import (Bugzilla3, Bugzilla32, Bugzilla34, Bugzilla36,
+ Bugzilla4, Bugzilla42, Bugzilla44,
+ NovellBugzilla, RHBugzilla3, RHBugzilla4)
+
+
+# This is the public API. If you are explicitly instantiating any other
+# class, using some function, or poking into internal files, don't complain
+# if things break on you.
+__all__ = [
+ "Bugzilla3", "Bugzilla32", "Bugzilla34", "Bugzilla36",
+ "Bugzilla4", "Bugzilla42", "Bugzilla44",
+ "NovellBugzilla",
+ "RHBugzilla3", "RHBugzilla4", "RHBugzilla",
+ 'BugzillaError',
+ 'Bugzilla', "version",
+]
+
+
+# Clear all other locals() from the public API
+for __sym in locals().copy():
+ if __sym.startswith("__") or __sym in __all__:
+ continue
+ locals().pop(__sym)
+locals().pop("__sym")
diff --git a/scripts/bugzilla/_cli.py b/scripts/bugzilla/_cli.py
new file mode 100644
index 0000000000..a0a6d8302b
--- /dev/null
+++ b/scripts/bugzilla/_cli.py
@@ -0,0 +1,1181 @@
+#!/usr/bin/env python3
+#
+# bugzilla - a commandline frontend for the python bugzilla module
+#
+# Copyright (C) 2007-2017 Red Hat Inc.
+# Author: Will Woods <wwoods@redhat.com>
+# Author: Cole Robinson <crobinso@redhat.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from __future__ import print_function
+
+import locale
+from logging import getLogger, DEBUG, INFO, WARN, StreamHandler, Formatter
+import argparse
+import os
+import re
+import socket
+import sys
+import tempfile
+
+# pylint: disable=import-error
+if sys.version_info[0] >= 3:
+ # pylint: disable=no-name-in-module,redefined-builtin
+ from xmlrpc.client import Fault, ProtocolError
+ from urllib.parse import urlparse
+ basestring = (str, bytes)
+else:
+ from xmlrpclib import Fault, ProtocolError
+ from urlparse import urlparse
+# pylint: enable=import-error
+
+import requests.exceptions
+
+import bugzilla
+
+DEFAULT_BZ = 'https://apibugzilla.suse.com/xmlrpc.cgi'
+
+format_field_re = re.compile("%{([a-z0-9_]+)(?::([^}]*))?}")
+
+log = getLogger(bugzilla.__name__)
+
+
+################
+# Util helpers #
+################
+
+def _is_unittest():
+ return bool(os.getenv("__BUGZILLA_UNITTEST"))
+
+
+def _is_unittest_debug():
+ return bool(os.getenv("__BUGZILLA_UNITTEST_DEBUG"))
+
+
+def to_encoding(ustring):
+ string = ''
+ if isinstance(ustring, basestring):
+ string = ustring
+ elif ustring is not None:
+ string = str(ustring)
+
+ if sys.version_info[0] >= 3:
+ return string
+
+ preferred = locale.getpreferredencoding()
+ if _is_unittest():
+ preferred = "UTF-8"
+ return string.encode(preferred, 'replace')
+
+
+def open_without_clobber(name, *args):
+ '''Try to open the given file with the given mode; if that filename exists,
+ try "name.1", "name.2", etc. until we find an unused filename.'''
+ fd = None
+ count = 1
+ orig_name = name
+ while fd is None:
+ try:
+ fd = os.open(name, os.O_CREAT | os.O_EXCL, 0o666)
+ except OSError as err:
+ if err.errno == os.errno.EEXIST:
+ name = "%s.%i" % (orig_name, count)
+ count += 1
+ else:
+ raise IOError(err.errno, err.strerror, err.filename)
+ fobj = open(name, *args)
+ if fd != fobj.fileno():
+ os.close(fd)
+ return fobj
+
+
+def get_default_url():
+ """
+ Grab a default URL from bugzillarc [DEFAULT] url=X
+ """
+ from bugzilla.base import _open_bugzillarc
+ cfg = _open_bugzillarc()
+ if cfg:
+ cfgurl = cfg.defaults().get("url", None)
+ if cfgurl is not None:
+ log.debug("bugzillarc: found cli url=%s", cfgurl)
+ return cfgurl
+ return DEFAULT_BZ
+
+
+def setup_logging(debug, verbose):
+ handler = StreamHandler(sys.stderr)
+ handler.setFormatter(Formatter(
+ "[%(asctime)s] %(levelname)s (%(module)s:%(lineno)d) %(message)s",
+ "%H:%M:%S"))
+ log.addHandler(handler)
+
+ if debug:
+ log.setLevel(DEBUG)
+ elif verbose:
+ log.setLevel(INFO)
+ else:
+ log.setLevel(WARN)
+
+ if _is_unittest_debug():
+ log.setLevel(DEBUG)
+
+
+##################
+# Option parsing #
+##################
+
+def _setup_root_parser():
+ epilog = 'Try "bugzilla COMMAND --help" for command-specific help.'
+ p = argparse.ArgumentParser(epilog=epilog)
+
+ default_url = get_default_url()
+
+ # General bugzilla connection options
+ p.add_argument('--bugzilla', default=default_url,
+ help="bugzilla XMLRPC URI. default: %s" % default_url)
+ p.add_argument("--nosslverify", dest="sslverify",
+ action="store_false", default=True,
+ help="Don't error on invalid bugzilla SSL certificate")
+ p.add_argument('--cert',
+ help="client side certificate file needed by the webserver")
+
+ p.add_argument('--login', action="store_true",
+ help='Run interactive "login" before performing the '
+ 'specified command.')
+ p.add_argument('--username', help="Log in with this username")
+ p.add_argument('--password', help="Log in with this password")
+
+ p.add_argument('--ensure-logged-in', action="store_true",
+ help="Raise an error if we aren't logged in to bugzilla. "
+ "Consider using this if you are depending on "
+ "cached credentials, to ensure that when they expire the "
+ "tool errors, rather than subtly change output.")
+ p.add_argument('--no-cache-credentials',
+ action='store_false', default=True, dest='cache_credentials',
+ help="Don't save any bugzilla cookies or tokens to disk, and "
+ "don't use any pre-existing credentials.")
+
+ p.add_argument('--cookiefile', default=None,
+ help="cookie file to use for bugzilla authentication")
+ p.add_argument('--tokenfile', default=None,
+ help="token file to use for bugzilla authentication")
+
+ p.add_argument('--verbose', action='store_true',
+ help="give more info about what's going on")
+ p.add_argument('--debug', action='store_true',
+ help="output bunches of debugging info")
+ p.add_argument('--version', action='version',
+ version=bugzilla.__version__)
+
+ # Allow user to specify BZClass to initialize. Kinda weird for the
+ # CLI, I'd rather people file bugs about this so we can fix our detection.
+ # So hide it from the help output but keep it for back compat
+ p.add_argument('--bztype', default='auto', help=argparse.SUPPRESS)
+
+ return p
+
+
+def _parser_add_output_options(p):
+ outg = p.add_argument_group("Output format options")
+ outg.add_argument('--full', action='store_const', dest='output',
+ const='full', default='normal',
+ help="output detailed bug info")
+ outg.add_argument('-i', '--ids', action='store_const', dest='output',
+ const='ids', help="output only bug IDs")
+ outg.add_argument('-e', '--extra', action='store_const',
+ dest='output', const='extra',
+ help="output additional bug information "
+ "(keywords, Whiteboards, etc.)")
+ outg.add_argument('--oneline', action='store_const', dest='output',
+ const='oneline',
+ help="one line summary of the bug (useful for scripts)")
+ outg.add_argument('--raw', action='store_const', dest='output',
+ const='raw', help="raw output of the bugzilla contents")
+ outg.add_argument('--outputformat',
+ help="Print output in the form given. "
+ "You can use RPM-style tags that match bug "
+ "fields, e.g.: '%%{id}: %%{summary}'. See the man page "
+ "section 'Output options' for more details.")
+
+
+def _parser_add_bz_fields(rootp, command):
+ cmd_new = (command == "new")
+ cmd_query = (command == "query")
+ cmd_modify = (command == "modify")
+ if cmd_new:
+ comment_help = "Set initial bug comment/description"
+ elif cmd_query:
+ comment_help = "Search all bug comments"
+ else:
+ comment_help = "Add new bug comment"
+
+ p = rootp.add_argument_group("Standard bugzilla options")
+
+ p.add_argument('-p', '--product', help="Product name")
+ p.add_argument('-v', '--version', help="Product version")
+ p.add_argument('-c', '--component', help="Component name")
+ p.add_argument('-t', '--summary', '--short_desc', help="Bug summary")
+ p.add_argument('-l', '--comment', '--long_desc', help=comment_help)
+ if not cmd_query:
+ p.add_argument("--comment-tag", action="append",
+ help="Comment tag for the new comment")
+ p.add_argument("--sub-component", action="append",
+ help="RHBZ sub component field")
+ p.add_argument('-o', '--os', help="Operating system")
+ p.add_argument('--arch', help="Arch this bug occurs on")
+ p.add_argument('-x', '--severity', help="Bug severity")
+ p.add_argument('-z', '--priority', help="Bug priority")
+ p.add_argument('--alias', help='Bug alias (name)')
+ p.add_argument('-s', '--status', '--bug_status',
+ help='Bug status (NEW, ASSIGNED, etc.)')
+ p.add_argument('-u', '--url', help="URL field")
+ p.add_argument('-m', '--target_milestone', help="Target milestone")
+ p.add_argument('--target_release', help="RHBZ Target release")
+
+ p.add_argument('--blocked', action="append",
+ help="Bug IDs that this bug blocks")
+ p.add_argument('--dependson', action="append",
+ help="Bug IDs that this bug depends on")
+ p.add_argument('--keywords', action="append",
+ help="Bug keywords")
+ p.add_argument('--groups', action="append",
+ help="Which user groups can view this bug")
+
+ p.add_argument('--cc', action="append", help="CC list")
+ p.add_argument('-a', '--assigned_to', '--assignee', help="Bug assignee")
+ p.add_argument('-q', '--qa_contact', help='QA contact')
+
+ if not cmd_new:
+ p.add_argument('-f', '--flag', action='append',
+ help="Bug flags state. Ex:\n"
+ " --flag needinfo?\n"
+ " --flag dev_ack+ \n"
+ " clear with --flag needinfoX")
+ p.add_argument("--tags", action="append",
+ help="Tags/Personal Tags field.")
+
+ p.add_argument('-w', "--whiteboard", '--status_whiteboard',
+ action="append", help='Whiteboard field')
+ p.add_argument("--devel_whiteboard", action="append",
+ help='RHBZ devel whiteboard field')
+ p.add_argument("--internal_whiteboard", action="append",
+ help='RHBZ internal whiteboard field')
+ p.add_argument("--qa_whiteboard", action="append",
+ help='RHBZ QA whiteboard field')
+ p.add_argument('-F', '--fixed_in',
+ help="RHBZ 'Fixed in version' field")
+
+ # Put this at the end, so it sticks out more
+ p.add_argument('--field',
+ metavar="FIELD=VALUE", action="append", dest="fields",
+ help="Manually specify a bugzilla XMLRPC field. FIELD is "
+ "the raw name used by the bugzilla instance. For example if your "
+ "bugzilla instance has a custom field cf_my_field, do:\n"
+ " --field cf_my_field=VALUE")
+
+ # Used by unit tests, not for end user consumption
+ p.add_argument('--__test-return-result', action="store_true",
+ dest="test_return_result", help=argparse.SUPPRESS)
+
+ if not cmd_modify:
+ _parser_add_output_options(rootp)
+
+
+def _setup_action_new_parser(subparsers):
+ description = ("Create a new bug report. "
+ "--product, --component, --version, --summary, and --comment "
+ "must be specified. "
+ "Options that take multiple values accept comma separated lists, "
+ "including --cc, --blocks, --dependson, --groups, and --keywords.")
+ p = subparsers.add_parser("new", description=description)
+
+ _parser_add_bz_fields(p, "new")
+ p.add_argument('--no-refresh', action='store_true',
+ help='Do not refresh bug after creating')
+
+
+def _setup_action_query_parser(subparsers):
+ description = ("List bug reports that match the given criteria. "
+ "Certain options can accept a comma separated list to query multiple "
+ "values, including --status, --component, --product, --version, --id.")
+ epilog = ("Note: querying via explicit command line options will only "
+ "get you so far. See the --from-url option for a way to use powerful "
+ "Web UI queries from the command line.")
+ p = subparsers.add_parser("query",
+ description=description, epilog=epilog)
+
+ _parser_add_bz_fields(p, "query")
+
+ g = p.add_argument_group("'query' specific options")
+ g.add_argument('-b', '--id', '--bug_id',
+ help="specify individual bugs by IDs, separated with commas")
+ g.add_argument('-r', '--reporter',
+ help="Email: search reporter email for given address")
+ g.add_argument('--quicksearch',
+ help="Search using bugzilla's quicksearch functionality.")
+ g.add_argument('--savedsearch',
+ help="Name of a bugzilla saved search. If you don't own this "
+ "saved search, you must passed --savedsearch_sharer_id.")
+ g.add_argument('--savedsearch-sharer-id',
+ help="Owner ID of the --savedsearch. You can get this ID from "
+ "the URL bugzilla generates when running the saved search "
+ "from the web UI.")
+
+ # Keep this at the end so it sticks out more
+ g.add_argument('--from-url', metavar="WEB_QUERY_URL",
+ help="Make a working query via bugzilla's 'Advanced search' web UI, "
+ "grab the url from your browser (the string with query.cgi or "
+ "buglist.cgi in it), and --from-url will run it via the "
+ "bugzilla API. Don't forget to quote the string! "
+ "This only works for Bugzilla 5 and Red Hat bugzilla")
+
+ # Deprecated options
+ p.add_argument('-E', '--emailtype', help=argparse.SUPPRESS)
+ p.add_argument('--components_file', help=argparse.SUPPRESS)
+ p.add_argument('-U', '--url_type',
+ help=argparse.SUPPRESS)
+ p.add_argument('-K', '--keywords_type',
+ help=argparse.SUPPRESS)
+ p.add_argument('-W', '--status_whiteboard_type',
+ help=argparse.SUPPRESS)
+ p.add_argument('-B', '--booleantype',
+ help=argparse.SUPPRESS)
+ p.add_argument('--boolean_query', action="append",
+ help=argparse.SUPPRESS)
+ p.add_argument('--fixed_in_type', help=argparse.SUPPRESS)
+
+
+def _setup_action_info_parser(subparsers):
+ description = ("List products or component information about the "
+ "bugzilla server.")
+ p = subparsers.add_parser("info", description=description)
+
+ x = p.add_mutually_exclusive_group(required=True)
+ x.add_argument('-p', '--products', action='store_true',
+ help='Get a list of products')
+ x.add_argument('-c', '--components', metavar="PRODUCT",
+ help='List the components in the given product')
+ x.add_argument('-o', '--component_owners', metavar="PRODUCT",
+ help='List components (and their owners)')
+ x.add_argument('-v', '--versions', metavar="PRODUCT",
+ help='List the versions for the given product')
+ p.add_argument('--active-components', action="store_true",
+ help='Only show active components. Combine with --components*')
+
+
+
+def _setup_action_modify_parser(subparsers):
+ usage = ("bugzilla modify [options] BUGID [BUGID...]\n"
+ "Fields that take multiple values have a special input format.\n"
+ "Append: --cc=foo@example.com\n"
+ "Overwrite: --cc==foo@example.com\n"
+ "Remove: --cc=-foo@example.com\n"
+ "Options that accept this format: --cc, --blocked, --dependson,\n"
+ " --groups, --tags, whiteboard fields.")
+ p = subparsers.add_parser("modify", usage=usage)
+
+ _parser_add_bz_fields(p, "modify")
+
+ g = p.add_argument_group("'modify' specific options")
+ g.add_argument("ids", nargs="+", help="Bug IDs to modify")
+ g.add_argument('-k', '--close', metavar="RESOLUTION",
+ help='Close with the given resolution (WONTFIX, NOTABUG, etc.)')
+ g.add_argument('-d', '--dupeid', metavar="ORIGINAL",
+ help='ID of original bug. Implies --close DUPLICATE')
+ g.add_argument('--private', action='store_true', default=False,
+ help='Mark new comment as private')
+ g.add_argument('--reset-assignee', action="store_true",
+ help='Reset assignee to component default')
+ g.add_argument('--reset-qa-contact', action="store_true",
+ help='Reset QA contact to component default')
+
+
+def _setup_action_attach_parser(subparsers):
+ usage = """
+bugzilla attach --file=FILE --desc=DESC [--type=TYPE] BUGID [BUGID...]
+bugzilla attach --get=ATTACHID --getall=BUGID [...]
+bugzilla attach --type=TYPE BUGID [BUGID...]"""
+ description = "Attach files or download attachments."
+ p = subparsers.add_parser("attach", description=description, usage=usage)
+
+ p.add_argument("ids", nargs="*", help="BUGID references")
+ p.add_argument('-f', '--file', metavar="FILENAME",
+ help='File to attach, or filename for data provided on stdin')
+ p.add_argument('-d', '--description', '--summary',
+ metavar="SUMMARY", dest='desc',
+ help="A short summary of the file being attached")
+ p.add_argument('-t', '--type', metavar="MIMETYPE",
+ help="Mime-type for the file being attached")
+ p.add_argument('-g', '--get', metavar="ATTACHID", action="append",
+ default=[], help="Download the attachment with the given ID")
+ p.add_argument("--getall", "--get-all", metavar="BUGID", action="append",
+ default=[], help="Download all attachments on the given bug")
+ p.add_argument('-l', '--comment', '--long_desc', help="Add comment with attachment")
+
+
+def _setup_action_login_parser(subparsers):
+ usage = 'bugzilla login [username [password]]'
+ description = "Log into bugzilla and save a login cookie or token."
+ p = subparsers.add_parser("login", description=description, usage=usage)
+ p.add_argument("pos_username", nargs="?", help="Optional username",
+ metavar="username")
+ p.add_argument("pos_password", nargs="?", help="Optional password",
+ metavar="password")
+
+
+def setup_parser():
+ rootparser = _setup_root_parser()
+ subparsers = rootparser.add_subparsers(dest="command")
+ subparsers.required = True
+ _setup_action_new_parser(subparsers)
+ _setup_action_query_parser(subparsers)
+ _setup_action_info_parser(subparsers)
+ _setup_action_modify_parser(subparsers)
+ _setup_action_attach_parser(subparsers)
+ _setup_action_login_parser(subparsers)
+ return rootparser
+
+
+####################
+# Command routines #
+####################
+
+def _merge_field_opts(query, opt, parser):
+ # Add any custom fields if specified
+ if opt.fields is None:
+ return
+
+ for f in opt.fields:
+ try:
+ f, v = f.split('=', 1)
+ query[f] = v
+ except Exception:
+ parser.error("Invalid field argument provided: %s" % (f))
+
+
+def _do_query(bz, opt, parser):
+ q = {}
+
+ # Parse preconstructed queries.
+ u = opt.from_url
+ if u:
+ q = bz.url_to_query(u)
+
+ if opt.components_file:
+ # Components slurped in from file (one component per line)
+ # This can be made more robust
+ clist = []
+ f = open(opt.components_file, 'r')
+ for line in f.readlines():
+ line = line.rstrip("\n")
+ clist.append(line)
+ opt.component = clist
+
+ if opt.status:
+ val = opt.status
+ stat = val
+ if val == 'ALL':
+ # leaving this out should return bugs of any status
+ stat = None
+ elif val == 'DEV':
+ # Alias for all development bug statuses
+ stat = ['NEW', 'ASSIGNED', 'NEEDINFO', 'ON_DEV',
+ 'MODIFIED', 'POST', 'REOPENED']
+ elif val == 'QE':
+ # Alias for all QE relevant bug statuses
+ stat = ['ASSIGNED', 'ON_QA', 'FAILS_QA', 'PASSES_QA']
+ elif val == 'EOL':
+ # Alias for EndOfLife bug statuses
+ stat = ['VERIFIED', 'RELEASE_PENDING', 'RESOLVED']
+ elif val == 'OPEN':
+ # non-Closed statuses
+ stat = ['NEW', 'ASSIGNED', 'MODIFIED', 'ON_DEV', 'ON_QA',
+ 'VERIFIED', 'RELEASE_PENDING', 'POST']
+ opt.status = stat
+
+ # Convert all comma separated list parameters to actual lists,
+ # which is what bugzilla wants
+ # According to bugzilla docs, any parameter can be a list, but
+ # let's only do this for options we explicitly mention can be
+ # comma separated.
+ for optname in ["severity", "id", "status", "component",
+ "priority", "product", "version"]:
+ val = getattr(opt, optname, None)
+ if not isinstance(val, str):
+ continue
+ setattr(opt, optname, val.split(","))
+
+ include_fields = None
+ if opt.output == 'raw':
+ # 'raw' always does a getbug() call anyways, so just ask for ID back
+ include_fields = ['id']
+
+ elif opt.outputformat:
+ include_fields = []
+ for fieldname, rest in format_field_re.findall(opt.outputformat):
+ if fieldname == "whiteboard" and rest:
+ fieldname = rest + "_" + fieldname
+ elif fieldname == "flag":
+ fieldname = "flags"
+ elif fieldname == "cve":
+ fieldname = ["keywords", "blocks"]
+ elif fieldname == "__unicode__":
+ # Needs to be in sync with bug.__unicode__
+ fieldname = ["id", "status", "assigned_to", "summary"]
+
+ flist = isinstance(fieldname, list) and fieldname or [fieldname]
+ for f in flist:
+ if f not in include_fields:
+ include_fields.append(f)
+
+ if include_fields is not None:
+ include_fields.sort()
+
+ built_query = bz.build_query(
+ product=opt.product or None,
+ component=opt.component or None,
+ sub_component=opt.sub_component or None,
+ version=opt.version or None,
+ reporter=opt.reporter or None,
+ bug_id=opt.id or None,
+ short_desc=opt.summary or None,
+ long_desc=opt.comment or None,
+ cc=opt.cc or None,
+ assigned_to=opt.assigned_to or None,
+ qa_contact=opt.qa_contact or None,
+ status=opt.status or None,
+ blocked=opt.blocked or None,
+ dependson=opt.dependson or None,
+ keywords=opt.keywords or None,
+ keywords_type=opt.keywords_type or None,
+ url=opt.url or None,
+ url_type=opt.url_type or None,
+ status_whiteboard=opt.whiteboard or None,
+ status_whiteboard_type=opt.status_whiteboard_type or None,
+ fixed_in=opt.fixed_in or None,
+ fixed_in_type=opt.fixed_in_type or None,
+ flag=opt.flag or None,
+ alias=opt.alias or None,
+ qa_whiteboard=opt.qa_whiteboard or None,
+ devel_whiteboard=opt.devel_whiteboard or None,
+ boolean_query=opt.boolean_query or None,
+ bug_severity=opt.severity or None,
+ priority=opt.priority or None,
+ target_release=opt.target_release or None,
+ target_milestone=opt.target_milestone or None,
+ emailtype=opt.emailtype or None,
+ booleantype=opt.booleantype or None,
+ include_fields=include_fields,
+ quicksearch=opt.quicksearch or None,
+ savedsearch=opt.savedsearch or None,
+ savedsearch_sharer_id=opt.savedsearch_sharer_id or None,
+ tags=opt.tags or None)
+
+ _merge_field_opts(built_query, opt, parser)
+
+ built_query.update(q)
+ q = built_query
+
+ if not q:
+ parser.error("'query' command requires additional arguments")
+ if opt.test_return_result:
+ return q
+ return bz.query(q)
+
+
+def _do_info(bz, opt):
+ """
+ Handle the 'info' subcommand
+ """
+ # All these commands call getproducts internally, so do it up front
+ # with minimal include_fields for speed
+ def _filter_components(compdetails):
+ ret = {}
+ for k, v in compdetails.items():
+ if v.get("is_active", True):
+ ret[k] = v
+ return ret
+
+ productname = (opt.components or opt.component_owners or opt.versions)
+ include_fields = ["name", "id"]
+ fastcomponents = (opt.components and not opt.active_components)
+ if opt.versions:
+ include_fields += ["versions"]
+ if opt.component_owners:
+ include_fields += [
+ "components.default_assigned_to",
+ "components.name",
+ ]
+ if (opt.active_components and
+ any(["components" in i for i in include_fields])):
+ include_fields += ["components.is_active"]
+
+ bz.refresh_products(names=productname and [productname] or None,
+ include_fields=include_fields)
+
+ if opt.products:
+ for name in sorted([p["name"] for p in bz.getproducts()]):
+ print(name)
+
+ elif fastcomponents:
+ for name in sorted(bz.getcomponents(productname)):
+ print(name)
+
+ elif opt.components:
+ details = bz.getcomponentsdetails(productname)
+ for name in sorted(_filter_components(details)):
+ print(name)
+
+ elif opt.versions:
+ proddict = bz.getproducts()[0]
+ for v in proddict['versions']:
+ print(to_encoding(v["name"]))
+
+ elif opt.component_owners:
+ details = bz.getcomponentsdetails(productname)
+ for c in sorted(_filter_components(details)):
+ print(to_encoding(u"%s: %s" % (c,
+ details[c]['default_assigned_to'])))
+
+
+def _convert_to_outputformat(output):
+ fmt = ""
+
+ if output == "normal":
+ fmt = "%{__unicode__}"
+
+ elif output == "ids":
+ fmt = "%{id}"
+
+ elif output == 'full':
+ fmt += "%{__unicode__}\n"
+ fmt += "Component: %{component}\n"
+ fmt += "CC: %{cc}\n"
+ fmt += "Blocked: %{blocks}\n"
+ fmt += "Depends: %{depends_on}\n"
+ fmt += "%{comments}\n"
+
+ elif output == 'extra':
+ fmt += "%{__unicode__}\n"
+ fmt += " +Keywords: %{keywords}\n"
+ fmt += " +QA Whiteboard: %{qa_whiteboard}\n"
+ fmt += " +Status Whiteboard: %{status_whiteboard}\n"
+ fmt += " +Devel Whiteboard: %{devel_whiteboard}\n"
+
+ elif output == 'oneline':
+ fmt += "#%{bug_id} %{status} %{assigned_to} %{component}\t"
+ fmt += "[%{target_milestone}] %{flags} %{cve}"
+
+ else:
+ raise RuntimeError("Unknown output type '%s'" % output)
+
+ return fmt
+
+
+def _format_output(bz, opt, buglist):
+ if opt.output == 'raw':
+ buglist = bz.getbugs([b.bug_id for b in buglist])
+ for b in buglist:
+ print("Bugzilla %s: " % b.bug_id)
+ for attrname in sorted(b.__dict__):
+ print(to_encoding(u"ATTRIBUTE[%s]: %s" %
+ (attrname, b.__dict__[attrname])))
+ print("\n\n")
+ return
+
+ def bug_field(matchobj):
+ # whiteboard and flag allow doing
+ # %{whiteboard:devel} and %{flag:needinfo}
+ # That's what 'rest' matches
+ (fieldname, rest) = matchobj.groups()
+
+ if fieldname == "whiteboard" and rest:
+ fieldname = rest + "_" + fieldname
+
+ if fieldname == "flag" and rest:
+ val = b.get_flag_status(rest)
+
+ elif fieldname == "flags" or fieldname == "flags_requestee":
+ tmpstr = []
+ for f in getattr(b, "flags", []):
+ requestee = f.get('requestee', "")
+ if fieldname == "flags":
+ requestee = ""
+ if fieldname == "flags_requestee":
+ if requestee == "":
+ continue
+ tmpstr.append("%s" % requestee)
+ else:
+ tmpstr.append("%s%s%s" %
+ (f['name'], f['status'], requestee))
+
+ val = ",".join(tmpstr)
+
+ elif fieldname == "cve":
+ cves = []
+ for key in getattr(b, "keywords", []):
+ # grab CVE from keywords and blockers
+ if key.find("Security") == -1:
+ continue
+ for bl in b.blocks:
+ cvebug = bz.getbug(bl)
+ for cb in cvebug.alias:
+ if cb.find("CVE") == -1:
+ continue
+ if cb.strip() not in cves:
+ cves.append(cb)
+ val = ",".join(cves)
+
+ elif fieldname == "comments":
+ val = ""
+ for c in getattr(b, "comments", []):
+ val += ("\n* %s - %s:\n%s\n" % (c['time'],
+ c.get("creator", c.get("author", "")), c['text']))
+
+ elif fieldname == "external_bugs":
+ val = ""
+ for e in getattr(b, "external_bugs", []):
+ url = e["type"]["full_url"].replace("%id%", e["ext_bz_bug_id"])
+ if not val:
+ val += "\n"
+ val += "External bug: %s\n" % url
+
+ elif fieldname == "__unicode__":
+ val = b.__unicode__()
+ else:
+ val = getattr(b, fieldname, "")
+
+ vallist = isinstance(val, list) and val or [val]
+ val = ','.join([to_encoding(v) for v in vallist])
+
+ return val
+
+ for b in buglist:
+ print(format_field_re.sub(bug_field, opt.outputformat))
+
+
+def _parse_triset(vallist, checkplus=True, checkminus=True, checkequal=True,
+ splitcomma=False):
+ add_val = []
+ rm_val = []
+ set_val = None
+
+ def make_list(v):
+ if not v:
+ return []
+ if splitcomma:
+ return v.split(",")
+ return [v]
+
+ for val in isinstance(vallist, list) and vallist or [vallist]:
+ val = val or ""
+
+ if val.startswith("+") and checkplus:
+ add_val += make_list(val[1:])
+ elif val.startswith("-") and checkminus:
+ rm_val += make_list(val[1:])
+ elif val.startswith("=") and checkequal:
+ # Intentionally overwrite this
+ set_val = make_list(val[1:])
+ else:
+ add_val += make_list(val)
+
+ return add_val, rm_val, set_val
+
+
+def _do_new(bz, opt, parser):
+ # Parse options that accept comma separated list
+ def parse_multi(val):
+ return _parse_triset(val, checkplus=False, checkminus=False,
+ checkequal=False, splitcomma=True)[0]
+
+ ret = bz.build_createbug(
+ blocks=parse_multi(opt.blocked) or None,
+ cc=parse_multi(opt.cc) or None,
+ component=opt.component or None,
+ depends_on=parse_multi(opt.dependson) or None,
+ description=opt.comment or None,
+ groups=parse_multi(opt.groups) or None,
+ keywords=parse_multi(opt.keywords) or None,
+ op_sys=opt.os or None,
+ platform=opt.arch or None,
+ priority=opt.priority or None,
+ product=opt.product or None,
+ severity=opt.severity or None,
+ summary=opt.summary or None,
+ url=opt.url or None,
+ version=opt.version or None,
+ assigned_to=opt.assigned_to or None,
+ qa_contact=opt.qa_contact or None,
+ sub_component=opt.sub_component or None,
+ alias=opt.alias or None,
+ comment_tags=opt.comment_tag or None,
+ )
+
+ _merge_field_opts(ret, opt, parser)
+
+ if opt.test_return_result:
+ return ret
+
+ b = bz.createbug(ret)
+ if not opt.no_refresh:
+ b.refresh()
+ return [b]
+
+
+def _do_modify(bz, parser, opt):
+ bugid_list = [bugid for a in opt.ids for bugid in a.split(',')]
+
+ add_wb, rm_wb, set_wb = _parse_triset(opt.whiteboard)
+ add_devwb, rm_devwb, set_devwb = _parse_triset(opt.devel_whiteboard)
+ add_intwb, rm_intwb, set_intwb = _parse_triset(opt.internal_whiteboard)
+ add_qawb, rm_qawb, set_qawb = _parse_triset(opt.qa_whiteboard)
+
+ add_blk, rm_blk, set_blk = _parse_triset(opt.blocked, splitcomma=True)
+ add_deps, rm_deps, set_deps = _parse_triset(opt.dependson, splitcomma=True)
+ add_key, rm_key, set_key = _parse_triset(opt.keywords)
+ add_cc, rm_cc, ignore = _parse_triset(opt.cc,
+ checkplus=False,
+ checkequal=False)
+ add_groups, rm_groups, ignore = _parse_triset(opt.groups,
+ checkequal=False,
+ splitcomma=True)
+ add_tags, rm_tags, ignore = _parse_triset(opt.tags, checkequal=False)
+
+ status = opt.status or None
+ if opt.dupeid is not None:
+ opt.close = "DUPLICATE"
+ if opt.close:
+ status = "RESOLVED"
+
+ flags = []
+ if opt.flag:
+ # Convert "foo+" to tuple ("foo", "+")
+ for f in opt.flag:
+ flags.append({"name": f[:-1], "status": f[-1]})
+
+ update = bz.build_update(
+ assigned_to=opt.assigned_to or None,
+ comment=opt.comment or None,
+ comment_private=opt.private or None,
+ component=opt.component or None,
+ product=opt.product or None,
+ blocks_add=add_blk or None,
+ blocks_remove=rm_blk or None,
+ blocks_set=set_blk,
+ url=opt.url or None,
+ cc_add=add_cc or None,
+ cc_remove=rm_cc or None,
+ depends_on_add=add_deps or None,
+ depends_on_remove=rm_deps or None,
+ depends_on_set=set_deps,
+ groups_add=add_groups or None,
+ groups_remove=rm_groups or None,
+ keywords_add=add_key or None,
+ keywords_remove=rm_key or None,
+ keywords_set=set_key,
+ op_sys=opt.os or None,
+ platform=opt.arch or None,
+ priority=opt.priority or None,
+ qa_contact=opt.qa_contact or None,
+ severity=opt.severity or None,
+ status=status,
+ summary=opt.summary or None,
+ version=opt.version or None,
+ reset_assigned_to=opt.reset_assignee or None,
+ reset_qa_contact=opt.reset_qa_contact or None,
+ resolution=opt.close or None,
+ target_release=opt.target_release or None,
+ target_milestone=opt.target_milestone or None,
+ dupe_of=opt.dupeid or None,
+ fixed_in=opt.fixed_in or None,
+ whiteboard=set_wb and set_wb[0] or None,
+ devel_whiteboard=set_devwb and set_devwb[0] or None,
+ internal_whiteboard=set_intwb and set_intwb[0] or None,
+ qa_whiteboard=set_qawb and set_qawb[0] or None,
+ sub_component=opt.sub_component or None,
+ alias=opt.alias or None,
+ flags=flags or None,
+ comment_tags=opt.comment_tag or None,
+ )
+
+ # We make this a little convoluted to facilitate unit testing
+ wbmap = {
+ "whiteboard": (add_wb, rm_wb),
+ "internal_whiteboard": (add_intwb, rm_intwb),
+ "qa_whiteboard": (add_qawb, rm_qawb),
+ "devel_whiteboard": (add_devwb, rm_devwb),
+ }
+
+ for k, v in wbmap.copy().items():
+ if not v[0] and not v[1]:
+ del(wbmap[k])
+
+ _merge_field_opts(update, opt, parser)
+
+ log.debug("update bug dict=%s", update)
+ log.debug("update whiteboard dict=%s", wbmap)
+
+ if not any([update, wbmap, add_tags, rm_tags]):
+ parser.error("'modify' command requires additional arguments")
+
+ if opt.test_return_result:
+ return (update, wbmap, add_tags, rm_tags)
+
+ if add_tags or rm_tags:
+ ret = bz.update_tags(bugid_list,
+ tags_add=add_tags, tags_remove=rm_tags)
+ log.debug("bz.update_tags returned=%s", ret)
+ if update:
+ ret = bz.update_bugs(bugid_list, update)
+ log.debug("bz.update_bugs returned=%s", ret)
+
+ if not wbmap:
+ return
+
+ # Now for the things we can't blindly batch.
+ # Being able to prepend/append to whiteboards, which are just
+ # plain string values, is an old rhbz semantic that we try to maintain
+ # here. This is a bit weird for traditional bugzilla XMLRPC
+ log.debug("Adjusting whiteboard fields one by one")
+ for bug in bz.getbugs(bugid_list):
+ for wb, (add_list, rm_list) in wbmap.items():
+ for tag in add_list:
+ newval = getattr(bug, wb) or ""
+ if newval:
+ newval += " "
+ newval += tag
+ bz.update_bugs([bug.id],
+ bz.build_update(**{wb: newval}))
+
+ for tag in rm_list:
+ newval = (getattr(bug, wb) or "").split()
+ for t in newval[:]:
+ if t == tag:
+ newval.remove(t)
+ bz.update_bugs([bug.id],
+ bz.build_update(**{wb: " ".join(newval)}))
+
+
+def _do_get_attach(bz, opt):
+ for bug in bz.getbugs(opt.getall):
+ opt.get += bug.get_attachment_ids()
+
+ for attid in set(opt.get):
+ att = bz.openattachment(attid)
+ outfile = open_without_clobber(att.name, "wb")
+ data = att.read(4096)
+ while data:
+ outfile.write(data)
+ data = att.read(4096)
+ print("Wrote %s" % outfile.name)
+
+ return
+
+
+def _do_set_attach(bz, opt, parser):
+ if not opt.ids:
+ parser.error("Bug ID must be specified for setting attachments")
+
+ if sys.stdin.isatty():
+ if not opt.file:
+ parser.error("--file must be specified")
+ fileobj = open(opt.file, "rb")
+ else:
+ # piped input on stdin
+ if not opt.desc:
+ parser.error("--description must be specified if passing "
+ "file on stdin")
+
+ fileobj = tempfile.NamedTemporaryFile(prefix="bugzilla-attach.")
+ data = sys.stdin.read(4096)
+
+ while data:
+ fileobj.write(data.encode(locale.getpreferredencoding()))
+ data = sys.stdin.read(4096)
+ fileobj.seek(0)
+
+ kwargs = {}
+ if opt.file:
+ kwargs["filename"] = os.path.basename(opt.file)
+ if opt.type:
+ kwargs["contenttype"] = opt.type
+ if opt.type in ["text/x-patch"]:
+ kwargs["ispatch"] = True
+ if opt.comment:
+ kwargs["comment"] = opt.comment
+ desc = opt.desc or os.path.basename(fileobj.name)
+
+ # Upload attachments
+ for bugid in opt.ids:
+ attid = bz.attachfile(bugid, fileobj, desc, **kwargs)
+ print("Created attachment %i on bug %s" % (attid, bugid))
+
+
+#################
+# Main handling #
+#################
+
+def _make_bz_instance(opt):
+ """
+ Build the Bugzilla instance we will use
+ """
+ if opt.bztype != 'auto':
+ log.info("Explicit --bztype is no longer supported, ignoring")
+
+ cookiefile = None
+ tokenfile = None
+ if opt.cache_credentials:
+ cookiefile = opt.cookiefile or -1
+ tokenfile = opt.tokenfile or -1
+
+ bz = bugzilla.Bugzilla(
+ url=opt.bugzilla,
+ cookiefile=cookiefile,
+ tokenfile=tokenfile,
+ sslverify=opt.sslverify,
+ cert=opt.cert)
+ return bz
+
+
+def _handle_login(opt, action, bz):
+ """
+ Handle all login related bits
+ """
+ is_login_command = (action == 'login')
+
+ do_interactive_login = (is_login_command or
+ opt.login or opt.username or opt.password)
+ username = getattr(opt, "pos_username", None) or opt.username
+ password = getattr(opt, "pos_password", None) or opt.password
+
+ try:
+ if do_interactive_login:
+ if bz.url:
+ print("Logging into %s" % urlparse(bz.url)[1])
+ bz.interactive_login(username, password)
+ except bugzilla.BugzillaError as e:
+ print(str(e))
+ sys.exit(1)
+
+ if opt.ensure_logged_in and not bz.logged_in:
+ print("--ensure-logged-in passed but you aren't logged in to %s" %
+ bz.url)
+ sys.exit(1)
+
+ if is_login_command:
+ msg = "Login successful."
+ if bz.cookiefile or bz.tokenfile:
+ msg = "Login successful, token cache updated."
+
+ print(msg)
+ sys.exit(0)
+
+
+def _main(unittest_bz_instance):
+ parser = setup_parser()
+ opt = parser.parse_args()
+ action = opt.command
+ setup_logging(opt.debug, opt.verbose)
+
+ log.debug("Launched with command line: %s", " ".join(sys.argv))
+
+ # Connect to bugzilla
+ log.info('Connecting to %s', opt.bugzilla)
+
+ if unittest_bz_instance:
+ bz = unittest_bz_instance
+ else:
+ bz = _make_bz_instance(opt)
+
+ # Handle login options
+ _handle_login(opt, action, bz)
+
+
+ ###########################
+ # Run the actual commands #
+ ###########################
+
+ if hasattr(opt, "outputformat"):
+ if not opt.outputformat and opt.output not in ['raw', None]:
+ opt.outputformat = _convert_to_outputformat(opt.output)
+
+ buglist = []
+ if action == 'info':
+ if not (opt.products or
+ opt.components or
+ opt.component_owners or
+ opt.versions):
+ parser.error("'info' command requires additional arguments")
+
+ _do_info(bz, opt)
+
+ elif action == 'query':
+ buglist = _do_query(bz, opt, parser)
+ if opt.test_return_result:
+ return buglist
+
+ elif action == 'new':
+ buglist = _do_new(bz, opt, parser)
+ if opt.test_return_result:
+ return buglist
+
+ elif action == 'attach':
+ if opt.get or opt.getall:
+ if opt.ids:
+ parser.error("Bug IDs '%s' not used for "
+ "getting attachments" % opt.ids)
+ _do_get_attach(bz, opt)
+ else:
+ _do_set_attach(bz, opt, parser)
+
+ elif action == 'modify':
+ modout = _do_modify(bz, parser, opt)
+ if opt.test_return_result:
+ return modout
+ else:
+ raise RuntimeError("Unexpected action '%s'" % action)
+
+ # If we're doing new/query/modify, output our results
+ if action in ['new', 'query']:
+ _format_output(bz, opt, buglist)
+
+
+def main(unittest_bz_instance=None):
+ try:
+ try:
+ return _main(unittest_bz_instance)
+ except (Exception, KeyboardInterrupt):
+ log.debug("", exc_info=True)
+ raise
+ except (Fault, bugzilla.BugzillaError) as e:
+ print("\nServer error: %s" % str(e))
+ sys.exit(3)
+ except requests.exceptions.SSLError as e:
+ # Give SSL recommendations
+ print("SSL error: %s" % e)
+ print("\nIf you trust the remote server, you can work "
+ "around this error with:\n"
+ " bugzilla --nosslverify ...")
+ sys.exit(4)
+ except (socket.error,
+ requests.exceptions.HTTPError,
+ requests.exceptions.ConnectionError,
+ ProtocolError) as e:
+ print("\nConnection lost/failed: %s" % str(e))
+ sys.exit(2)
+
+
+def cli():
+ try:
+ main()
+ except KeyboardInterrupt:
+ log.debug("", exc_info=True)
+ print("\nExited at user request.")
+ sys.exit(1)
diff --git a/scripts/bugzilla/apiversion.py b/scripts/bugzilla/apiversion.py
new file mode 100644
index 0000000000..4e6e2c1810
--- /dev/null
+++ b/scripts/bugzilla/apiversion.py
@@ -0,0 +1,11 @@
+#
+# Copyright (C) 2014 Red Hat Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+version = "2.2.0.dev0"
+__version__ = version
diff --git a/scripts/bugzilla/base.py b/scripts/bugzilla/base.py
new file mode 100644
index 0000000000..483b0ee52a
--- /dev/null
+++ b/scripts/bugzilla/base.py
@@ -0,0 +1,1856 @@
+# base.py - the base classes etc. for a Python interface to bugzilla
+#
+# Copyright (C) 2007, 2008, 2009, 2010 Red Hat Inc.
+# Author: Will Woods <wwoods@redhat.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+import collections
+import getpass
+import locale
+from logging import getLogger
+import os
+import sys
+
+from io import BytesIO
+
+# pylint: disable=import-error
+if sys.version_info[0] >= 3:
+ # pylint: disable=no-name-in-module
+ from configparser import SafeConfigParser
+ from http.cookiejar import LoadError, MozillaCookieJar
+ from urllib.parse import urlparse, parse_qsl
+ from xmlrpc.client import Binary, Fault
+else:
+ from ConfigParser import SafeConfigParser
+ from cookielib import LoadError, MozillaCookieJar
+ from urlparse import urlparse, parse_qsl
+ from xmlrpclib import Binary, Fault
+# pylint: enable=import-error
+
+
+from .apiversion import __version__
+from .bug import Bug, User
+from .transport import BugzillaError, _BugzillaServerProxy, _RequestsTransport
+
+
+log = getLogger(__name__)
+
+mimemagic = None
+
+
+def _detect_filetype(fname):
+ global mimemagic
+
+ if mimemagic is None:
+ try:
+ # pylint: disable=import-error
+ import magic
+ mimemagic = magic.open(getattr(magic, "MAGIC_MIME_TYPE", 16))
+ mimemagic.load()
+ except ImportError as e:
+ log.debug("Could not load python-magic: %s", e)
+ mimemagic = None
+ if not mimemagic:
+ return None
+
+ if not os.path.isabs(fname):
+ return None
+
+ try:
+ return mimemagic.file(fname)
+ except Exception as e:
+ log.debug("Could not detect content_type: %s", e)
+ return None
+
+
+def _nested_update(d, u):
+ # Helper for nested dict update()
+ # https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
+ for k, v in list(u.items()):
+ if isinstance(v, collections.Mapping):
+ d[k] = _nested_update(d.get(k, {}), v)
+ else:
+ d[k] = v
+ return d
+
+
+def _default_auth_location(filename):
+ """
+ Determine auth location for filename, like 'bugzillacookies'. If
+ old style ~/.bugzillacookies exists, we use that, otherwise we
+ use ~/.cache/python-bugzilla/bugzillacookies. Same for bugzillatoken
+ """
+ homepath = os.path.expanduser("~/.%s" % filename)
+ xdgpath = os.path.expanduser("~/.cache/python-bugzilla/%s" % filename)
+ if os.path.exists(xdgpath):
+ return xdgpath
+ if os.path.exists(homepath):
+ return homepath
+
+ if not os.path.exists(os.path.dirname(xdgpath)):
+ os.makedirs(os.path.dirname(xdgpath), 0o700)
+ return xdgpath
+
+
+def _build_cookiejar(cookiefile):
+ cj = MozillaCookieJar(cookiefile)
+ if cookiefile is None:
+ return cj
+ if not os.path.exists(cookiefile):
+ # Make sure a new file has correct permissions
+ open(cookiefile, 'a').close()
+ os.chmod(cookiefile, 0o600)
+ cj.save()
+ return cj
+
+ try:
+ cj.load()
+ return cj
+ except LoadError:
+ raise BugzillaError("cookiefile=%s not in Mozilla format" %
+ cookiefile)
+
+
+_default_configpaths = [
+ '/etc/bugzillarc',
+ '~/.bugzillarc',
+ '~/.config/python-bugzilla/bugzillarc',
+]
+
+
+def _open_bugzillarc(configpaths=-1):
+ if configpaths == -1:
+ configpaths = _default_configpaths[:]
+
+ # pylint: disable=protected-access
+ configpaths = [os.path.expanduser(p) for p in
+ Bugzilla._listify(configpaths)]
+ # pylint: enable=protected-access
+ cfg = SafeConfigParser()
+ read_files = cfg.read(configpaths)
+ if not read_files:
+ return
+
+ log.info("Found bugzillarc files: %s", read_files)
+ return cfg
+
+
+class _FieldAlias(object):
+ """
+ Track API attribute names that differ from what we expose to users.
+
+ For example, originally 'short_desc' was the name of the property that
+ maps to 'summary' on modern bugzilla. We want pre-existing API users
+ to be able to continue to use Bug.short_desc, and
+ query({"short_desc": "foo"}). This class tracks that mapping.
+
+ @oldname: The old attribute name
+ @newname: The modern attribute name
+ @is_api: If True, use this mapping for values sent to the xmlrpc API
+ (like the query example)
+ @is_bug: If True, use this mapping for Bug attribute names.
+ """
+ def __init__(self, newname, oldname, is_api=True, is_bug=True):
+ self.newname = newname
+ self.oldname = oldname
+ self.is_api = is_api
+ self.is_bug = is_bug
+
+
+class _BugzillaAPICache(object):
+ """
+ Helper class that holds cached API results for things like products,
+ components, etc.
+ """
+ def __init__(self):
+ self.products = []
+ self.component_names = {}
+ self.bugfields = []
+
+
+class Bugzilla(object):
+ """
+ The main API object. Connects to a bugzilla instance over XMLRPC, and
+ provides wrapper functions to simplify dealing with API calls.
+
+ The most common invocation here will just be with just a URL:
+
+ bzapi = Bugzilla("http://bugzilla.example.com")
+
+ If you have previously logged into that URL, and have cached login
+ cookies/tokens, you will automatically be logged in. Otherwise to
+ log in, you can either pass auth options to __init__, or call a login
+ helper like interactive_login().
+
+ If you are not logged in, you won't be able to access restricted data like
+ user email, or perform write actions like bug create/update. But simple
+ queries will work correctly.
+
+ If you are unsure if you are logged in, you can check the .logged_in
+ property.
+
+ Another way to specify auth credentials is via a 'bugzillarc' file.
+ See readconfig() documentation for details.
+ """
+
+ # bugzilla version that the class is targeting. filled in by
+ # subclasses
+ bz_ver_major = 0
+ bz_ver_minor = 0
+
+ @staticmethod
+ def url_to_query(url):
+ '''
+ Given a big huge bugzilla query URL, returns a query dict that can
+ be passed along to the Bugzilla.query() method.
+ '''
+ q = {}
+
+ # pylint: disable=unpacking-non-sequence
+ (ignore, ignore, path,
+ ignore, query, ignore) = urlparse(url)
+
+ base = os.path.basename(path)
+ if base not in ('buglist.cgi', 'query.cgi'):
+ return {}
+
+ for (k, v) in parse_qsl(query):
+ if k not in q:
+ q[k] = v
+ elif isinstance(q[k], list):
+ q[k].append(v)
+ else:
+ oldv = q[k]
+ q[k] = [oldv, v]
+
+ # Handle saved searches
+ if base == "buglist.cgi" and "namedcmd" in q and "sharer_id" in q:
+ q = {
+ "sharer_id": q["sharer_id"],
+ "savedsearch": q["namedcmd"],
+ }
+
+ return q
+
+ @staticmethod
+ def fix_url(url):
+ """
+ Turn passed url into a bugzilla XMLRPC web url
+ """
+ if '://' not in url:
+ log.debug('No scheme given for url, assuming https')
+ url = 'https://' + url
+ if url.count('/') < 3:
+ log.debug('No path given for url, assuming /xmlrpc.cgi')
+ url = url + '/xmlrpc.cgi'
+ return url
+
+ @staticmethod
+ def _listify(val):
+ if val is None:
+ return val
+ if isinstance(val, list):
+ return val
+ return [val]
+
+
+ def __init__(self, url=-1, user=None, password=None, cookiefile=-1,
+ sslverify=True, tokenfile=-1, use_creds=True, api_key=None,
+ cert=None, authtype=None):
+ """
+ :param url: The bugzilla instance URL, which we will connect
+ to immediately. Most users will want to specify this at
+ __init__ time, but you can defer connecting by passing
+ url=None and calling connect(URL) manually
+ :param user: optional username to connect with
+ :param password: optional password for the connecting user
+ :param cert: optional certificate file for client side certificate
+ authentication
+ :param cookiefile: Location to cache the login session cookies so you
+ don't have to keep specifying username/password. Bugzilla 5+ will
+ use tokens instead of cookies.
+ If -1, use the default path. If None, don't use or save
+ any cookiefile.
+ :param sslverify: Set this to False to skip SSL hostname and CA
+ validation checks, like out of date certificate
+ :param tokenfile: Location to cache the API login token so you
+ don't have to keep specifying username/password.
+ If -1, use the default path. If None, don't use
+ or save any tokenfile.
+ :param use_creds: If False, this disables cookiefile, tokenfile,
+ and any bugzillarc reading. This overwrites any tokenfile
+ or cookiefile settings
+ :param sslverify: Maps to 'requests' sslverify parameter. Set to
+ False to disable SSL verification, but it can also be a path
+ to file or directory for custom certs.
+ :param api_key: A bugzilla API key, used instead of username/password
+ :param authtype: Authentication type: empty or 'basic'
+ """
+ if url == -1:
+ raise TypeError("Specify a valid bugzilla url, or pass url=None")
+
+ # Settings the user might want to tweak
+ self.user = user or ''
+ self.password = password or ''
+ self.api_key = api_key
+ self.cert = cert or ''
+ self.url = ''
+ self.authtype = authtype or ''
+
+ self._proxy = None
+ self._transport = None
+ self._cookiejar = None
+ self._sslverify = sslverify
+ self._cache = _BugzillaAPICache()
+ self._bug_autorefresh = False
+
+ self._field_aliases = []
+ self._init_field_aliases()
+
+ self.configpath = _default_configpaths[:]
+ if not use_creds:
+ cookiefile = None
+ tokenfile = None
+ self.configpath = []
+
+ if cookiefile == -1:
+ cookiefile = _default_auth_location("bugzillacookies")
+ if tokenfile == -1:
+ tokenfile = _default_auth_location("bugzillatoken")
+ log.debug("Using tokenfile=%s", tokenfile)
+ self.cookiefile = cookiefile
+ self.tokenfile = tokenfile
+
+ if url:
+ self.connect(url)
+ self._init_class_from_url()
+ self._init_class_state()
+
+ def _init_class_from_url(self):
+ """
+ Detect if we should use RHBugzilla class, and if so, set it
+ """
+ from bugzilla import RHBugzilla
+ if isinstance(self, RHBugzilla):
+ return
+
+ c = None
+ if "bugzilla.redhat.com" in self.url:
+ log.info("Using RHBugzilla for URL containing bugzilla.redhat.com")
+ c = RHBugzilla
+ else:
+ try:
+ extensions = self._proxy.Bugzilla.extensions()
+ if "RedHat" in extensions.get('extensions', {}):
+ log.info("Found RedHat bugzilla extension, "
+ "using RHBugzilla")
+ c = RHBugzilla
+ except Fault:
+ log.debug("Failed to fetch bugzilla extensions", exc_info=True)
+
+ if not c:
+ return
+
+ self.__class__ = c
+
+ def _init_class_state(self):
+ """
+ Hook for subclasses to do any __init__ time setup
+ """
+ pass
+
+ def _init_field_aliases(self):
+ # List of field aliases. Maps old style RHBZ parameter
+ # names to actual upstream values. Used for createbug() and
+ # query include_fields at least.
+ self._add_field_alias('summary', 'short_desc')
+ self._add_field_alias('description', 'comment')
+ self._add_field_alias('platform', 'rep_platform')
+ self._add_field_alias('severity', 'bug_severity')
+ self._add_field_alias('status', 'bug_status')
+ self._add_field_alias('id', 'bug_id')
+ self._add_field_alias('blocks', 'blockedby')
+ self._add_field_alias('blocks', 'blocked')
+ self._add_field_alias('depends_on', 'dependson')
+ self._add_field_alias('creator', 'reporter')
+ self._add_field_alias('url', 'bug_file_loc')
+ self._add_field_alias('dupe_of', 'dupe_id')
+ self._add_field_alias('dupe_of', 'dup_id')
+ self._add_field_alias('comments', 'longdescs')
+ self._add_field_alias('creation_time', 'opendate')
+ self._add_field_alias('creation_time', 'creation_ts')
+ self._add_field_alias('whiteboard', 'status_whiteboard')
+ self._add_field_alias('last_change_time', 'delta_ts')
+
def _get_user_agent(self):
    # HTTP User-Agent value sent by the transport,
    # e.g. "python-bugzilla/<module version>"
    return 'python-bugzilla/%s' % __version__
user_agent = property(_get_user_agent)
+
+
+ ###################
+ # Private helpers #
+ ###################
+
+ def _check_version(self, major, minor):
+ """
+ Check if the detected bugzilla version is >= passed major/minor pair.
+ """
+ if major < self.bz_ver_major:
+ return True
+ if (major == self.bz_ver_major and minor <= self.bz_ver_minor):
+ return True
+ return False
+
def _add_field_alias(self, *args, **kwargs):
    # Register a single _FieldAlias; see _init_field_aliases for the
    # standard set. Args are passed straight to _FieldAlias().
    self._field_aliases.append(_FieldAlias(*args, **kwargs))
+
+ def _get_bug_aliases(self):
+ return [(f.newname, f.oldname)
+ for f in self._field_aliases if f.is_bug]
+
+ def _get_api_aliases(self):
+ return [(f.newname, f.oldname)
+ for f in self._field_aliases if f.is_api]
+
+
+ ###################
+ # Cookie handling #
+ ###################
+
def _getcookiefile(self):
    '''cookiefile is the file that bugzilla session cookies are loaded
    and saved from.
    '''
    return self._cookiejar.filename

def _delcookiefile(self):
    # Dropping the jar forgets any loaded session cookies
    self._cookiejar = None

def _setcookiefile(self, cookiefile):
    if (self._cookiejar and cookiefile == self._cookiejar.filename):
        # Already using this exact cookiefile, nothing to do
        return

    if self._proxy is not None:
        # The transport captured the jar at connect() time; swapping it
        # now would desync, so force an explicit disconnect first.
        raise RuntimeError("Can't set cookies with an open connection, "
                           "disconnect() first.")

    log.debug("Using cookiefile=%s", cookiefile)
    self._cookiejar = _build_cookiejar(cookiefile)

cookiefile = property(_getcookiefile, _setcookiefile, _delcookiefile)
+
+
+ #############################
+ # Login/connection handling #
+ #############################
+
def readconfig(self, configpath=None):
    """
    :param configpath: Optional bugzillarc path to read, instead of
        the default list.

    This function is called automatically from Bugzilla connect(), which
    is called at __init__ if a URL is passed. Calling it manually is
    just for passing in a non-standard configpath.

    The locations for the bugzillarc file are preferred in this order:

        ~/.config/python-bugzilla/bugzillarc
        ~/.bugzillarc
        /etc/bugzillarc

    It has content like:
      [bugzilla.yoursite.com]
      user = username
      password = password
    Or
      [bugzilla.yoursite.com]
      api_key = key

    The file can have multiple sections for different bugzilla instances.
    A 'url' field in the [DEFAULT] section can be used to set a default
    URL for the bugzilla command line tool.

    Be sure to set appropriate permissions on bugzillarc if you choose to
    store your password in it!
    """
    cfg = _open_bugzillarc(configpath or self.configpath)
    if not cfg:
        # No config file found/readable; leave current settings alone
        return

    section = ""
    log.debug("bugzillarc: Searching for config section matching %s",
              self.url)
    for s in sorted(cfg.sections()):
        # Substring match - prefer the longest match found
        # NOTE(review): sorted() is alphabetical; "longest match wins"
        # holds because a longer prefix-extension sorts later — confirm
        if s in self.url:
            log.debug("bugzillarc: Found matching section: %s", s)
            section = s

    if not section:
        log.debug("bugzillarc: No section found")
        return

    # Copy recognized keys onto the instance; unknown keys are ignored
    for key, val in cfg.items(section):
        if key == "api_key":
            log.debug("bugzillarc: setting api_key")
            self.api_key = val
        elif key == "user":
            log.debug("bugzillarc: setting user=%s", val)
            self.user = val
        elif key == "password":
            # Never log the password value itself
            log.debug("bugzillarc: setting password")
            self.password = val
        elif key == "cert":
            log.debug("bugzillarc: setting cert")
            self.cert = val
        elif key == "authtype":
            log.debug("bugzillarc: setting authtype=%s", val)
            self.authtype = val
        else:
            log.debug("bugzillarc: unknown key=%s", key)
+
+ def _set_bz_version(self, version):
+ try:
+ self.bz_ver_major, self.bz_ver_minor = [
+ int(i) for i in version.split(".")[0:2]]
+ except Exception:
+ log.debug("version doesn't match expected format X.Y.Z, "
+ "assuming 5.0", exc_info=True)
+ self.bz_ver_major = 5
+ self.bz_ver_minor = 0
+
def connect(self, url=None):
    '''
    Connect to the bugzilla instance with the given url. This is
    called by __init__ if a URL is passed. Or it can be called manually
    at any time with a passed URL.

    This will also read any available config files (see readconfig()),
    which may set 'user' and 'password', and others.

    If 'user' and 'password' are both set, we'll run login(). Otherwise
    you'll have to login() yourself before some methods will work.
    '''
    if self._transport:
        # Re-connecting: drop the old transport/proxy/cache first
        self.disconnect()

    if url is None and self.url:
        url = self.url
    # fix_url presumably normalizes the URL (defined outside this view)
    url = self.fix_url(url)

    self.url = url
    # we've changed URLs - reload config
    self.readconfig()

    self._transport = _RequestsTransport(
        url, self._cookiejar, sslverify=self._sslverify, cert=self.cert)
    if self.authtype == 'basic' and self.user and self.password:
        # HTTP basic auth on every request, instead of cookie login
        self._transport.session.auth = (self.user, self.password)
    self._transport.user_agent = self.user_agent
    self._proxy = _BugzillaServerProxy(url, self.tokenfile,
                                       self._transport)

    if (self.authtype == '' and self.user and self.password):
        log.info("user and password present - doing login()")
        self.login()

    if self.api_key:
        log.debug("using API key")
        self._proxy.use_api_key(self.api_key)

    # Probe the server so _check_version() has real data to work with
    version = self._proxy.Bugzilla.version()["version"]
    log.debug("Bugzilla version string: %s", version)
    self._set_bz_version(version)
+
def disconnect(self):
    '''
    Disconnect from the given bugzilla instance.

    Drops the XMLRPC proxy and transport, and resets the API cache so
    stale product/field data is not reused across connections.
    '''
    self._proxy = None
    self._transport = None
    self._cache = _BugzillaAPICache()
+
+
+ def _login(self, user, password):
+ '''Backend login method for Bugzilla3'''
+ return self._proxy.User.login({'login': user, 'password': password})
+
def _logout(self):
    '''Backend logout method for Bugzilla3'''
    return self._proxy.User.logout()
+
def login(self, user=None, password=None):
    '''Attempt to log in using the given username and password. Subsequent
    method calls will use this username and password. Returns False if
    login fails, otherwise returns some kind of login info - typically
    either a numeric userid, or a dict of user info.

    If user is not set, the value of Bugzilla.user will be used. If *that*
    is not set, ValueError will be raised. If login fails, BugzillaError
    will be raised.

    This method will be called implicitly at the end of connect() if user
    and password are both set. So under most circumstances you won't need
    to call this yourself.
    '''
    if self.api_key:
        # API key auth and session login are mutually exclusive
        raise ValueError("cannot login when using an API key")

    if user:
        self.user = user
    if password:
        self.password = password

    if not self.user:
        raise ValueError("missing username")
    if not self.password:
        raise ValueError("missing password")

    try:
        ret = self._login(self.user, self.password)
        # Session is established; don't keep the plaintext password around
        self.password = ''
        log.info("login successful for user=%s", self.user)
        return ret
    except Fault as e:
        raise BugzillaError("Login failed: %s" % str(e.faultString))
+
def interactive_login(self, user=None, password=None, force=False):
    """
    Helper method to handle login for this bugzilla instance.

    :param user: bugzilla username. If not specified, prompt for it.
    :param password: bugzilla password. If not specified, prompt for it.
    :param force: Unused
    """
    ignore = force  # kept only for interface compatibility
    log.debug('Calling interactive_login')

    if not user:
        sys.stdout.write('Bugzilla Username: ')
        sys.stdout.flush()
        user = sys.stdin.readline().strip()
    if not password:
        # getpass suppresses terminal echo for the password prompt
        password = getpass.getpass('Bugzilla Password: ')

    log.info('Logging in... ')
    self.login(user, password)
    log.info('Authorization cookie received.')
+
def logout(self):
    '''Log out of bugzilla. Drops server connection and user info, and
    destroys authentication cookies.'''
    self._logout()
    self.disconnect()
    # Forget cached credentials as well
    self.user = ''
    self.password = ''
+
@property
def logged_in(self):
    """
    This is True if this instance is logged in else False.

    We test if this session is authenticated by calling the User.get()
    XMLRPC method with ids set. Logged-out users cannot pass the 'ids'
    parameter and will result in a 505 error. If we tried to login with a
    token, but the token was incorrect or expired, the server returns a
    32000 error.

    For Bugzilla 5 and later, a new method, User.valid_login is available
    to test the validity of the token. However, this will require that the
    username be cached along with the token in order to work effectively in
    all scenarios and is not currently used. For more information, refer to
    the following url.

    http://bugzilla.readthedocs.org/en/latest/api/core/v1/user.html#valid-login
    """
    try:
        self._proxy.User.get({'ids': []})
        return True
    except Fault as e:
        # 505 = not-logged-in, 32000 = bad/expired token; anything
        # else is a real error and is re-raised
        if e.faultCode == 505 or e.faultCode == 32000:
            return False
        raise e
+
+
+ ######################
+ # Bugfields querying #
+ ######################
+
+ def _getbugfields(self):
+ '''
+ Get the list of valid fields for Bug objects
+ '''
+ r = self._proxy.Bug.fields({'include_fields': ['name']})
+ return [f['name'] for f in r['fields']]
+
def getbugfields(self, force_refresh=False):
    '''
    Calls getBugFields, which returns a list of fields in each bug
    for this bugzilla instance. This can be used to set the list of attrs
    on the Bug object.

    :param force_refresh: If True, contact the server even when the
        cache is already populated.
    '''
    if force_refresh or not self._cache.bugfields:
        log.debug("Refreshing bugfields")
        self._cache.bugfields = self._getbugfields()
        self._cache.bugfields.sort()
        log.debug("bugfields = %s", self._cache.bugfields)

    return self._cache.bugfields
# Attribute-style access to the (sorted, cached) field list
bugfields = property(fget=lambda self: self.getbugfields(),
                     fdel=lambda self: setattr(self, '_bugfields', None))
+
+
+ ####################
+ # Product querying #
+ ####################
+
def product_get(self, ids=None, names=None,
                include_fields=None, exclude_fields=None,
                ptype=None):
    """
    Raw wrapper around Product.get
    https://bugzilla.readthedocs.io/en/latest/api/core/v1/product.html#get-product

    This does not perform any caching like other product API calls.
    If ids, names, or ptype is not specified, we default to
    ptype=accessible for historical reasons

    @ids: List of product IDs to lookup
    @names: List of product names to lookup
    @ptype: Either 'accessible', 'selectable', or 'enterable'. If
        specified, we return data for all those
    @include_fields: Only include these fields in the output
    @exclude_fields: Do not include these fields in the output
    """
    if ids is None and names is None and ptype is None:
        ptype = "accessible"

    if ptype:
        # Resolve the ptype to a concrete id list via the matching
        # Product.get_*_products API, then fall through to Product.get
        raw = None
        if ptype == "accessible":
            raw = self._proxy.Product.get_accessible_products()
        elif ptype == "selectable":
            raw = self._proxy.Product.get_selectable_products()
        elif ptype == "enterable":
            raw = self._proxy.Product.get_enterable_products()

        if raw is None:
            raise RuntimeError("Unknown ptype=%s" % ptype)
        ids = raw['ids']
        log.debug("For ptype=%s found ids=%s", ptype, ids)

    kwargs = {}
    if ids:
        kwargs["ids"] = self._listify(ids)
    if names:
        kwargs["names"] = self._listify(names)
    if include_fields:
        kwargs["include_fields"] = include_fields
    if exclude_fields:
        kwargs["exclude_fields"] = exclude_fields

    log.debug("Calling Product.get with: %s", kwargs)
    ret = self._proxy.Product.get(kwargs)
    return ret['products']
+
def refresh_products(self, **kwargs):
    """
    Refresh a product's cached info. Basically calls product_get
    with the passed arguments, and tries to intelligently update
    our product cache.

    For example, if we already have cached info for product=foo,
    and you pass in names=["bar", "baz"], the new cache will have
    info for products foo, bar, baz. Individual product fields are
    also updated.
    """
    for product in self.product_get(**kwargs):
        updated = False
        for current in self._cache.products[:]:
            # Skip unless either the id or the name matches the
            # fetched product (the mismatched defaults -1/-2 ensure
            # two missing fields never compare equal)
            if (current.get("id", -1) != product.get("id", -2) and
                current.get("name", -1) != product.get("name", -2)):
                continue

            # Merge new fields into the existing cache entry in place
            _nested_update(current, product)
            updated = True
            break
        if not updated:
            self._cache.products.append(product)
+
def getproducts(self, force_refresh=False, **kwargs):
    """
    Query all products and return the raw dict info. Takes all the
    same arguments as product_get.

    On first invocation this will contact bugzilla and internally
    cache the results. Subsequent getproducts calls or accesses to
    self.products will return this cached data only.

    :param force_refresh: force refreshing via refresh_products()
    """
    if force_refresh or not self._cache.products:
        self.refresh_products(**kwargs)
    return self._cache.products

products = property(
    fget=lambda self: self.getproducts(),
    fdel=lambda self: setattr(self, '_products', None),
    doc="Helper for accessing the products cache. If nothing "
        "has been cached yet, this calls getproducts()")
+
+
+ #######################
+ # components querying #
+ #######################
+
+ def _lookup_product_in_cache(self, productname):
+ prodstr = isinstance(productname, str) and productname or None
+ prodint = isinstance(productname, int) and productname or None
+ for proddict in self._cache.products:
+ if prodstr == proddict.get("name", -1):
+ return proddict
+ if prodint == proddict.get("id", "nope"):
+ return proddict
+ return {}
+
def getcomponentsdetails(self, product, force_refresh=False):
    """
    Wrapper around Product.get(include_fields=["components"]),
    returning only the "components" data for the requested product,
    slightly reworked to a dict mapping of components.name: components,
    for historical reasons.

    This uses the product cache, but will update it if the product
    isn't found or "components" isn't cached for the product.

    In cases like bugzilla.redhat.com where there are tons of
    components for some products, this API will time out. You
    should use product_get instead.
    """
    proddict = self._lookup_product_in_cache(product)

    if (force_refresh or not proddict or "components" not in proddict):
        # Cache miss (or no component data yet): fetch it
        self.refresh_products(names=[product],
                              include_fields=["name", "id", "components"])
        proddict = self._lookup_product_in_cache(product)

    # Rework the component list into a name-keyed dict
    ret = {}
    for compdict in proddict["components"]:
        ret[compdict["name"]] = compdict
    return ret
+
def getcomponentdetails(self, product, component, force_refresh=False):
    """
    Helper for accessing a single component's info. This is a wrapper
    around getcomponentsdetails, see that for explanation
    """
    details = self.getcomponentsdetails(product, force_refresh)
    return details[component]
+
def getcomponents(self, product, force_refresh=False):
    """
    Return a list of component names for the passed product.

    This can be implemented with Product.get, but behind the
    scenes it uses Bug.legal_values. Reason being that on bugzilla
    instances with tons of components, like bugzilla.redhat.com
    Product=Fedora for example, there's a 10x speed difference
    even with properly limited Product.get calls.

    On first invocation the value is cached, and subsequent calls
    will return the cached data.

    :param force_refresh: Force refreshing the cache, and return
        the new data
    """
    proddict = self._lookup_product_in_cache(product)
    product_id = proddict.get("id", None)

    if (force_refresh or
        product_id is None or
        product_id not in self._cache.component_names):
        # BUGFIX: this previously requested include_fields=["names", "id"];
        # "names" is not a valid Product.get field (the field is "name"),
        # which left the refreshed cache entry without the name used by
        # _lookup_product_in_cache for name-based lookups.
        self.refresh_products(names=[product],
                              include_fields=["name", "id"])
        proddict = self._lookup_product_in_cache(product)
        product_id = proddict["id"]

        opts = {'product_id': product_id, 'field': 'component'}
        log.debug("Calling Bug.legal_values with: %s", opts)
        names = self._proxy.Bug.legal_values(opts)["values"]
        self._cache.component_names[product_id] = names

    return self._cache.component_names[product_id]
+
+
+ ############################
+ # component adding/editing #
+ ############################
+
+ def _component_data_convert(self, data, update=False):
+ # Back compat for the old RH interface
+ convert_fields = [
+ ("initialowner", "default_assignee"),
+ ("initialqacontact", "default_qa_contact"),
+ ("initialcclist", "default_cc"),
+ ]
+ for old, new in convert_fields:
+ if old in data:
+ data[new] = data.pop(old)
+
+ if update:
+ names = {"product": data.pop("product"),
+ "component": data.pop("component")}
+ updates = {}
+ for k in list(data.keys()):
+ updates[k] = data.pop(k)
+
+ data["names"] = [names]
+ data["updates"] = updates
+
+
def addcomponent(self, data):
    '''
    A method to create a component in Bugzilla. Takes a dict, with the
    following elements:

    product: The product to create the component in
    component: The name of the component to create
    description: A one sentence summary of the component
    default_assignee: The bugzilla login (email address) of the initial
        owner of the component
    default_qa_contact (optional): The bugzilla login of the
        initial QA contact
    default_cc: (optional) The initial list of users to be CC'ed on
        new bugs for the component.
    is_active: (optional) If False, the component is hidden from
        the component list when filing new bugs.
    '''
    # Copy so the caller's dict isn't mutated by the field conversion
    data = data.copy()
    self._component_data_convert(data)
    log.debug("Calling Component.create with: %s", data)
    return self._proxy.Component.create(data)
+
def editcomponent(self, data):
    '''
    A method to edit a component in Bugzilla. Takes a dict, with
    mandatory elements of product, component, and initialowner.
    All other elements are optional and use the same names as the
    addcomponent() method.
    '''
    # Copy so the caller's dict isn't mutated by the field conversion
    data = data.copy()
    self._component_data_convert(data, update=True)
    log.debug("Calling Component.update with: %s", data)
    return self._proxy.Component.update(data)
+
+
+ ###################
+ # getbug* methods #
+ ###################
+
def _process_include_fields(self, include_fields, exclude_fields,
                            extra_fields):
    """
    Internal helper to process include_fields lists.

    Translates old-style field alias names (see _get_api_aliases) into
    their upstream equivalents, and returns a dict containing only the
    include/exclude/extra keys the server version supports. The lists
    are modified in place.
    """
    def _convert_fields(_in):
        if not _in:
            return _in

        # Swap any old alias name for its new upstream name
        for newname, oldname in self._get_api_aliases():
            if oldname in _in:
                _in.remove(oldname)
                if newname not in _in:
                    _in.append(newname)
        return _in

    ret = {}
    if self._check_version(4, 0):
        # include/exclude_fields are only honored on Bugzilla 4+
        if include_fields:
            include_fields = _convert_fields(include_fields)
            if "id" not in include_fields:
                # "id" is always needed to map results back to bugs
                include_fields.append("id")
            ret["include_fields"] = include_fields
        if exclude_fields:
            exclude_fields = _convert_fields(exclude_fields)
            ret["exclude_fields"] = exclude_fields
    if self._supports_getbug_extra_fields:
        # extra_fields is an RHBZ-only extension
        if extra_fields:
            ret["extra_fields"] = _convert_fields(extra_fields)
    return ret
+
def _get_bug_autorefresh(self):
    """
    This value is passed to Bug.autorefresh for all fetched bugs.
    If True, and an uncached attribute is requested from a Bug,
    the Bug will update its contents and try again.
    """
    return self._bug_autorefresh

def _set_bug_autorefresh(self, val):
    # Coerce to a strict bool so downstream checks are predictable
    self._bug_autorefresh = bool(val)
bug_autorefresh = property(_get_bug_autorefresh, _set_bug_autorefresh)
+
+
# getbug_extra_fields: Extra fields that need to be explicitly
# requested from Bug.get in order for the data to be returned.
#
# As of Dec 2012 it seems like only RH bugzilla actually has behavior
# like this, for upstream bz it returns all info for every Bug.get()
# NOTE(review): presumably overridden by the RHBugzilla subclass — the
# override is not visible in this file section.
_getbug_extra_fields = []
_supports_getbug_extra_fields = False
+
def _getbugs(self, idlist, permissive,
             include_fields=None, exclude_fields=None, extra_fields=None):
    '''
    Return a list of dicts of full bug info for each given bug id.
    bug ids that couldn't be found will return None instead of a dict.

    :param permissive: passed through to Bug.get; if set, the server
        tolerates missing/inaccessible ids instead of erroring.
    '''
    # Normalize ids to ints where possible; string aliases pass through
    oldidlist = idlist
    idlist = []
    for i in oldidlist:
        try:
            idlist.append(int(i))
        except ValueError:
            # String aliases can be passed as well
            idlist.append(i)

    extra_fields = self._listify(extra_fields or [])
    extra_fields += self._getbug_extra_fields

    getbugdata = {"ids": idlist}
    if permissive:
        getbugdata["permissive"] = 1

    getbugdata.update(self._process_include_fields(
        include_fields, exclude_fields, extra_fields))

    log.debug("Calling Bug.get with: %s", getbugdata)
    r = self._proxy.Bug.get(getbugdata)

    if self._check_version(4, 0):
        bugdict = dict([(b['id'], b) for b in r['bugs']])
    else:
        # Pre-4.0 servers nest the bug data under 'internals'
        bugdict = dict([(b['id'], b['internals']) for b in r['bugs']])

    # Rebuild the result in the caller's requested order; unfound ids
    # become None (under permissive) rather than raising
    ret = []
    for i in idlist:
        found = None
        if i in bugdict:
            found = bugdict[i]
        else:
            # Need to map an alias
            for valdict in bugdict.values():
                if i in self._listify(valdict.get("alias", None)):
                    found = valdict
                    break

        ret.append(found)

    return ret
+
+ def _getbug(self, objid, **kwargs):
+ """
+ Thin wrapper around _getbugs to handle the slight argument tweaks
+ for fetching a single bug. The main bit is permissive=False, which
+ will tell bugzilla to raise an explicit error if we can't fetch
+ that bug.
+
+ This logic is called from Bug() too
+ """
+ return self._getbugs([objid], permissive=False, **kwargs)[0]
+
def getbug(self, objid,
           include_fields=None, exclude_fields=None, extra_fields=None):
    '''Return a Bug object with the full complement of bug data
    already loaded.

    Raises if the bug cannot be fetched (see _getbug's
    permissive=False behavior).'''
    data = self._getbug(objid,
        include_fields=include_fields, exclude_fields=exclude_fields,
        extra_fields=extra_fields)
    return Bug(self, dict=data, autorefresh=self.bug_autorefresh)
+
def getbugs(self, idlist,
            include_fields=None, exclude_fields=None, extra_fields=None,
            permissive=True):
    '''Return a list of Bug objects with the full complement of bug data
    already loaded. If there's a problem getting the data for a given id,
    the corresponding item in the returned list will be None.'''
    data = self._getbugs(idlist, include_fields=include_fields,
        exclude_fields=exclude_fields, extra_fields=extra_fields,
        permissive=permissive)
    # Preserve positions: unfetchable ids map to None, not dropped
    return [(b and Bug(self, dict=b,
                       autorefresh=self.bug_autorefresh)) or None
            for b in data]
+
def get_comments(self, idlist):
    '''Returns a dictionary of bugs and comments. The comments key will
    be empty. See bugzilla docs for details'''
    payload = {'ids': idlist}
    return self._proxy.Bug.comments(payload)
+
+
+ #################
+ # query methods #
+ #################
+
def build_query(self,
                product=None,
                component=None,
                version=None,
                long_desc=None,
                bug_id=None,
                short_desc=None,
                cc=None,
                assigned_to=None,
                reporter=None,
                qa_contact=None,
                status=None,
                blocked=None,
                dependson=None,
                keywords=None,
                keywords_type=None,
                url=None,
                url_type=None,
                status_whiteboard=None,
                status_whiteboard_type=None,
                fixed_in=None,
                fixed_in_type=None,
                flag=None,
                alias=None,
                qa_whiteboard=None,
                devel_whiteboard=None,
                boolean_query=None,
                bug_severity=None,
                priority=None,
                target_release=None,
                target_milestone=None,
                emailtype=None,
                booleantype=None,
                include_fields=None,
                quicksearch=None,
                savedsearch=None,
                savedsearch_sharer_id=None,
                sub_component=None,
                tags=None,
                exclude_fields=None,
                extra_fields=None):
    """
    Build a query string from passed arguments. Will handle
    query parameter differences between various bugzilla versions.

    Most of the parameters should be self explanatory. However
    if you want to perform a complex query, and easy way is to
    create it with the bugzilla web UI, copy the entire URL it
    generates, and pass it to the static method

    Bugzilla.url_to_query

    Then pass the output to Bugzilla.query()

    For details about the specific argument formats, see the bugzilla docs:
    https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#search-bugs

    :raises RuntimeError: if the removed boolean_query/booleantype
        arguments are passed.
    """
    if boolean_query or booleantype:
        raise RuntimeError("boolean_query format is no longer supported. "
            "If you need complicated URL queries, look into "
            "query --from-url/url_to_query().")

    query = {
        "alias": alias,
        "product": self._listify(product),
        "component": self._listify(component),
        "version": version,
        "id": bug_id,
        "short_desc": short_desc,
        "bug_status": status,
        "bug_severity": bug_severity,
        "priority": priority,
        "target_release": target_release,
        "target_milestone": target_milestone,
        "tag": self._listify(tags),
        "quicksearch": quicksearch,
        "savedsearch": savedsearch,
        "sharer_id": savedsearch_sharer_id,

        # RH extensions... don't add any more. See comment below
        "sub_components": self._listify(sub_component),
    }

    def add_bool(bzkey, value, bool_id, booltype=None):
        # Encode one "advanced query" boolean chart entry per value,
        # using the fieldN-0-0/valueN-0-0/typeN-0-0 URL parameter style.
        # Returns the next free bool_id.
        value = self._listify(value)
        if value is None:
            return bool_id

        query["query_format"] = "advanced"
        for boolval in value:
            def make_bool_str(prefix):
                # pylint: disable=cell-var-from-loop
                return "%s%i-0-0" % (prefix, bool_id)

            query[make_bool_str("field")] = bzkey
            query[make_bool_str("value")] = boolval
            query[make_bool_str("type")] = booltype or "substring"

            bool_id += 1
        return bool_id

    # RH extensions that we have to maintain here for back compat,
    # but all future custom fields should be specified via
    # cli --field option, or via extending the query dict() manually.
    # No more supporting custom fields in this API
    bool_id = 0
    bool_id = add_bool("keywords", keywords, bool_id, keywords_type)
    bool_id = add_bool("blocked", blocked, bool_id)
    bool_id = add_bool("dependson", dependson, bool_id)
    bool_id = add_bool("bug_file_loc", url, bool_id, url_type)
    bool_id = add_bool("cf_fixed_in", fixed_in, bool_id, fixed_in_type)
    bool_id = add_bool("flagtypes.name", flag, bool_id)
    bool_id = add_bool("status_whiteboard",
                       status_whiteboard, bool_id, status_whiteboard_type)
    bool_id = add_bool("cf_qa_whiteboard", qa_whiteboard, bool_id)
    bool_id = add_bool("cf_devel_whiteboard", devel_whiteboard, bool_id)

    def add_email(key, value, count):
        # Encode one email match; plain key=value unless an emailtype
        # was requested, in which case the advanced emailN params are
        # used. Returns the next free email index.
        if value is None:
            return count
        if not emailtype:
            query[key] = value
            return count

        query["query_format"] = "advanced"
        query['email%i' % count] = value
        query['email%s%i' % (key, count)] = True
        query['emailtype%i' % count] = emailtype
        return count + 1

    email_count = 1
    email_count = add_email("cc", cc, email_count)
    email_count = add_email("assigned_to", assigned_to, email_count)
    email_count = add_email("reporter", reporter, email_count)
    email_count = add_email("qa_contact", qa_contact, email_count)

    if long_desc is not None:
        query["query_format"] = "advanced"
        query["longdesc"] = long_desc
        query["longdesc_type"] = "allwordssubstr"

    # 'include_fields' only available for Bugzilla4+
    # 'extra_fields' is an RHBZ extension
    query.update(self._process_include_fields(
        include_fields, exclude_fields, extra_fields))

    # Strip out None elements in the dict
    for k, v in query.copy().items():
        if v is None:
            del(query[k])

    # Give subclasses a chance to rewrite the query before returning
    self.pre_translation(query)
    return query
+
def query(self, query):
    '''Query bugzilla and return a list of matching bugs.
    query must be a dict with fields like those in in querydata['fields'].
    Returns a list of Bug objects.
    Also see the _query() method for details about the underlying
    implementation.

    :raises BugzillaError: if the server rejects a query_format-style
        query and appears not to support URL-derived queries.
    '''
    log.debug("Calling Bug.search with: %s", query)
    try:
        r = self._proxy.Bug.search(query)
    except Fault as e:

        # Try to give a hint in the error message if url_to_query
        # isn't supported by this bugzilla instance
        if ("query_format" not in str(e) or
            "RHBugzilla" in str(e.__class__) or
            self._check_version(5, 0)):
            raise
        raise BugzillaError("%s\nYour bugzilla instance does not "
            "appear to support API queries derived from bugzilla "
            "web URL queries." % e)

    log.debug("Query returned %s bugs", len(r['bugs']))
    return [Bug(self, dict=b,
                autorefresh=self.bug_autorefresh) for b in r['bugs']]
+
def pre_translation(self, query):
    '''In order to keep the API the same, Bugzilla4 needs to process the
    query and the result. This also applies to the refresh() function
    '''
+
def post_translation(self, query, bug):
    '''In order to keep the API the same, Bugzilla4 needs to process the
    query and the result. This also applies to the refresh() function
    '''
+
def bugs_history_raw(self, bug_ids):
    '''
    Experimental. Gets the history of changes for
    particular bugs in the database.
    '''
    payload = {'ids': bug_ids}
    return self._proxy.Bug.history(payload)
+
+
+ #######################################
+ # Methods for modifying existing bugs #
+ #######################################
+
+ # Bug() also has individual methods for many ops, like setassignee()
+
def update_bugs(self, ids, updates):
    """
    A thin wrapper around bugzilla Bug.update(). Used to update all
    values of an existing bug report, as well as add comments.

    The dictionary passed to this function should be generated with
    build_update(), otherwise we cannot guarantee back compatibility.
    """
    # Work on a copy so the caller's update dict isn't mutated
    payload = dict(updates)
    payload["ids"] = self._listify(ids)

    log.debug("Calling Bug.update with: %s", payload)
    return self._proxy.Bug.update(payload)
+
def update_tags(self, idlist, tags_add=None, tags_remove=None):
    '''
    Updates the 'tags' field for a bug.
    '''
    tagchanges = {}
    if tags_add:
        tagchanges["add"] = self._listify(tags_add)
    if tags_remove:
        tagchanges["remove"] = self._listify(tags_remove)

    payload = {
        "ids": self._listify(idlist),
        "tags": tagchanges,
    }

    log.debug("Calling Bug.update_tags with: %s", payload)
    return self._proxy.Bug.update_tags(payload)
+
def update_flags(self, idlist, flags):
    """
    A thin back compat wrapper around build_update(flags=X)
    """
    changes = self.build_update(flags=flags)
    return self.update_bugs(idlist, changes)
+
+
def build_update(self,
                 alias=None,
                 assigned_to=None,
                 blocks_add=None,
                 blocks_remove=None,
                 blocks_set=None,
                 depends_on_add=None,
                 depends_on_remove=None,
                 depends_on_set=None,
                 cc_add=None,
                 cc_remove=None,
                 is_cc_accessible=None,
                 comment=None,
                 comment_private=None,
                 component=None,
                 deadline=None,
                 dupe_of=None,
                 estimated_time=None,
                 groups_add=None,
                 groups_remove=None,
                 keywords_add=None,
                 keywords_remove=None,
                 keywords_set=None,
                 op_sys=None,
                 platform=None,
                 priority=None,
                 product=None,
                 qa_contact=None,
                 is_creator_accessible=None,
                 remaining_time=None,
                 reset_assigned_to=None,
                 reset_qa_contact=None,
                 resolution=None,
                 see_also_add=None,
                 see_also_remove=None,
                 severity=None,
                 status=None,
                 summary=None,
                 target_milestone=None,
                 target_release=None,
                 url=None,
                 version=None,
                 whiteboard=None,
                 work_time=None,
                 fixed_in=None,
                 qa_whiteboard=None,
                 devel_whiteboard=None,
                 internal_whiteboard=None,
                 sub_component=None,
                 flags=None,
                 comment_tags=None):
    """
    Returns a python dict() with properly formatted parameters to
    pass to update_bugs(). See bugzilla documentation for the format
    of the individual fields:

    https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#create-bug

    :raises ValueError: if an RH-only field (fixed_in, qa_whiteboard,
        devel_whiteboard, internal_whiteboard, sub_component) is passed
        to this base class.
    """
    ret = {}

    # These are only supported for rhbugzilla
    for key, val in [
        ("fixed_in", fixed_in),
        ("devel_whiteboard", devel_whiteboard),
        ("qa_whiteboard", qa_whiteboard),
        ("internal_whiteboard", internal_whiteboard),
        ("sub_component", sub_component),
    ]:
        if val is not None:
            raise ValueError("bugzilla instance does not support "
                             "updating '%s'" % key)

    def s(key, val, convert=None):
        # Set a scalar field, skipping None and optionally converting
        if val is None:
            return
        if convert:
            val = convert(val)
        ret[key] = val

    def add_dict(key, add, remove, _set=None, convert=None):
        # Build the {"add": [...], "remove": [...], "set": [...]}
        # structure Bug.update expects for list-valued fields
        if add is remove is _set is None:
            return

        def c(val):
            val = self._listify(val)
            if convert:
                val = [convert(v) for v in val]
            return val

        newdict = {}
        if add is not None:
            newdict["add"] = c(add)
        if remove is not None:
            newdict["remove"] = c(remove)
        if _set is not None:
            newdict["set"] = c(_set)
        ret[key] = newdict


    s("alias", alias)
    s("assigned_to", assigned_to)
    s("is_cc_accessible", is_cc_accessible, bool)
    s("component", component)
    s("deadline", deadline)
    s("dupe_of", dupe_of, int)
    s("estimated_time", estimated_time, int)
    s("op_sys", op_sys)
    s("platform", platform)
    s("priority", priority)
    s("product", product)
    s("qa_contact", qa_contact)
    s("is_creator_accessible", is_creator_accessible, bool)
    s("remaining_time", remaining_time, float)
    s("reset_assigned_to", reset_assigned_to, bool)
    s("reset_qa_contact", reset_qa_contact, bool)
    s("resolution", resolution)
    s("severity", severity)
    s("status", status)
    s("summary", summary)
    s("target_milestone", target_milestone)
    s("target_release", target_release)
    s("url", url)
    s("version", version)
    s("whiteboard", whiteboard)
    s("work_time", work_time, float)
    s("flags", flags)
    s("comment_tags", comment_tags, self._listify)

    add_dict("blocks", blocks_add, blocks_remove, blocks_set,
             convert=int)
    add_dict("depends_on", depends_on_add, depends_on_remove,
             depends_on_set, convert=int)
    add_dict("cc", cc_add, cc_remove)
    add_dict("groups", groups_add, groups_remove)
    add_dict("keywords", keywords_add, keywords_remove, keywords_set)
    add_dict("see_also", see_also_add, see_also_remove)

    if comment is not None:
        ret["comment"] = {"comment": comment}
        if comment_private:
            ret["comment"]["is_private"] = comment_private

    return ret
+
+
+ ########################################
+ # Methods for working with attachments #
+ ########################################
+
+ def _attachment_uri(self, attachid):
+ '''Returns the URI for the given attachment ID.'''
+ att_uri = self.url.replace('xmlrpc.cgi', 'attachment.cgi')
+ att_uri = att_uri + '?id=%s' % attachid
+ return att_uri
+
+ def attachfile(self, idlist, attachfile, description, **kwargs):
+ '''
+ Attach a file to the given bug IDs. Returns the ID of the attachment
+ or raises XMLRPC Fault if something goes wrong.
+
+ attachfile may be a filename (which will be opened) or a file-like
+ object, which must provide a 'read' method. If it's not one of these,
+ this method will raise a TypeError.
+ description is the short description of this attachment.
+
+ Optional keyword args are as follows:
+ file_name: this will be used as the filename for the attachment.
+ REQUIRED if attachfile is a file-like object with no
+ 'name' attribute, otherwise the filename or .name
+ attribute will be used.
+ comment: An optional comment about this attachment.
+ is_private: Set to True if the attachment should be marked private.
+ is_patch: Set to True if the attachment is a patch.
+ content_type: The mime-type of the attached file. Defaults to
+ application/octet-stream if not set. NOTE that text
+ files will *not* be viewable in bugzilla unless you
+ remember to set this to text/plain. So remember that!
+
+ Returns the list of attachment ids that were added. If only one
+ attachment was added, we return the single int ID for back compat
+ '''
+ if isinstance(attachfile, str):
+ f = open(attachfile, "rb")
+ elif hasattr(attachfile, 'read'):
+ f = attachfile
+ else:
+ raise TypeError("attachfile must be filename or file-like object")
+
+ # Back compat
+ if "contenttype" in kwargs:
+ kwargs["content_type"] = kwargs.pop("contenttype")
+ if "ispatch" in kwargs:
+ kwargs["is_patch"] = kwargs.pop("ispatch")
+ if "isprivate" in kwargs:
+ kwargs["is_private"] = kwargs.pop("isprivate")
+ if "filename" in kwargs:
+ kwargs["file_name"] = kwargs.pop("filename")
+
+ kwargs['summary'] = description
+
+ data = f.read()
+ if not isinstance(data, bytes):
+ data = data.encode(locale.getpreferredencoding())
+ kwargs['data'] = Binary(data)
+
+ kwargs['ids'] = self._listify(idlist)
+
+ if 'file_name' not in kwargs and hasattr(f, "name"):
+ kwargs['file_name'] = os.path.basename(f.name)
+ if 'content_type' not in kwargs:
+ ctype = _detect_filetype(getattr(f, "name", None))
+ if not ctype:
+ ctype = 'application/octet-stream'
+ kwargs['content_type'] = ctype
+
+ ret = self._proxy.Bug.add_attachment(kwargs)
+
+ if "attachments" in ret:
+ # Up to BZ 4.2
+ ret = [int(k) for k in ret["attachments"].keys()]
+ elif "ids" in ret:
+ # BZ 4.4+
+ ret = ret["ids"]
+
+ if isinstance(ret, list) and len(ret) == 1:
+ ret = ret[0]
+ return ret
+
+
+ def openattachment(self, attachid):
+ '''Get the contents of the attachment with the given attachment ID.
+ Returns a file-like object.'''
+ attachments = self.get_attachments(None, attachid)
+ data = attachments["attachments"][str(attachid)]
+ xmlrpcbinary = data["data"]
+
+ ret = BytesIO()
+ ret.write(xmlrpcbinary.data)
+ ret.name = data["file_name"]
+ ret.seek(0)
+ return ret
+
+ def updateattachmentflags(self, bugid, attachid, flagname, **kwargs):
+ '''
+ Updates a flag for the given attachment ID.
+ Optional keyword args are:
+ status: new status for the flag ('-', '+', '?', 'X')
+ requestee: new requestee for the flag
+ '''
+ # Bug ID was used for the original custom redhat API, no longer
+ # needed though
+ ignore = bugid
+
+ flags = {"name": flagname}
+ flags.update(kwargs)
+ update = {'ids': [int(attachid)], 'flags': [flags]}
+
+ log.debug("Calling Bug.update_attachment(%s)", update)
+ return self._proxy.Bug.update_attachment(update)
+
+ def get_attachments(self, ids, attachment_ids,
+ include_fields=None, exclude_fields=None):
+ """
+ Wrapper for Bug.attachments. One of ids or attachment_ids is required
+
+ :param ids: Get attachments for this bug ID
+ :param attachment_ids: Specific attachment ID to get
+
+ https://bugzilla.readthedocs.io/en/latest/api/core/v1/attachment.html#get-attachment
+ """
+ params = {
+ "ids": self._listify(ids) or [],
+ "attachment_ids": self._listify(attachment_ids) or [],
+ }
+ if include_fields:
+ params["include_fields"] = self._listify(include_fields)
+ if exclude_fields:
+ params["exclude_fields"] = self._listify(exclude_fields)
+
+ log.debug("Calling Bug.attachments(%s)", params)
+ return self._proxy.Bug.attachments(params)
+
+
+ #####################
+ # createbug methods #
+ #####################
+
+ createbug_required = ('product', 'component', 'summary', 'version',
+ 'description')
+
+ def build_createbug(self,
+ product=None,
+ component=None,
+ version=None,
+ summary=None,
+ description=None,
+ comment_private=None,
+ blocks=None,
+ cc=None,
+ assigned_to=None,
+ keywords=None,
+ depends_on=None,
+ groups=None,
+ op_sys=None,
+ platform=None,
+ priority=None,
+ qa_contact=None,
+ resolution=None,
+ severity=None,
+ status=None,
+ target_milestone=None,
+ target_release=None,
+ url=None,
+ sub_component=None,
+ alias=None,
+ comment_tags=None):
+ """"
+ Returns a python dict() with properly formatted parameters to
+ pass to createbug(). See bugzilla documentation for the format
+ of the individual fields:
+
+        https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#create-bug
+ """
+
+ localdict = {}
+ if blocks:
+ localdict["blocks"] = self._listify(blocks)
+ if cc:
+ localdict["cc"] = self._listify(cc)
+ if depends_on:
+ localdict["depends_on"] = self._listify(depends_on)
+ if groups:
+ localdict["groups"] = self._listify(groups)
+ if keywords:
+ localdict["keywords"] = self._listify(keywords)
+ if description:
+ localdict["description"] = description
+ if comment_private:
+ localdict["comment_is_private"] = True
+
+ # Most of the machinery and formatting here is the same as
+ # build_update, so reuse that as much as possible
+ ret = self.build_update(product=product, component=component,
+ version=version, summary=summary, op_sys=op_sys,
+ platform=platform, priority=priority, qa_contact=qa_contact,
+ resolution=resolution, severity=severity, status=status,
+ target_milestone=target_milestone,
+ target_release=target_release, url=url,
+ assigned_to=assigned_to, sub_component=sub_component,
+ alias=alias, comment_tags=comment_tags)
+
+ ret.update(localdict)
+ return ret
+
+ def _validate_createbug(self, *args, **kwargs):
+ # Previous API required users specifying keyword args that mapped
+ # to the XMLRPC arg names. Maintain that bad compat, but also allow
+ # receiving a single dictionary like query() does
+ if kwargs and args:
+ raise BugzillaError("createbug: cannot specify positional "
+ "args=%s with kwargs=%s, must be one or the "
+ "other." % (args, kwargs))
+ if args:
+ if len(args) > 1 or not isinstance(args[0], dict):
+ raise BugzillaError("createbug: positional arguments only "
+ "accept a single dictionary.")
+ data = args[0]
+ else:
+ data = kwargs
+
+ # If we're getting a call that uses an old fieldname, convert it to the
+ # new fieldname instead.
+ for newname, oldname in self._get_api_aliases():
+ if (newname in self.createbug_required and
+ newname not in data and
+ oldname in data):
+ data[newname] = data.pop(oldname)
+
+ # Back compat handling for check_args
+ if "check_args" in data:
+ del(data["check_args"])
+
+ return data
+
+ def createbug(self, *args, **kwargs):
+ '''
+ Create a bug with the given info. Returns a new Bug object.
+ Check bugzilla API documentation for valid values, at least
+ product, component, summary, version, and description need to
+ be passed.
+ '''
+ data = self._validate_createbug(*args, **kwargs)
+ log.debug("Calling Bug.create with: %s", data)
+ rawbug = self._proxy.Bug.create(data)
+ return Bug(self, bug_id=rawbug["id"],
+ autorefresh=self.bug_autorefresh)
+
+
+ ##############################
+ # Methods for handling Users #
+ ##############################
+
+ def _getusers(self, ids=None, names=None, match=None):
+ '''Return a list of users that match criteria.
+
+ :kwarg ids: list of user ids to return data on
+ :kwarg names: list of user names to return data on
+ :kwarg match: list of patterns. Returns users whose real name or
+ login name match the pattern.
+ :raises XMLRPC Fault: Code 51: if a Bad Login Name was sent to the
+ names array.
+ Code 304: if the user was not authorized to see user they
+ requested.
+ Code 505: user is logged out and can't use the match or ids
+ parameter.
+
+ Available in Bugzilla-3.4+
+ '''
+ params = {}
+ if ids:
+ params['ids'] = self._listify(ids)
+ if names:
+ params['names'] = self._listify(names)
+ if match:
+ params['match'] = self._listify(match)
+ if not params:
+            raise BugzillaError('_getusers() needs one of ids, '
+                                'names, or match kwarg.')
+
+ log.debug("Calling User.get with: %s", params)
+ return self._proxy.User.get(params)
+
+ def getuser(self, username):
+ '''Return a bugzilla User for the given username
+
+ :arg username: The username used in bugzilla.
+ :raises XMLRPC Fault: Code 51 if the username does not exist
+ :returns: User record for the username
+ '''
+ ret = self.getusers(username)
+ return ret and ret[0]
+
+ def getusers(self, userlist):
+        '''Return a list of Users from a list of usernames.
+
+ :userlist: List of usernames to lookup
+ :returns: List of User records
+ '''
+ userobjs = [User(self, **rawuser) for rawuser in
+ self._getusers(names=userlist).get('users', [])]
+
+ # Return users in same order they were passed in
+ ret = []
+ for u in userlist:
+ for uobj in userobjs[:]:
+ if uobj.email == u:
+ userobjs.remove(uobj)
+ ret.append(uobj)
+ break
+ ret += userobjs
+ return ret
+
+
+ def searchusers(self, pattern):
+        '''Return a list of bugzilla Users matching the given patterns
+
+ :arg pattern: List of patterns to match against.
+ :returns: List of User records
+ '''
+ return [User(self, **rawuser) for rawuser in
+ self._getusers(match=pattern).get('users', [])]
+
+ def createuser(self, email, name='', password=''):
+ '''Return a bugzilla User for the given username
+
+ :arg email: The email address to use in bugzilla
+ :kwarg name: Real name to associate with the account
+ :kwarg password: Password to set for the bugzilla account
+ :raises XMLRPC Fault: Code 501 if the username already exists
+ Code 500 if the email address isn't valid
+ Code 502 if the password is too short
+ Code 503 if the password is too long
+ :return: User record for the username
+ '''
+ self._proxy.User.create(email, name, password)
+ return self.getuser(email)
+
+ def updateperms(self, user, action, groups):
+ '''
+ A method to update the permissions (group membership) of a bugzilla
+ user.
+
+ :arg user: The e-mail address of the user to be acted upon. Can
+ also be a list of emails.
+ :arg action: add, remove, or set
+ :arg groups: list of groups to be added to (i.e. ['fedora_contrib'])
+ '''
+ groups = self._listify(groups)
+ if action == "rem":
+ action = "remove"
+ if action not in ["add", "remove", "set"]:
+ raise BugzillaError("Unknown user permission action '%s'" % action)
+
+ update = {
+ "names": self._listify(user),
+ "groups": {
+ action: groups,
+ }
+ }
+
+ log.debug("Call User.update with: %s", update)
+ return self._proxy.User.update(update)
diff --git a/scripts/bugzilla/bug.py b/scripts/bugzilla/bug.py
new file mode 100644
index 0000000000..e586e7f95b
--- /dev/null
+++ b/scripts/bugzilla/bug.py
@@ -0,0 +1,450 @@
+# base.py - the base classes etc. for a Python interface to bugzilla
+#
+# Copyright (C) 2007, 2008, 2009, 2010 Red Hat Inc.
+# Author: Will Woods <wwoods@redhat.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from __future__ import unicode_literals
+import locale
+from logging import getLogger
+import sys
+
+log = getLogger(__name__)
+
+
+class Bug(object):
+ '''A container object for a bug report. Requires a Bugzilla instance -
+ every Bug is on a Bugzilla, obviously.
+ Optional keyword args:
+ dict=DICT - populate attributes with the result of a getBug() call
+ bug_id=ID - if dict does not contain bug_id, this is required before
+ you can read any attributes or make modifications to this
+ bug.
+ '''
+ def __init__(self, bugzilla, bug_id=None, dict=None, autorefresh=False):
+ # pylint: disable=redefined-builtin
+ # API had pre-existing issue that we can't change ('dict' usage)
+
+ self.bugzilla = bugzilla
+ self._bug_fields = []
+ self.autorefresh = autorefresh
+
+ if not dict:
+ dict = {}
+ if bug_id:
+ dict["id"] = bug_id
+
+ log.debug("Bug(%s)", sorted(dict.keys()))
+ self._update_dict(dict)
+
+ self.weburl = bugzilla.url.replace('xmlrpc.cgi',
+ 'show_bug.cgi?id=%i' % self.bug_id)
+
+ def __str__(self):
+ '''Return a simple string representation of this bug
+
+ This is available only for compatibility. Using 'str(bug)' and
+ 'print(bug)' is not recommended because of potential encoding issues.
+ Please use unicode(bug) where possible.
+ '''
+ if sys.version_info[0] >= 3:
+ return self.__unicode__()
+ else:
+ return self.__unicode__().encode(
+ locale.getpreferredencoding(), 'replace')
+
+ def __unicode__(self):
+ '''Return a simple unicode string representation of this bug'''
+ return "#%-6s %-10s - %s - %s" % (self.bug_id, self.bug_status,
+ self.assigned_to, self.summary)
+
+ def __repr__(self):
+ return '<Bug #%i on %s at %#x>' % (self.bug_id, self.bugzilla.url,
+ id(self))
+
+ def __getattr__(self, name):
+ refreshed = False
+ while True:
+ if refreshed and name in self.__dict__:
+ # If name was in __dict__ to begin with, __getattr__ would
+ # have never been called.
+ return self.__dict__[name]
+
+ # pylint: disable=protected-access
+ aliases = self.bugzilla._get_bug_aliases()
+ # pylint: enable=protected-access
+
+ for newname, oldname in aliases:
+ if name == oldname and newname in self.__dict__:
+ return self.__dict__[newname]
+
+ # Doing dir(bugobj) does getattr __members__/__methods__,
+ # don't refresh for those
+ if name.startswith("__") and name.endswith("__"):
+ break
+
+ if refreshed or not self.autorefresh:
+ break
+
+ log.info("Bug %i missing attribute '%s' - doing implicit "
+ "refresh(). This will be slow, if you want to avoid "
+ "this, properly use query/getbug include_fields, and "
+ "set bugzilla.bug_autorefresh = False to force failure.",
+ self.bug_id, name)
+
+ # We pass the attribute name to getbug, since for something like
+ # 'attachments' which downloads lots of data we really want the
+ # user to opt in.
+ self.refresh(extra_fields=[name])
+ refreshed = True
+
+ msg = ("Bug object has no attribute '%s'." % name)
+ if not self.autorefresh:
+ msg += ("\nIf '%s' is a bugzilla attribute, it may not have "
+ "been cached when the bug was fetched. You may want "
+ "to adjust your include_fields for getbug/query." % name)
+ raise AttributeError(msg)
+
+ def refresh(self, include_fields=None, exclude_fields=None,
+ extra_fields=None):
+ '''
+ Refresh the bug with the latest data from bugzilla
+ '''
+ # pylint: disable=protected-access
+ r = self.bugzilla._getbug(self.bug_id,
+ include_fields=include_fields, exclude_fields=exclude_fields,
+ extra_fields=self._bug_fields + (extra_fields or []))
+ # pylint: enable=protected-access
+ self._update_dict(r)
+ reload = refresh
+
+ def _update_dict(self, newdict):
+ '''
+ Update internal dictionary, in a way that ensures no duplicate
+ entries are stored WRT field aliases
+ '''
+ if self.bugzilla:
+ self.bugzilla.post_translation({}, newdict)
+
+ # pylint: disable=protected-access
+ aliases = self.bugzilla._get_bug_aliases()
+ # pylint: enable=protected-access
+
+ for newname, oldname in aliases:
+ if oldname not in newdict:
+ continue
+
+ if newname not in newdict:
+ newdict[newname] = newdict[oldname]
+ elif newdict[newname] != newdict[oldname]:
+ log.debug("Update dict contained differing alias values "
+ "d[%s]=%s and d[%s]=%s , dropping the value "
+ "d[%s]", newname, newdict[newname], oldname,
+ newdict[oldname], oldname)
+ del(newdict[oldname])
+
+ for key in newdict.keys():
+ if key not in self._bug_fields:
+ self._bug_fields.append(key)
+ self.__dict__.update(newdict)
+
+ if 'id' not in self.__dict__ and 'bug_id' not in self.__dict__:
+ raise TypeError("Bug object needs a bug_id")
+
+
+ ##################
+ # pickle helpers #
+ ##################
+
+ def __getstate__(self):
+ ret = {}
+ for key in self._bug_fields:
+ ret[key] = self.__dict__[key]
+ return ret
+
+ def __setstate__(self, vals):
+ self._bug_fields = []
+ self.bugzilla = None
+ self._update_dict(vals)
+
+
+ #####################
+ # Modify bug status #
+ #####################
+
+ def setstatus(self, status, comment=None, private=False):
+ '''
+ Update the status for this bug report.
+ Commonly-used values are ASSIGNED, MODIFIED, and NEEDINFO.
+
+ To change bugs to RESOLVED, use .close() instead.
+ '''
+ # Note: fedora bodhi uses this function
+ vals = self.bugzilla.build_update(status=status,
+ comment=comment,
+ comment_private=private)
+ log.debug("setstatus: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+ def close(self, resolution, dupeid=None, fixedin=None,
+ comment=None, isprivate=False):
+ '''Close this bug.
+ Valid values for resolution are in bz.querydefaults['resolution_list']
+ For bugzilla.redhat.com that's:
+ ['NOTABUG', 'WONTFIX', 'DEFERRED', 'WORKSFORME', 'CURRENTRELEASE',
+ 'RAWHIDE', 'ERRATA', 'DUPLICATE', 'UPSTREAM', 'NEXTRELEASE',
+ 'CANTFIX', 'INSUFFICIENT_DATA']
+ If using DUPLICATE, you need to set dupeid to the ID of the other bug.
+ If using WORKSFORME/CURRENTRELEASE/RAWHIDE/ERRATA/UPSTREAM/NEXTRELEASE
+ you can (and should) set 'new_fixed_in' to a string representing the
+ version that fixes the bug.
+ You can optionally add a comment while closing the bug. Set 'isprivate'
+ to True if you want that comment to be private.
+ '''
+ # Note: fedora bodhi uses this function
+ vals = self.bugzilla.build_update(comment=comment,
+ comment_private=isprivate,
+ resolution=resolution,
+ dupe_of=dupeid,
+ fixed_in=fixedin,
+ status="RESOLVED")
+ log.debug("close: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+
+ #####################
+ # Modify bug emails #
+ #####################
+
+ def setassignee(self, assigned_to=None,
+ qa_contact=None, comment=None):
+ '''
+ Set any of the assigned_to or qa_contact fields to a new
+ bugzilla account, with an optional comment, e.g.
+ setassignee(assigned_to='wwoods@redhat.com')
+ setassignee(qa_contact='wwoods@redhat.com', comment='wwoods QA ftw')
+
+ You must set at least one of the two assignee fields, or this method
+ will throw a ValueError.
+
+ Returns [bug_id, mailresults].
+ '''
+ if not (assigned_to or qa_contact):
+ raise ValueError("You must set one of assigned_to "
+ " or qa_contact")
+
+ vals = self.bugzilla.build_update(assigned_to=assigned_to,
+ qa_contact=qa_contact,
+ comment=comment)
+ log.debug("setassignee: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+ def addcc(self, cclist, comment=None):
+ '''
+ Adds the given email addresses to the CC list for this bug.
+ cclist: list of email addresses (strings)
+ comment: optional comment to add to the bug
+ '''
+ vals = self.bugzilla.build_update(comment=comment,
+ cc_add=cclist)
+ log.debug("addcc: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+ def deletecc(self, cclist, comment=None):
+ '''
+ Removes the given email addresses from the CC list for this bug.
+ '''
+ vals = self.bugzilla.build_update(comment=comment,
+ cc_remove=cclist)
+ log.debug("deletecc: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+
+ ####################
+ # comment handling #
+ ####################
+
+ def addcomment(self, comment, private=False):
+ '''
+ Add the given comment to this bug. Set private to True to mark this
+ comment as private.
+ '''
+ # Note: fedora bodhi uses this function
+ vals = self.bugzilla.build_update(comment=comment,
+ comment_private=private)
+ log.debug("addcomment: update=%s", vals)
+
+ return self.bugzilla.update_bugs(self.bug_id, vals)
+
+ def getcomments(self):
+ '''
+ Returns an array of comment dictionaries for this bug
+ '''
+ comment_list = self.bugzilla.get_comments([self.bug_id])
+ return comment_list['bugs'][str(self.bug_id)]['comments']
+
+
+ #####################
+ # Get/Set bug flags #
+ #####################
+
+ def get_flag_type(self, name):
+ """
+ Return flag_type information for a specific flag
+
+ Older RHBugzilla returned a lot more info here, but it was
+ non-upstream and is now gone.
+ """
+ for t in self.flags:
+ if t['name'] == name:
+ return t
+ return None
+
+ def get_flags(self, name):
+ """
+ Return flag value information for a specific flag
+ """
+ ft = self.get_flag_type(name)
+ if not ft:
+ return None
+
+ return [ft]
+
+ def get_flag_status(self, name):
+ """
+ Return a flag 'status' field
+
+ This method works only for simple flags that have only a 'status' field
+ with no "requestee" info, and no multiple values. For more complex
+ flags, use get_flags() to get extended flag value information.
+ """
+ f = self.get_flags(name)
+ if not f:
+ return None
+
+ # This method works only for simple flags that have only one
+ # value set.
+ assert len(f) <= 1
+
+ return f[0]['status']
+
+ def updateflags(self, flags):
+ """
+ Thin wrapper around build_update(flags=X). This only handles simple
+ status changes, anything like needinfo requestee needs to call
+ build_update + update_bugs directly
+
+ :param flags: Dictionary of the form {"flagname": "status"}, example
+ {"needinfo": "?", "devel_ack": "+"}
+ """
+ flaglist = []
+ for key, value in flags.items():
+ flaglist.append({"name": key, "status": value})
+ return self.bugzilla.update_bugs([self.bug_id],
+ self.bugzilla.build_update(flags=flaglist))
+
+
+ ########################
+ # Experimental methods #
+ ########################
+
+ def get_attachments(self, include_fields=None, exclude_fields=None):
+ """
+ Helper call to Bugzilla.get_attachments. If you want to fetch
+ specific attachment IDs, use that function instead
+ """
+ if "attachments" in self.__dict__:
+ return self.attachments
+
+ data = self.bugzilla.get_attachments([self.bug_id], None,
+ include_fields, exclude_fields)
+ return data["bugs"][str(self.bug_id)]
+
+ def get_attachment_ids(self):
+ """
+ Helper function to return only the attachment IDs for this bug
+ """
+ return [a["id"] for a in self.get_attachments(exclude_fields=["data"])]
+
+ def get_history_raw(self):
+ '''
+ Experimental. Get the history of changes for this bug.
+ '''
+ return self.bugzilla.bugs_history_raw([self.bug_id])
+
+
+class User(object):
+ '''Container object for a bugzilla User.
+
+ :arg bugzilla: Bugzilla instance that this User belongs to.
+ Rest of the params come straight from User.get()
+ '''
+ def __init__(self, bugzilla, **kwargs):
+ self.bugzilla = bugzilla
+ self.__userid = kwargs.get('id')
+ self.__name = kwargs.get('name')
+
+ self.__email = kwargs.get('email', self.__name)
+ self.__can_login = kwargs.get('can_login', False)
+
+ self.real_name = kwargs.get('real_name', None)
+ self.password = None
+
+ self.groups = kwargs.get('groups', {})
+ self.groupnames = []
+ for g in self.groups:
+ if "name" in g:
+ self.groupnames.append(g["name"])
+ self.groupnames.sort()
+
+
+ ########################
+ # Read-only attributes #
+ ########################
+
+ # We make these properties so that the user cannot set them. They are
+ # unaffected by the update() method so it would be misleading to let them
+ # be changed.
+ @property
+ def userid(self):
+ return self.__userid
+
+ @property
+ def email(self):
+ return self.__email
+
+ @property
+ def can_login(self):
+ return self.__can_login
+
+ # name is a key in some methods. Mark it dirty when we change it #
+ @property
+ def name(self):
+ return self.__name
+
+ def refresh(self):
+ """
+ Update User object with latest info from bugzilla
+ """
+ newuser = self.bugzilla.getuser(self.email)
+ self.__dict__.update(newuser.__dict__)
+
+ def updateperms(self, action, groups):
+ '''
+ A method to update the permissions (group membership) of a bugzilla
+ user.
+
+ :arg action: add, remove, or set
+ :arg groups: list of groups to be added to (i.e. ['fedora_contrib'])
+ '''
+ self.bugzilla.updateperms(self.name, action, groups)
diff --git a/scripts/bugzilla/oldclasses.py b/scripts/bugzilla/oldclasses.py
new file mode 100644
index 0000000000..18169e7811
--- /dev/null
+++ b/scripts/bugzilla/oldclasses.py
@@ -0,0 +1,23 @@
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from .base import Bugzilla
+from .rhbugzilla import RHBugzilla
+
+
+# These are old compat classes. Nothing new should be added here,
+# and these should not be altered
+
+class Bugzilla3(Bugzilla): pass
+class Bugzilla32(Bugzilla): pass
+class Bugzilla34(Bugzilla): pass
+class Bugzilla36(Bugzilla): pass
+class Bugzilla4(Bugzilla): pass
+class Bugzilla42(Bugzilla): pass
+class Bugzilla44(Bugzilla): pass
+class NovellBugzilla(Bugzilla): pass
+class RHBugzilla3(RHBugzilla): pass
+class RHBugzilla4(RHBugzilla): pass
diff --git a/scripts/bugzilla/rhbugzilla.py b/scripts/bugzilla/rhbugzilla.py
new file mode 100644
index 0000000000..55ee601b14
--- /dev/null
+++ b/scripts/bugzilla/rhbugzilla.py
@@ -0,0 +1,352 @@
+# rhbugzilla.py - a Python interface to Red Hat Bugzilla using xmlrpclib.
+#
+# Copyright (C) 2008-2012 Red Hat Inc.
+# Author: Will Woods <wwoods@redhat.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from logging import getLogger
+
+from .base import Bugzilla
+
+log = getLogger(__name__)
+
+
+class RHBugzilla(Bugzilla):
+ '''
+ Bugzilla class for connecting to Red Hat's forked bugzilla instance,
+ bugzilla.redhat.com
+
+ Historically this class used many more non-upstream methods, but
+ in 2012 RH started dropping most of its custom bits. By that time,
+ upstream BZ had most of the important functionality.
+
+ Much of the remaining code here is just trying to keep things operating
+ in python-bugzilla back compatible manner.
+
+ This class was written using bugzilla.redhat.com's API docs:
+ https://bugzilla.redhat.com/docs/en/html/api/
+ '''
+ def _init_class_state(self):
+ def _add_both_alias(newname, origname):
+ self._add_field_alias(newname, origname, is_api=False)
+ self._add_field_alias(origname, newname, is_bug=False)
+
+ _add_both_alias('fixed_in', 'cf_fixed_in')
+ _add_both_alias('qa_whiteboard', 'cf_qa_whiteboard')
+ _add_both_alias('devel_whiteboard', 'cf_devel_whiteboard')
+ _add_both_alias('internal_whiteboard', 'cf_internal_whiteboard')
+
+ self._add_field_alias('component', 'components', is_bug=False)
+ self._add_field_alias('version', 'versions', is_bug=False)
+ # Yes, sub_components is the field name the API expects
+ self._add_field_alias('sub_components', 'sub_component', is_bug=False)
+
+ # flags format isn't exactly the same but it's the closest approx
+ self._add_field_alias('flags', 'flag_types')
+
+ self._getbug_extra_fields = self._getbug_extra_fields + [
+ "comments", "description",
+ "external_bugs", "flags", "sub_components",
+ "tags",
+ ]
+ self._supports_getbug_extra_fields = True
+
+
+ ######################
+ # Bug update methods #
+ ######################
+
+ def build_update(self, **kwargs):
+ # pylint: disable=arguments-differ
+ adddict = {}
+
+ def pop(key, destkey):
+ val = kwargs.pop(key, None)
+ if val is None:
+ return
+ adddict[destkey] = val
+
+ def get_sub_component():
+ val = kwargs.pop("sub_component", None)
+ if val is None:
+ return
+
+ if not isinstance(val, dict):
+ component = self._listify(kwargs.get("component"))
+ if not component:
+ raise ValueError("component must be specified if "
+ "specifying sub_component")
+ val = {component[0]: val}
+ adddict["sub_components"] = val
+
+ def get_alias():
+ # RHBZ has a custom extension to allow a bug to have multiple
+ # aliases, so the format of aliases is
+ # {"add": [...], "remove": [...]}
+ # But that means in order to approximate upstream behavior,
+ # which just overwrites the existing alias, we need to read
+ # the bug's state first to know what string to remove. Which
+ # we can't do, since we don't know the bug numbers at this point.
+ # So fail for now.
+ #
+ # The API should provide {"set": [...]}
+ # https://bugzilla.redhat.com/show_bug.cgi?id=1173114
+ #
+ # Implementation will go here when it's available
+ pass
+
+ pop("fixed_in", "cf_fixed_in")
+ pop("qa_whiteboard", "cf_qa_whiteboard")
+ pop("devel_whiteboard", "cf_devel_whiteboard")
+ pop("internal_whiteboard", "cf_internal_whiteboard")
+
+ get_sub_component()
+ get_alias()
+
+ vals = Bugzilla.build_update(self, **kwargs)
+ vals.update(adddict)
+
+ return vals
+
+ def add_external_tracker(self, bug_ids, ext_bz_bug_id, ext_type_id=None,
+ ext_type_description=None, ext_type_url=None,
+ ext_status=None, ext_description=None,
+ ext_priority=None):
+ """
+ Wrapper method to allow adding of external tracking bugs using the
+ ExternalBugs::WebService::add_external_bug method.
+
+ This is documented at
+ https://bugzilla.redhat.com/docs/en/html/api/extensions/ExternalBugs/lib/WebService.html#add_external_bug
+
+ bug_ids: A single bug id or list of bug ids to have external trackers
+ added.
+ ext_bz_bug_id: The external bug id (ie: the bug number in the
+ external tracker).
+ ext_type_id: The external tracker id as used by Bugzilla.
+ ext_type_description: The external tracker description as used by
+ Bugzilla.
+ ext_type_url: The external tracker url as used by Bugzilla.
+ ext_status: The status of the external bug.
+ ext_description: The description of the external bug.
+ ext_priority: The priority of the external bug.
+ """
+ param_dict = {'ext_bz_bug_id': ext_bz_bug_id}
+ if ext_type_id is not None:
+ param_dict['ext_type_id'] = ext_type_id
+ if ext_type_description is not None:
+ param_dict['ext_type_description'] = ext_type_description
+ if ext_type_url is not None:
+ param_dict['ext_type_url'] = ext_type_url
+ if ext_status is not None:
+ param_dict['ext_status'] = ext_status
+ if ext_description is not None:
+ param_dict['ext_description'] = ext_description
+ if ext_priority is not None:
+ param_dict['ext_priority'] = ext_priority
+ params = {
+ 'bug_ids': self._listify(bug_ids),
+ 'external_bugs': [param_dict],
+ }
+
+ log.debug("Calling ExternalBugs.add_external_bug(%s)", params)
+ return self._proxy.ExternalBugs.add_external_bug(params)
+
+ def update_external_tracker(self, ids=None, ext_type_id=None,
+ ext_type_description=None, ext_type_url=None,
+ ext_bz_bug_id=None, bug_ids=None,
+ ext_status=None, ext_description=None,
+ ext_priority=None):
+ """
+ Wrapper method to allow adding of external tracking bugs using the
+ ExternalBugs::WebService::update_external_bug method.
+
+ This is documented at
+ https://bugzilla.redhat.com/docs/en/html/api/extensions/ExternalBugs/lib/WebService.html#update_external_bug
+
+ ids: A single external tracker bug id or list of external tracker bug
+ ids.
+ ext_type_id: The external tracker id as used by Bugzilla.
+ ext_type_description: The external tracker description as used by
+ Bugzilla.
+ ext_type_url: The external tracker url as used by Bugzilla.
+ ext_bz_bug_id: A single external bug id or list of external bug ids
+ (ie: the bug number in the external tracker).
+ bug_ids: A single bug id or list of bug ids to have external tracker
+ info updated.
+ ext_status: The status of the external bug.
+ ext_description: The description of the external bug.
+ ext_priority: The priority of the external bug.
+ """
+ params = {}
+ if ids is not None:
+ params['ids'] = self._listify(ids)
+ if ext_type_id is not None:
+ params['ext_type_id'] = ext_type_id
+ if ext_type_description is not None:
+ params['ext_type_description'] = ext_type_description
+ if ext_type_url is not None:
+ params['ext_type_url'] = ext_type_url
+ if ext_bz_bug_id is not None:
+ params['ext_bz_bug_id'] = self._listify(ext_bz_bug_id)
+ if bug_ids is not None:
+ params['bug_ids'] = self._listify(bug_ids)
+ if ext_status is not None:
+ params['ext_status'] = ext_status
+ if ext_description is not None:
+ params['ext_description'] = ext_description
+ if ext_priority is not None:
+ params['ext_priority'] = ext_priority
+
+ log.debug("Calling ExternalBugs.update_external_bug(%s)", params)
+ return self._proxy.ExternalBugs.update_external_bug(params)
+
+ def remove_external_tracker(self, ids=None, ext_type_id=None,
+ ext_type_description=None, ext_type_url=None,
+ ext_bz_bug_id=None, bug_ids=None):
+ """
+ Wrapper method to allow removal of external tracking bugs using the
+ ExternalBugs::WebService::remove_external_bug method.
+
+ This is documented at
+ https://bugzilla.redhat.com/docs/en/html/api/extensions/ExternalBugs/lib/WebService.html#remove_external_bug
+
+ ids: A single external tracker bug id or list of external tracker bug
+ ids.
+ ext_type_id: The external tracker id as used by Bugzilla.
+ ext_type_description: The external tracker description as used by
+ Bugzilla.
+ ext_type_url: The external tracker url as used by Bugzilla.
+ ext_bz_bug_id: A single external bug id or list of external bug ids
+ (ie: the bug number in the external tracker).
+ bug_ids: A single bug id or list of bug ids to have external tracker
+ info updated.
+ """
+ params = {}
+ if ids is not None:
+ params['ids'] = self._listify(ids)
+ if ext_type_id is not None:
+ params['ext_type_id'] = ext_type_id
+ if ext_type_description is not None:
+ params['ext_type_description'] = ext_type_description
+ if ext_type_url is not None:
+ params['ext_type_url'] = ext_type_url
+ if ext_bz_bug_id is not None:
+ params['ext_bz_bug_id'] = self._listify(ext_bz_bug_id)
+ if bug_ids is not None:
+ params['bug_ids'] = self._listify(bug_ids)
+
+ log.debug("Calling ExternalBugs.remove_external_bug(%s)", params)
+ return self._proxy.ExternalBugs.remove_external_bug(params)
+
+
+ #################
+ # Query methods #
+ #################
+
+ def pre_translation(self, query):
+ '''Translates the query for possible aliases'''
+ old = query.copy()
+
+ if 'bug_id' in query:
+ if not isinstance(query['bug_id'], list):
+ query['id'] = query['bug_id'].split(',')
+ else:
+ query['id'] = query['bug_id']
+ del query['bug_id']
+
+ if 'component' in query:
+ if not isinstance(query['component'], list):
+ query['component'] = query['component'].split(',')
+
+ if 'include_fields' not in query and 'column_list' not in query:
+ return
+
+ if 'include_fields' not in query:
+ query['include_fields'] = []
+ if 'column_list' in query:
+ query['include_fields'] = query['column_list']
+ del query['column_list']
+
+ # We need to do this here for users that
+ # don't call build_query
+ query.update(self._process_include_fields(query["include_fields"],
+ None, None))
+
+ if old != query:
+ log.debug("RHBugzilla pretranslated query to: %s", query)
+
+ def post_translation(self, query, bug):
+ '''
+ Convert the results of getbug back to the ancient RHBZ value
+ formats
+ '''
+ ignore = query
+
+ # RHBZ _still_ returns component and version as lists, which
+ # deviates from upstream. Copy the list values to components
+ # and versions respectively.
+ if 'component' in bug and "components" not in bug:
+ val = bug['component']
+ bug['components'] = isinstance(val, list) and val or [val]
+ bug['component'] = bug['components'][0]
+
+ if 'version' in bug and "versions" not in bug:
+ val = bug['version']
+ bug['versions'] = isinstance(val, list) and val or [val]
+ bug['version'] = bug['versions'][0]
+
+ # sub_components isn't too friendly of a format, add a simpler
+ # sub_component value
+ if 'sub_components' in bug and 'sub_component' not in bug:
+ val = bug['sub_components']
+ bug['sub_component'] = ""
+ if isinstance(val, dict):
+ values = []
+ for vallist in val.values():
+ values += vallist
+ bug['sub_component'] = " ".join(values)
+
+ def build_external_tracker_boolean_query(self, *args, **kwargs):
+ ignore1 = args
+ ignore2 = kwargs
+ raise RuntimeError("Building external boolean queries is "
+ "no longer supported. Please build a URL query "
+ "via the bugzilla web UI and pass it to 'query --from-url' "
+ "or url_to_query()")
+
+
+ def build_query(self, **kwargs):
+ # pylint: disable=arguments-differ
+
+ # We previously accepted a text format to approximate boolean
+ # queries, and only for RHBugzilla. Upstream bz has --from-url
+ # support now, so point people to that instead so we don't have
+ # to document and maintain this logic anymore
+ def _warn_bool(kwkey):
+ vallist = self._listify(kwargs.get(kwkey, None))
+ for value in vallist or []:
+ for s in value.split(" "):
+ if s not in ["|", "&", "!"]:
+ continue
+ log.warning("%s value '%s' appears to use the now "
+ "unsupported boolean formatting, your query may "
+ "be incorrect. If you need complicated URL queries, "
+ "look into bugzilla --from-url/url_to_query().",
+ kwkey, value)
+ return
+
+ _warn_bool("fixed_in")
+ _warn_bool("blocked")
+ _warn_bool("dependson")
+ _warn_bool("flag")
+ _warn_bool("qa_whiteboard")
+ _warn_bool("devel_whiteboard")
+ _warn_bool("alias")
+
+ return Bugzilla.build_query(self, **kwargs)
diff --git a/scripts/bugzilla/transport.py b/scripts/bugzilla/transport.py
new file mode 100644
index 0000000000..0c3e47babe
--- /dev/null
+++ b/scripts/bugzilla/transport.py
@@ -0,0 +1,196 @@
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
+# the full text of the license.
+
+from logging import getLogger
+import sys
+
+# pylint: disable=import-error
+if sys.version_info[0] >= 3:
+ from configparser import SafeConfigParser
+ from urllib.parse import urlparse # pylint: disable=no-name-in-module
+ from xmlrpc.client import Fault, ProtocolError, ServerProxy, Transport
+else:
+ from ConfigParser import SafeConfigParser
+ from urlparse import urlparse
+ from xmlrpclib import Fault, ProtocolError, ServerProxy, Transport
+# pylint: enable=import-error
+
+import requests
+
+
+log = getLogger(__name__)
+
+
+class BugzillaError(Exception):
+ '''Error raised in the Bugzilla client code.'''
+ pass
+
+
+class _BugzillaTokenCache(object):
+ """
+ Cache for tokens, including, with apologies for the duplicative
+ terminology, both Bugzilla Tokens and API Keys.
+ """
+
+ def __init__(self, uri, tokenfilename):
+ self.tokenfilename = tokenfilename
+ self.tokenfile = SafeConfigParser()
+ self.domain = urlparse(uri)[1]
+
+ if self.tokenfilename:
+ self.tokenfile.read(self.tokenfilename)
+
+ if self.domain not in self.tokenfile.sections():
+ self.tokenfile.add_section(self.domain)
+
+ @property
+ def value(self):
+ if self.tokenfile.has_option(self.domain, 'token'):
+ return self.tokenfile.get(self.domain, 'token')
+ else:
+ return None
+
+ @value.setter
+ def value(self, value):
+ if self.value == value:
+ return
+
+ if value is None:
+ self.tokenfile.remove_option(self.domain, 'token')
+ else:
+ self.tokenfile.set(self.domain, 'token', value)
+
+ if self.tokenfilename:
+ with open(self.tokenfilename, 'w') as tokenfile:
+ log.debug("Saving to tokenfile")
+ self.tokenfile.write(tokenfile)
+
+ def __repr__(self):
+ return '<Bugzilla Token Cache :: %s>' % self.value
+
+
+class _BugzillaServerProxy(ServerProxy, object):
+ def __init__(self, uri, tokenfile, *args, **kwargs):
+ super(_BugzillaServerProxy, self).__init__(uri, *args, **kwargs)
+ self.token_cache = _BugzillaTokenCache(uri, tokenfile)
+ self.api_key = None
+
+ def use_api_key(self, api_key):
+ self.api_key = api_key
+
+ def clear_token(self):
+ self.token_cache.value = None
+
+ def _ServerProxy__request(self, methodname, params):
+ if len(params) == 0:
+ params = ({}, )
+
+ if self.api_key is not None:
+ if 'Bugzilla_api_key' not in params[0]:
+ params[0]['Bugzilla_api_key'] = self.api_key
+ elif self.token_cache.value is not None:
+ if 'Bugzilla_token' not in params[0]:
+ params[0]['Bugzilla_token'] = self.token_cache.value
+
+ # pylint: disable=no-member
+ ret = super(_BugzillaServerProxy,
+ self)._ServerProxy__request(methodname, params)
+ # pylint: enable=no-member
+
+ if isinstance(ret, dict) and 'token' in ret.keys():
+ self.token_cache.value = ret.get('token')
+ return ret
+
+
+class _RequestsTransport(Transport):
+ user_agent = 'Python/Bugzilla'
+
+ def __init__(self, url, cookiejar=None,
+ sslverify=True, sslcafile=None, debug=True, cert=None):
+ if hasattr(Transport, "__init__"):
+ Transport.__init__(self, use_datetime=False)
+
+ self.verbose = debug
+ self._cookiejar = cookiejar
+
+ # transport constructor needs full url too, as xmlrpc does not pass
+ # scheme to request
+ self.scheme = urlparse(url)[0]
+ if self.scheme not in ["http", "https"]:
+ raise Exception("Invalid URL scheme: %s (%s)" % (self.scheme, url))
+
+ self.use_https = self.scheme == 'https'
+
+ self.request_defaults = {
+ 'cert': sslcafile if self.use_https else None,
+ 'cookies': cookiejar,
+ 'verify': sslverify,
+ 'headers': {
+ 'Content-Type': 'text/xml',
+ 'User-Agent': self.user_agent,
+ }
+ }
+
+ # Using an explicit Session, rather than requests.get, will use
+ # HTTP KeepAlive if the server supports it.
+ self.session = requests.Session()
+ if cert:
+ self.session.cert = cert
+
+ def parse_response(self, response):
+ """ Parse XMLRPC response """
+ parser, unmarshaller = self.getparser()
+ parser.feed(response.text.encode('utf-8'))
+ parser.close()
+ return unmarshaller.close()
+
+ def _request_helper(self, url, request_body):
+ """
+ A helper method to assist in making a request and provide a parsed
+ response.
+ """
+ response = None
+ try:
+ response = self.session.post(
+ url, data=request_body, **self.request_defaults)
+
+ # We expect utf-8 from the server
+ response.encoding = 'UTF-8'
+
+ # update/set any cookies
+ if self._cookiejar is not None:
+ for cookie in response.cookies:
+ self._cookiejar.set_cookie(cookie)
+
+ if self._cookiejar.filename is not None:
+ # Save is required only if we have a filename
+ self._cookiejar.save()
+
+ log.debug(response.text)
+ response.raise_for_status()
+ return self.parse_response(response)
+ except requests.RequestException as e:
+ if not response:
+ raise
+ raise ProtocolError(
+ url, response.status_code, str(e), response.headers)
+ except Fault:
+ raise
+ except Exception:
+ e = BugzillaError(str(sys.exc_info()[1]))
+ # pylint: disable=attribute-defined-outside-init
+ e.__traceback__ = sys.exc_info()[2]
+ # pylint: enable=attribute-defined-outside-init
+ raise e
+
+ def request(self, host, handler, request_body, verbose=0):
+ self.verbose = verbose
+ url = "%s://%s%s" % (self.scheme, host, handler)
+
+ # xmlrpclib fails to escape \r
+ request_body = request_body.replace(b'\r', b'&#xd;')
+
+ return self._request_helper(url, request_body)
diff --git a/scripts/check-patch-dirs b/scripts/check-patch-dirs
new file mode 100755
index 0000000000..3cdaf78d44
--- /dev/null
+++ b/scripts/check-patch-dirs
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+#############################################################################
+# Copyright (c) 2017 SUSE
+# All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of version 2 of the GNU General Public License as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, contact Novell, Inc.
+#
+# To contact SUSE about this file by physical or electronic mail,
+# you may find current contact information at www.suse.com
+#############################################################################
+
+. ${0%/*}/wd-functions.sh
+
+branch="$(get_branch_name)"
+
+result=true
+check=false
+
+# This policy starts with master and stable but will apply to
+# future releases starting with SLE15-SP1 and the next openSUSE release
+# following 42.3
+case $branch in
+SLES9*|SLES10*|SLE11-*|SLE12-*|SLE15)
+ ;;
+openSUSE-1*|openSUSE-42.[123])
+ ;;
+*)
+ check=true
+ ;;
+esac
+
+if $check; then
+ for file in "$@"; do
+ # We don't care about anything outside of the patches.* directories
+ case "$file" in
+ patches.*)
+ ;;
+ *)
+ continue
+ ;;
+ esac
+
+ case "$file" in
+ patches.suse/*|patches.rpmify/*|patches.kabi/*|patches.kernel.org/*)
+ ;;
+ *)
+ echo "** $file is in obsolete directory \"${file%%/*}\""
+ result=false
+ ;;
+ esac
+ done
+
+ if ! $result; then
+ echo "Valid patch directories are:"
+ echo "* patches.kernel.org"
+ echo "* patches.rpmify"
+ echo "* patches.kabi"
+ echo "* patches.suse"
+ fi
+fi
+
+$result
diff --git a/scripts/check-patchfmt b/scripts/check-patchfmt
index 73b12bad6e..325631282f 100755
--- a/scripts/check-patchfmt
+++ b/scripts/check-patchfmt
@@ -52,7 +52,10 @@ sub check_filename {
sub do_patch {
my $fh = shift;
+ my $fn = shift;
my ($in_payload, $in_hunk, $maybe_eof, $last_noncontext);
+ my $xen_prefix = "patches.xen/";
+ my $is_xen = $xen_prefix eq substr($fn,0,length($xen_prefix));
my @lines = <$fh>;
chomp(@lines);
@@ -73,9 +76,9 @@ sub do_patch {
$maybe_eof = 0;
(my $new = $cur) =~ s/^\+\+\+ //;
(my $old = $lines[$i-1]) =~ s/^--- //;
- check_filename($new);
- check_filename($old);
- if ($i > 2 && $lines[$i-2] =~ /^={20}/ &&
+ if (!$is_xen) { check_filename($new); }
+ if (!$is_xen) { check_filename($old); }
+ if (!$is_xen && $i > 2 && $lines[$i-2] =~ /^={20}/ &&
$lines[$i-3] =~ /^Index: /) {
err("Superfluous Index: line in patch");
}
@@ -124,12 +127,12 @@ if ($ARGV[0] eq "--stdin") {
$file = $ARGV[0];
}
open(my $fh, '-');
- do_patch($fh);
+ do_patch($fh, $file);
close($fh);
} else {
for $file (@ARGV) {
open(my $fh, '<', $file) or die "$file: $!\n";
- do_patch($fh);
+ do_patch($fh, $file);
close($fh);
}
}
diff --git a/scripts/cvs-wd-timestamp b/scripts/cvs-wd-timestamp
index c89dbd77d4..d41262cb55 100755
--- a/scripts/cvs-wd-timestamp
+++ b/scripts/cvs-wd-timestamp
@@ -28,7 +28,7 @@
if $using_git; then
# Just echo the commit timestamp of HEAD
ts=$(git show --pretty=format:%ct HEAD | head -n 1)
- date "+%Y-%m-%d %H:%M:%S %z" -d "1970-01-01 00:00 UTC $ts seconds"
+ TZ=UTC date "+%Y-%m-%d %H:%M:%S %z" -d "1970-01-01 00:00 UTC $ts seconds"
exit
fi
diff --git a/scripts/git-pre-commit b/scripts/git-pre-commit
index 774e12b440..60b211c327 100644..100755
--- a/scripts/git-pre-commit
+++ b/scripts/git-pre-commit
@@ -47,6 +47,19 @@ while read stat file garbage; do
"$dir/check-patchfmt" --stdin "$file" <"$tmp" || err=1
esac
done
+
+config_sh="$dir/../rpm/config.sh"
+series_conf="$dir/../series.conf"
+
+if [ -r "$config_sh" ]; then
+ . "$config_sh"
+fi
+
+if [ "$SORT_SERIES" = "yes" -a -r "$series_conf" ] && \
+ ! "$dir/git_sort/pre-commit.sh"; then
+ err=1
+fi
+
if test "$err" != 0; then
echo "Aborting."
exit "$err"
diff --git a/scripts/git_sort/README.md b/scripts/git_sort/README.md
new file mode 100644
index 0000000000..ca8c799de5
--- /dev/null
+++ b/scripts/git_sort/README.md
@@ -0,0 +1,362 @@
+Installation Requirements
+=========================
+`git-sort` and the related series.conf sorting scripts depend on the python3
+`dbm` and `pygit2` modules.
+
+For SLE12-SP2 and SLE12-SP3, the python3-dbm package is available in the
+SUSE:SLE-12:Update IBS project:
+```
+https://build.suse.de/project/show/SUSE:SLE-12:Update
+http://download.suse.de/ibs/SUSE:/SLE-12:/Update/standard/SUSE:SLE-12:Update.repo
+```
+
+For openSUSE 42.3 and Tumbleweed, the python3-dbm package is available in the
+normal OSS repository.
+
+The python3-pygit2 package is available from the Kernel:tools OBS project
+except for Tumbleweed where it's available from the normal OSS repository.
+```
+https://build.opensuse.org/package/show/Kernel:tools/python-pygit2
+```
+
+merge_tool.py depends on `merge` from the rcs package, available in standard
+repositories.
+
+The functions in `quilt-mode.sh` are meant to be used with a modified `quilt`
+that can use kernel-source.git's series.conf directly instead of a shadow
+copy.
+
+Packages are available in the following OBS project
+https://build.opensuse.org/package/show/home:benjamin_poirier:series_sort/quilt-ks
+Source is available from
+https://gitlab.suse.de/benjamin_poirier/quilt
+
+The packages in home:benjamin_poirier:series_sort are signed with the key
+from home:benjamin_poirier which has the following fingerprint:
+6075 E129 2ACD 9732 A079 7B40 3F6E 17FC 4A3D 5625
+
+quilt depends on diffstat from the package with the same name. For SLE12-SP2
+and SLE12-SP3, the diffstat package is available in the SDK module. For
+openSUSE 42.3 and Tumbleweed, the diffstat package is available in the normal
+OSS repository.
+
+Configuration Requirements
+==========================
+The LINUX_GIT environment variable must be set to the path of a fresh Linux
+kernel git clone; it will be used as a reference for upstream commit
+information. Specifically, this must be a clone of
+git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git or one of the
+alternate URLs found on kernel.org. The `user.name` and `user.email` git
+config variables must be set to sensible values in that clone; they will be
+used to tag patches.
+
+If you want to import patches that are not yet in mainline but that are in a
+subsystem maintainer's tree, that repository must be configured as an
+additional remote of the local repository cloned under LINUX_GIT. For example:
+```
+linux$ git remote show
+net # git://git.kernel.org/pub/scm/linux/kernel/git/davem/net.git
+net-next # git://git.kernel.org/pub/scm/linux/kernel/git/davem/net-next.git
+origin # git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
+stable # git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git
+```
+
+Inserting a new patch in the "sorted patches" section of series.conf
+====================================================================
+For a patch file, `<patch>` which is a backport of an upstream commit:
+1) add the file into one of the `patches.*` directories
+2) make sure that it contains the correct "Git-commit" tag
+3) make sure that this commit can be found in your local LINUX_GIT clone
+
+Then run:
+```
+kernel-source$ ./scripts/git_sort/series_insert.py <patch>
+```
+
+This script can also be called with multiple patches at once.
+
+Patches which are out-of-tree (not backports of upstream commits) can be added
+directly at a location of your choosing in the "# out-of-tree" section of the
+"sorted patches" section. In doubt, simply add them at the end of that section.
+
+After the patch has been inserted in series.conf, make sure to check that the
+series applies and to fix any potential conflicts. Then commit and push your
+result.
+
+Refreshing the order of patches in series.conf
+==============================================
+As upstream maintainers pull from each other, the order of patches in
+series.conf needs to be refreshed. In that case, run:
+```
+kernel-source$ ./scripts/series_sort.py --upstream series.conf
+```
+
+In case of unexpected trouble, you can also move patch entries to the
+"# out-of-tree" section and run the above script to reset a patch's position.
+
+Backporting commits using kernel-source.git and the quilt-mode.sh functions
+===========================================================================
+The sections "Example workflow to backport a single commit" and "Example
+workflow to backport a series of commits using kernel-source.git" demonstrate
+how to use the functions in quilt-mode.sh, which can assist in backporting a
+single commit or a series of commits directly to kernel-source.git.
+
+Example workflow to backport a single commit
+============================================
+For example, we want to backport f5a952c08e84 which is a fix for another
+commit which was already backported:
+```
+# adjust the path to `sequence-insert.py` according to your environment
+ben@f1:~/local/src/kernel-source$ ./scripts/sequence-patch.sh $(./scripts/git_sort/sequence-insert.py f5a952c08e84)
+[...]
+ben@f1:~/local/src/kernel-source$ cd tmp/current
+ben@f1:~/local/src/kernel-source/tmp/current$ . ../../scripts/git_sort/quilt-mode.sh
+# Note that we are using the "-f" option of qcp since f5a952c08e84 is a
+# followup to another commit; its log contains a "Fixes" tag. If that was not
+# the case, we would use the "-d" and "-r" options of qcp.
+ben@f1:~/local/src/kernel-source/tmp/current$ qcp -f f5a952c08e84
+Info: using references "bsc#1026030 FATE#321670" from patch "patches.drivers/of-of_mdio-Add-a-whitelist-of-PHY-compatibilities.patch" which contains commit ae461131960b.
+Importing patch /tmp/qcp.d82Wqi/0001-of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch (stored as patches/patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch)
+# Note that `q` is an alias for `quilt`. You may be using `q` a lot...
+ben@f1:~/local/src/kernel-source/tmp/current$ q push
+Applying patch patches/patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch
+File drivers/of/of_mdio.c is read-only; trying to patch anyway
+patching file drivers/of/of_mdio.c
+Applied patch patches/patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch (needs refresh)
+
+Now at patch patches/patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch
+ben@f1:~/local/src/kernel-source/tmp/current$ make olddefconfig
+ HOSTCC scripts/basic/fixdep
+ HOSTCC scripts/kconfig/conf.o
+ SHIPPED scripts/kconfig/zconf.tab.c
+ SHIPPED scripts/kconfig/zconf.lex.c
+ SHIPPED scripts/kconfig/zconf.hash.c
+ HOSTCC scripts/kconfig/zconf.tab.o
+ HOSTLD scripts/kconfig/conf
+scripts/kconfig/conf --olddefconfig Kconfig
+ben@f1:~/local/src/kernel-source/tmp/current$ qfmake
+[...]
+ben@f1:~/local/src/kernel-source/tmp/current$ ./refresh_patch.sh
+Refreshed patch patches/patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch
+ben@f1:~/local/src/kernel-source/tmp/current$ cd ../../
+ben@f1:~/local/src/kernel-source$ git st
+On branch SLE12-SP3
+Your branch is up-to-date with 'kerncvs/SLE12-SP3'.
+Changes not staged for commit:
+ (use "git add <file>..." to update what will be committed)
+ (use "git checkout -- <file>..." to discard changes in working directory)
+
+ modified: series.conf
+
+Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+
+ patches.drivers/of-of_mdio-Add-marvell-88e1145-to-whitelist-of-PHY-c.patch
+
+no changes added to commit (use "git add" and/or "git commit -a")
+ben@f1:~/local/src/kernel-source$ git add -A
+ben@f1:~/local/src/kernel-source$ ./scripts/log
+```
+
+Example workflow to backport a series of commits using kernel-source.git
+========================================================================
+Generate the list of commit ids to backport:
+```
+upstream$ git log --no-merges --topo-order --reverse --pretty=tformat:%H v3.12.6.. -- drivers/net/ethernet/emulex/benet/ > /tmp/output
+```
+
+Optionally, generate a description of the commits to backport.
+```
+upstream$ cat /tmp/output | xargs -n1 git log -n1 --oneline > /tmp/list
+```
+
+Optionally, check if commits in the list are referenced in the logs of later
+commits which are not in the list themselves. You may wish to review these
+later commits and add them to the list.
+```
+upstream$ cat /tmp/list | check_missing_fixes.sh
+```
+
+Optionally, check which commits in the list have already been applied to
+kernel-source.git. Afterwards, you may wish to regenerate the list of commit
+ids with a different starting point; or remove from series.conf the commits
+that have already been applied and cherry-pick them again during the backport;
+or skip them during the backport.
+
+```
+# note that the path is a pattern, not just a base directory
+kernel-source$ cat /tmp/list | refs_in_series.sh "drivers/net/ethernet/emulex/benet/*"
+```
+
+Generate the work tree with patches applied up to the first patch in the
+list of commits to backport:
+```
+# adjust the path to `sequence-insert.py` according to your environment
+kernel-source$ ./scripts/sequence-patch.sh $(./scripts/git_sort/sequence-insert.py $(head -n1 /tmp/list | awk '{print $1}'))
+```
+
+It is preferable to check that the driver builds before getting started:
+```
+kernel-source/tmp/current$ make -j4 drivers/net/ethernet/intel/e1000/
+```
+
+Import the quilt-mode functions:
+```
+kernel-source/tmp/current$ . ../../scripts/git_sort/quilt-mode.sh
+```
+
+Set the list of commits to backport:
+```
+kernel-source/tmp/current$ qadd -r "bsc#1024371 FATE#321245" -d patches.drivers < /tmp/list
+```
+
+Note that the commits are automatically sorted using git-sort.
+The references and destination are saved in environment variables and reused
+later by `qcp` (see below). They can also be specified directly to `qcp`.
+
+The working list can be queried at any time. Note that it is kept in the
+$series environment variable. It will be lost if the shell exits. It is not
+available in other terminals.
+```
+kernel-source/tmp/current$ qnext
+847a1d6796c7 e1000: Do not overestimate descriptor counts in Tx pre-check (v4.6-rc3)
+kernel-source/tmp/current$ qcat
+ 847a1d6796c7 e1000: Do not overestimate descriptor counts in Tx pre-check (v4.6-rc3)
+ a4605fef7132 e1000: Double Tx descriptors needed check for 82544 (v4.6-rc3)
+ 1f2f83f83848 e1000: call ndo_stop() instead of dev_close() when running offline selftest (v4.7-rc1)
+ 91c527a55664 ethernet/intel: use core min/max MTU checking (v4.10-rc1)
+ 311191297125 e1000: use disable_hardirq() for e1000_netpoll() (v4.10-rc1)
+```
+
+Start backporting:
+```
+kernel-source/tmp/current$ qdoit -j4 drivers/net/ethernet/intel/e1000/
+```
+
+For each commit in the list, this command will
+* go to the appropriate location in the series using `qgoto` which calls
+ `quilt push/pop`
+* check that the commit is not already present somewhere in the series using
+ `qdupcheck`
+* import the commit using `qcp` which calls `git format-patch` and `quilt
+ import`
+* add required tags using `clean_header.sh`
+* apply the commit using `quilt push`
+* build test the result using `qfmake`. This calls make with the options
+ specified to `qdoit` plus the .o targets corresponding to the .c files
+ changed by the topmost patch.
+
+The process will stop automatically in case of error. At that time the user
+must address the situation and then call `qdoit` again when ready.
+
+To address the situation,
+* if a commit is already present in an existing patch
+ * possibly leave the patch where it is or move it to the current
+ location. To move a patch, edit series.conf. However, if the patch
+ is already applied, make sure to use `q pop` or `qgoto` first.
+ Then call `qskip` to skip the commit.
+ * remove the other copy, using `q delete -r <patch>`, then call
+ `qcp <commit>` and follow as indicated below (q push, qfmake,
+ ./refresh_patch.sh)
+* if a commit does not apply
+ `q push -f # or -fm`
+ `vi-conflicts # also from git-helpers`
+ `qfmake [...]`
+ `./refresh_patch.sh`
+* if one or more additional commits are necessary to fix the problem
+ Use `qedit` to add these additional commits to the list of commits to
+ backport.
+
+ Note that the queue of commits to backport is sorted after invoking
+ qadd or qedit. Therefore, commits can be added anywhere in the list
+ when using qedit.
+ After editing the queue of commits to backport, `qnext` will show one
+ of the new commits since it should be backported before the current
+ one. You can continue by calling `qdoit` to backport the dependent
+ commits.
+* if it turns out that the commit should be skipped
+ `q delete -r`
+ or, if after having done `q push -f`:
+ `q pop -f`
+ `q delete -r $(q next)`
+
+The following commands can be useful to identify the origin of code lines when
+fixing conflicts:
+```
+quilt annotate <file>
+git gui blame --line=<line> <commit> <file>
+```
+
+Example of a merge conflict resolution involving sorted patches in series.conf
+==============================================================================
+When merging or rebasing between commits in kernel-source it is possible that
+there is a conflict involving sorted patches in series.conf. This type of
+conflict can be solved automatically using the git mergetool interface with
+the script merge_tool.py. Please see the header of that file for installation
+instructions.
+
+When using the merge tool, the LINUX_GIT reference repository must fetch from
+the repositories which are the upstream source of patches which were added in
+the remote branch of the merge (the `<commit>` argument to `git merge`) or
+which are in different subsystem maintainer sections between the local and
+remote revisions. A simple way to satisfy that condition is to fetch from all
+remotes configured for git-sort before doing a merge resolution. The script
+`scripts/git_sort/update_clone.py` can be used to create or update the
+configuration of a repository so that it contains all of the remotes
+configured for git-sort. Please see the help message of that script for more
+information.
+
+As an example, the merge in kernel-source commit da87d04b3b needed conflict
+resolution. Let's redo this resolution using merge_tool:
+```
+ben@f1:~/local/src/kernel-source$ git log -n1 da87d04b3b
+commit da87d04b3bc6edf2b58a10e27c77352a5eb7b3d9
+Merge: e2d6a02d9c 1244565fb9
+Author: Jiri Kosina <jkosina@suse.cz>
+Date: Wed Sep 13 18:48:33 2017 +0200
+
+ Merge remote-tracking branch 'origin/users/dchang/SLE15/for-next' into SLE15
+
+ Conflicts:
+ series.conf
+ben@f1:~/local/src/kernel-source$ git co e2d6a02d9c
+HEAD is now at e2d6a02d9c... Merge remote-tracking branch 'origin/users/bpoirier/SLE15/for-next' into SLE15
+ben@f1:~/local/src/kernel-source$ git merge 1244565fb9
+Auto-merging series.conf
+CONFLICT (content): Merge conflict in series.conf
+Recorded preimage for 'series.conf'
+Automatic merge failed; fix conflicts and then commit the result.
+ben@f1:~/local/src/kernel-source$ git mergetool --tool=git-sort series.conf
+Merging:
+series.conf
+
+Normal merge conflict for 'series.conf':
+ {local}: modified file
+ {remote}: modified file
+10 commits added, 0 commits removed from base to remote.
+ben@f1:~/local/src/kernel-source$ git st
+HEAD detached at e2d6a02d9c
+All conflicts fixed but you are still merging.
+ (use "git commit" to conclude merge)
+
+Changes to be committed:
+
+ new file: patches.drivers/be2net-Fix-UE-detection-logic-for-BE3.patch
+ new file: patches.drivers/be2net-Update-the-driver-version-to-11.4.0.0.patch
+ new file: patches.drivers/bnx2x-Remove-open-coded-carrier-check.patch
+ new file: patches.drivers/bnx2x-fix-format-overflow-warning.patch
+ new file: patches.drivers/net-broadcom-bnx2x-make-a-couple-of-const-arrays-sta.patch
+ new file: patches.drivers/net-phy-Make-phy_ethtool_ksettings_get-return-void.patch
+ new file: patches.drivers/netxen-fix-incorrect-loop-counter-decrement.patch
+ new file: patches.drivers/netxen-remove-writeq-readq-function-definitions.patch
+ new file: patches.drivers/netxen_nic-Remove-unused-pointer-hdr-in-netxen_setup.patch
+ new file: patches.drivers/qlge-avoid-memcpy-buffer-overflow.patch
+ modified: series.conf
+
+Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+
+ series.conf.orig
+
+ben@f1:~/local/src/kernel-source$ git commit
+```
diff --git a/scripts/git_sort/check_missing_fixes.sh b/scripts/git_sort/check_missing_fixes.sh
new file mode 100755
index 0000000000..638b5a852e
--- /dev/null
+++ b/scripts/git_sort/check_missing_fixes.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+# Check if a commit is referenced in the log of later commits.
+# Useful to identify missing followup commits.
+
+
progname=$(basename "$0")

# Print this script's usage message on stdout.
usage () {
    printf 'Usage: %s [OPTIONS]\n\n' "$progname"
    cat <<"EOD"
Read a list of git hashes from stdin and print information about commits which
reference these hashes in their log and which are not part of the list.

The input list must be partially ordered such that if it already contains some
fixes, they appear after the commit they fix. Otherwise, fixes may appear
multiple times in the output. Use `git sort` if needed.

Options:
 -h Print this help
EOD
}
+
# Parse command line options. Only -h is accepted; with a leading ":" in
# the optstring, getopts reports any invalid option as "?" with the
# offending character in OPTARG.
while getopts ":h" flag; do
    if [ "$flag" = "h" ]; then
        usage
        exit 0
    else
        echo "Invalid option: -$OPTARG" >&2
        exit 1
    fi
done
+
indent=" "
# Process the list from last to first (tac) so that when a commit is
# examined, every commit listed after it has already been added to `known`
# and can be filtered out of the "mentions" search. The final tac restores
# the original order. Note: the whole loop runs in one pipeline subshell,
# so `known` accumulates across iterations but does not survive the loop.
declare -a known
tac | while read line; do
    # First word of the input line is the commit hash; normalize it to the
    # abbreviated form used by --grep below.
    commit=$(git rev-parse --short=7 $(echo "$line" | awk '{print $1}'))
    # Print later commits whose log message mentions $commit, excluding
    # commits that are themselves part of the input list (already "known").
    git log --no-merges --pretty="$indent%h %s" --grep="$commit" $commit.. | \
        grep -vf <(echo -n "${known[@]}" | \
            awk 'BEGIN {RS=" "} {print "^'"$indent"'" $1}')
    known+=("$commit")
    echo "$line"
done | tac
diff --git a/scripts/git_sort/clean_conflicts.awk b/scripts/git_sort/clean_conflicts.awk
new file mode 100644
index 0000000000..a5bb17e78b
--- /dev/null
+++ b/scripts/git_sort/clean_conflicts.awk
@@ -0,0 +1,47 @@
+#!/usr/bin/awk -f
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
# Remove the "Conflicts:" section that git merge/cherry-pick may append to
# a commit message. Output is delayed by one line (buffered in lastLine)
# so that the line immediately preceding "Conflicts:" (typically blank) is
# dropped along with the section itself.
#
# States:
#   conflicts == 0  before any "Conflicts:" line: buffered copy-through
#   conflicts == 1  inside the Conflicts section: lines are discarded
#   conflicts == 2  at/after the "---" separator: direct copy-through
BEGIN {
    conflicts = 0
}

/^Conflicts:$/ {
    conflicts = 1
    # also discard the line following "Conflicts:" (a conflicting path)
    getline
    next
}

/^---$/ {
    if (conflicts == 0) {
        # no Conflicts section seen: flush the pending buffered line
        print lastLine
    }
    # fall through to the main rule below, which prints "---" itself
    conflicts = 2
}

{
    #print "statement 3 conflicts " conflicts $0
    if (conflicts == 0) {
        if (NR != 1) {
            print lastLine
        }
        # NOTE(review): if the input never contains a "---" separator, the
        # final buffered line is never printed -- presumably patch headers
        # always end with "---"; confirm against patch_header.awk output.
        lastLine = $0
    } else if (conflicts == 2) {
        print
    }
}
diff --git a/scripts/git_sort/clean_header.sh b/scripts/git_sort/clean_header.sh
new file mode 100755
index 0000000000..b761585c8d
--- /dev/null
+++ b/scripts/git_sort/clean_header.sh
@@ -0,0 +1,384 @@
+#!/bin/bash -e
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+# Filter a patch file such that it is properly formatted per SUSE rules.
+# Useful when importing patches into SUSE's kernel-source.git.
+
+
progname=$(basename "$0")
libdir=$(dirname "$(readlink -f "$0")")
# Locate the mainline linux.git clone; abort early if it is unavailable.
git_dir=$("$libdir"/../linux_git.sh) || exit 1

# All git invocations below operate on the mainline repository.
export GIT_DIR=$git_dir
# Fall back to vi when neither EDITOR nor VISUAL is set.
: ${EDITOR:=${VISUAL:=vi}}

# Helper libraries: From-line handling, tag get/add/remove, misc helpers
# (var_override, expand_git_ref, attributions, ...).
. "$libdir"/lib_from.sh
. "$libdir"/lib_tag.sh
. "$libdir"/lib.sh
+
# Print this script's usage message on stdout.
usage () {
    cat <<EOF
Usage: $progname [options] [patch file]

Options:
 -c, --commit=<refspec> Upstream commit id used to tag the patch file.
 -r, --reference=<bsc> bsc or fate number used to tag the patch file.
 -R, --soft-reference=<bsc> bsc or fate number used to tag the patch file
 if no other reference is found.
 -s, --skip=<domain> Skip adding Acked-by tag if there is already an
 attribution line with an email from this domain.
 (Can be used multiple times.)
 -h, --help Print this help

EOF
}
+
+
# Parse the command line with getopt(1) so that long options are
# supported; getopt re-quotes the arguments and `eval set` replaces the
# positional parameters with the normalized result.
result=$(getopt -o c:r:R:s:h --long commit:,reference:,soft-reference:,skip:,help -n "$progname" -- "$@")
if [ $? != 0 ]; then
    echo "Error: getopt error" >&2
    exit 1
fi

eval set -- "$result"

while true ; do
    case "$1" in
        -c|--commit)
            # upstream commit id override
            opt_commit=$2
            shift
            ;;
        -r|--reference)
            # reference used unconditionally
            opt_ref=$2
            shift
            ;;
        -R|--soft-reference)
            # reference used only if no other reference is found
            opt_soft=$2
            shift
            ;;
        -s|--skip)
            # may be given multiple times; collected into an array
            opt_skip+=($2)
            shift
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        --)
            # end of options; remaining argument (if any) is the patch file
            shift
            break
            ;;
        *)
            echo "Error: could not parse arguments" >&2
            exit 1
            ;;
    esac
    shift
done
+
# Read the patch from the file argument, or from stdin when none is given.
# bash strips trailing newlines in variables, protect them with "---"
if [ -n "$1" ]; then
    filename=$1
    patch=$(cat "$filename" && echo ---)
    shift
else
    patch=$(cat && echo ---)
fi

if [ -n "$1" ]; then
    echo "Error: too many arguments" > /dev/stderr
    usage > /dev/stderr
    exit 1
fi

# Strip DOS (CRLF) line endings, if any.
if echo -n "${patch%---}" | grep -q $'\r'; then
    patch=$(echo -n "${patch%---}" | sed -e 's/\r//g' && echo ---)
fi

# Split the patch into header and body, cleaning the header on the way:
body=$(echo -n "${patch%---}" | awk -f "$libdir"/patch_body.awk && echo ---)
# * Remove "From" line with tag, since it points to a local commit from
#   kernel.git that I created
# * Remove "Conflicts" section
header=$(echo -n "${patch%---}" | awk -f "$libdir"/patch_header.awk | from_extract | awk -f "$libdir"/clean_conflicts.awk && echo ---)
+
+
# Git-commit:
# Determine the upstream commit id of this patch. Candidate sources, in
# increasing order of precedence: a "cherry picked from commit" line, an
# existing Git-commit tag, the -c command line option, a unique
# `git log --grep` match on the subject, interactive input.

cherry=$(echo "$header" | tag_get "cherry picked from commit")
if [ "$cherry" ]; then
    if ! cherry=$(echo "$cherry" | expand_git_ref); then
        exit 1
    fi
    header=$(echo -n "$header" | tag_remove "cherry picked from commit")
fi

git_commit=$(echo "$header" | tag_get git-commit)
if [ "$git_commit" ]; then
    if ! git_commit=$(echo "$git_commit" | expand_git_ref); then
        exit 1
    fi
    header=$(echo -n "$header" | tag_remove git-commit)
fi

if [ "$opt_commit" ] && ! opt_commit=$(echo "$opt_commit" | expand_git_ref); then
    exit 1
fi

# command line > Git-commit > cherry
var_override commit "$cherry" "cherry picked from commit"
var_override commit "$git_commit" "Git-commit"
var_override commit "$opt_commit" "command line commit"

if [ -z "$commit" ]; then
    # Try to identify the commit upstream by searching for the subject.
    patch_subject=$(echo -n "$header" | tag_get subject | remove_subject_annotation)
    log_grep=$(git log --reverse --pretty="tformat:%h%x09%ai%x09%aN <%aE>%x09%s" -F --grep "$patch_subject" | grep -F "$patch_subject" || true)
    log_grep_nb=$(echo "$log_grep" | wc -l)
    if [ -n "$log_grep" -a $log_grep_nb -eq 1 ]; then
        # exactly one match: use it
        log_grep_commit=$(echo "$log_grep" | awk '{print $1}' | expand_git_ref)
        var_override commit "$log_grep_commit" "git log --grep commit"
    elif [ -t 0 ]; then
        # interactive: let the user pick a candidate or enter a commit id
        echo -n "Upstream commit id unknown for patch \"$patch_subject\", "
        if [ -z "$log_grep" ]; then
            echo "enter it now?"
        else
            echo "$log_grep_nb potential commits found in git log. Which one to use?"
            echo "$log_grep" | awk -F$'\t' '{print $1 " " $2 " " $3}'
        fi
        read -p "(<refspec>/empty cancels): " prompt_commit
        if [ "$prompt_commit" ]; then
            prompt_commit=$(echo "$prompt_commit" | expand_git_ref)
            var_override commit "$prompt_commit" "prompted commit"
        fi
    fi
fi

if [ -z "$commit" ]; then
    echo "Warning: Upstream commit id unknown, you will have to edit the patch header manually." > /dev/stderr
    header=$(echo -n "$header" | tag_add Git-commit "(fill me in)")
    # request an interactive header edit before output (see below)
    edit=1
else
    commit_str=$commit
    if [ -n "${body%---}" ]; then
        # Mark the patch as partial when it touches fewer files than the
        # upstream commit does.
        cl_orig=$(git format-patch --stdout -p $commit^..$commit | cheat_diffstat | diffstat -lp1 | wc -l)
        cl_patch=$(echo -n "${body%---}" | cheat_diffstat | diffstat -lp1 | wc -l)
        if [ $cl_orig -ne $cl_patch ]; then
            commit_str+=" (partial)"
        fi
    fi
    header=$(echo -n "$header" | tag_add Git-commit "$commit_str")

    # Find the version tag that contains the commit; used below as the
    # mainline status.
    git_describe=$(git describe --contains --match "v*" $commit 2>/dev/null || true)
    git_describe=${git_describe%%[~^]*}
    if [ -z "$git_describe" ]; then
        # Not in any tagged release: the commit is queued in a maintainer
        # repository; record that repository's URL instead.
        git_describe="Queued in subsystem maintainer repository"
        result=$(git describe --contains --all $commit)
        if echo "$result" | grep -Eq "^remotes/"; then
            remote=$(echo "$result" | cut -d/ -f2)
        else
            branch=${result%%[~^]*}
            if [ $branch = "stash" ]; then
                echo "Error: cannot use stash to describe patch. Stopping to avoid possibly erroneous results." > /dev/stderr
                exit 1
            else
                if ! remote=$(git config --get branch.$branch.remote); then
                    echo "Error: \"$branch\" does not look like a remote tracking branch. Failed to get information about repository URL." > /dev/stderr
                    exit 1
                fi
            fi
        fi
        describe_url=$(git config --get remote.$remote.url)
    fi
fi
+
+
# Patch-mainline:
# Record the mainline status (release tag, or "Queued ..." from above).

patch_mainline=$(echo -n "$header" | tag_get patch-mainline)
header=$(echo -n "$header" | tag_remove patch-mainline)

# Sometimes the tag does not include -rcX, I prefer to have it
# var_override can take care of it, but it will generate a warning
if [ "$patch_mainline" = "${git_describe%-rc*}" ]; then
    patch_mainline=$git_describe
fi

# git describe > Patch-mainline
var_override ml_status "$patch_mainline" "Patch-mainline"
var_override ml_status "$git_describe" "git describe result"

if [ -z "$ml_status" ]; then
    echo "Warning: Mainline status unknown, you will have to edit the patch header manually." > /dev/stderr
    header=$(echo -n "$header" | tag_add Patch-mainline "(fill me in)")
    edit=1
else
    header=$(echo -n "$header" | tag_add Patch-mainline "$ml_status")
fi
+
+
# Git-repo:
# Record the source repository URL when the commit is not from mainline.

git_repo=$(echo -n "$header" | tag_get git-repo)
header=$(echo -n "$header" | tag_remove git-repo)

# git config > Git-repo
var_override remote_url "$git_repo" "Git-repo"
var_override --allow-empty remote_url "$describe_url" "git describe and remote configuration"

if [ -n "$remote_url" ]; then
    header=$(echo -n "$header" | tag_add Git-repo "$remote_url")
fi


# Patch-filtered:
# may be added by the exportpatch tool
header=$(echo -n "$header" | tag_remove patch-filtered)


# References:
# Sources, in increasing order of precedence: -R option (soft), a
# "cherry picked for" line, an existing References tag, the -r option.

cherry=$(echo "$header" | tag_get "cherry picked for")
if [ "$cherry" ]; then
    header=$(echo -n "$header" | tag_remove "cherry picked for")
fi

references=$(echo -n "$header" | tag_get --last references)
if [ "$references" ]; then
    header=$(echo -n "$header" | tag_remove --last references)
fi

# command line > References > cherry > command line (soft)
var_override ref "$opt_soft"
var_override ref "$cherry" "cherry picked for"
var_override ref "$references" "References"
var_override ref "$opt_ref" "command line reference"

if [ -z "$ref" ]; then
    echo "Warning: Reference information unknown, you will have to edit the patch header manually." > /dev/stderr
    header=$(echo -n "$header" | tag_add References "(fill me in)")
    edit=1
else
    header=$(echo -n "$header" | tag_add --last References "$ref")
fi
+
+
if [ -n "$commit" ]; then
    # Compare the From:/Date:/Subject: lines against the upstream commit
    # and prefer the upstream values when they differ.
    original_header=$(git format-patch --stdout -p $commit^..$commit | awk -f "$libdir"/patch_header.awk && echo ---)


    # Clean From:

    patch_from=$(echo -n "$header" | tag_get --last from)
    header=$(echo -n "$header" | tag_remove --last from)
    original_from=$(echo -n "$original_header" | tag_get --last from)

    # git format-patch > From
    var_override from "$patch_from" "patch file From:"
    var_override from "$original_from" "git format-patch From:"

    header=$(echo -n "$header" | tag_add --last From "$from")


    # Clean Date:

    patch_date=$(echo -n "$header" | tag_get date)
    header=$(echo -n "$header" | tag_remove date)
    original_date=$(echo -n "$original_header" | tag_get date)

    # git format-patch > date
    var_override date "$patch_date" "patch file Date:"
    var_override date "$original_date" "git format-patch Date:"

    header=$(echo -n "$header" | tag_add Date "$date")


    # Clean Subject:

    patch_subject=$(echo -n "$header" | tag_get subject | remove_subject_annotation)
    original_subject=$(echo -n "$original_header" | tag_get subject | remove_subject_annotation)

    # git format-patch > Subject
    var_override subject "$patch_subject" "patch file Subject:"
    var_override subject "$original_subject" "git format-patch Subject:"

    if [ "$original_subject" != "$patch_subject" ]; then
        header=$(echo -n "$header" | tag_remove subject)
        header=$(echo -n "$header" | tag_add Subject "$subject")
    fi
    # else ... keep the changes lower between the original patch file and
    # the cleaned one
fi
+
+
# Clean attributions
# Drop the placeholder signature line that exportpatch emits when it is
# not configured with a real identity.

# this may be added by exportpatch in its default configuration
header=$(echo -n "$header" | grep -vF "Acked-by: Your Name <user@business.com>")
+
# Make sure every attribution line (Signed-off-by, Acked-by, ...) from the
# upstream commit is present in the patch file; add the missing ones.
patch_attributions=$(echo -n "$header" | get_attributions)
if [ -n "$commit" ]; then
    original_attributions=$(echo -n "$original_header" | get_attributions)
    missing=$(grep -vf <(echo "$patch_attributions") <(echo "$original_attributions") || true)
    if [ -n "$missing" ]; then
        # Use plain echo (not -n): wc -l counts newline characters, so the
        # previous `echo -n | wc -l` under-counted by one and a single
        # missing attribution was never added.
        count=$(echo "$missing" | wc -l)
        echo "Warning: $count attribution lines missing from the patch file. Adding them." > /dev/stderr
        header=$(echo -n "$header" | insert_attributions "$missing")
    fi
fi
+
+
+# Add Acked-by:
+
+name=$(git config --get user.name)
+email=$(git config --get user.email)
+
+if [ -z "$name" -o -z "$email" ]; then
+ name_str=${name:-(empty name)}
+ email_str=${email:-(empty email)}
+ echo "Warning: user signature incomplete ($name_str <$email_str>), you will have to edit the patch header manually. Check the git config of the repository in $git_dir." > /dev/stderr
+ name=${name:-Name}
+ email=${email:-user@example.com}
+ edit=1
+fi
+signature="$name <$email>"
+
# Add the Acked-by tag unless an attribution with the user's signature, or
# with an email from one of the --skip domains, is already present.
patterns=("$signature")
# Expand the whole opt_skip array: the previous `+=($opt_skip)` expanded
# only the first element, silently ignoring all but the first -s option.
patterns+=("${opt_skip[@]}")
if ! echo -n "$header" | get_attribution_names | grep -qF "$(printf "%s\n" "${patterns[@]}")"; then
    header=$(echo -n "${header%---}" | tag_add Acked-by "$signature" && echo ---)
fi
+
+
# Let the user fix up the header interactively if any of the information
# above could not be determined automatically.
if [ -n "$edit" ]; then
    if [ ! -t 0 ]; then
        echo "Warning: input is not from a terminal, cannot edit header now." > /dev/stderr
    else
        # edit a temporary copy, then read it back (trailing newlines again
        # protected with "---")
        tmpfile=
        trap '[ -n "$tmpfile" -a -f "$tmpfile" ] && rm "$tmpfile"' EXIT
        tmpfile=$(mktemp --tmpdir clean_header.XXXXXXXXXX)
        echo -n "${header%---}" > "$tmpfile"
        $EDITOR "$tmpfile"
        header=$(cat "$tmpfile" && echo ---)
        rm "$tmpfile"
        trap - EXIT
    fi
fi

# Write the result back to the input file when one was given, otherwise
# to stdout.
if [ -n "$filename" ]; then
    exec 1>"$filename"
fi
echo -n "${header%---}"
echo -n "${body%---}"
diff --git a/scripts/git_sort/exc.py b/scripts/git_sort/exc.py
new file mode 100644
index 0000000000..7f7af685f5
--- /dev/null
+++ b/scripts/git_sort/exc.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
class KSException(BaseException):
    """Base class for the exceptions used by the kernel-source scripts."""
    pass


class KSError(KSException):
    """A KSException representing an error condition."""
    pass


class KSNotFound(KSError):
    """A KSError raised when a lookup finds nothing (raise sites are
    outside this file)."""
    pass
diff --git a/scripts/git_sort/git_sort.py b/scripts/git_sort/git_sort.py
new file mode 100755
index 0000000000..a07d737172
--- /dev/null
+++ b/scripts/git_sort/git_sort.py
@@ -0,0 +1,734 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import argparse
+import bisect
+import collections
+import dbm
+import functools
+import operator
+import os
+import os.path
+import pprint
+import pygit2
+import re
+import shelve
+import subprocess
+import sys
+import types
+
+
class GSException(BaseException):
    """Base class for git-sort exceptions."""
    pass


class GSError(GSException):
    """Fatal git-sort error (e.g. failed `git log`, missing fetch
    refspec -- see get_heads()/get_history() below)."""
    pass


class GSKeyError(GSException):
    """GSException for failed key lookups (raise sites are outside this
    excerpt)."""
    pass


class GSNotFound(GSException):
    """GSException for objects that could not be found (raise sites are
    outside this excerpt)."""
    pass
+
+
class RepoURL(object):
    """
    Canonical representation of a kernel repository URL.

    The URL is normalized at construction time: known kernel.org mirror
    prefixes are rewritten to the canonical git:// prefix, a bare name is
    prefixed with it, a missing ".git" extension is appended and the
    historical "torvalds/linux-2.6" alias is mapped to "torvalds/linux".
    A RepoURL built from None (or the string "None") stands for "no
    repository" and has url == None.
    """
    k_org_canon_prefix = "git://git.kernel.org/pub/scm/linux/kernel/git/"
    proto_match = re.compile("(git|https?)://")
    ext = ".git"

    def __init__(self, url):
        # None, or its repr (as produced when round-tripping through
        # text), means "no repository".
        if url is None or url == repr(None):
            self.url = None
            return

        mirror_prefixes = (
            "http://git.kernel.org/pub/scm/linux/kernel/git/",
            "https://git.kernel.org/pub/scm/linux/kernel/git/",
            "https://kernel.googlesource.com/pub/scm/linux/kernel/git/",
        )
        prefix = next((p for p in mirror_prefixes if url.startswith(p)),
                      None)
        if prefix is not None:
            url = url.replace(prefix, self.k_org_canon_prefix)

        if self.proto_match.match(url) is None:
            url = self.k_org_canon_prefix + url

        if not url.endswith(self.ext):
            url += self.ext

        # an undocumented alias
        if url == "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux-2.6.git":
            url = "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"

        self.url = url

    def _is_valid_operand(self, other):
        # anything exposing a "url" attribute can be compared
        return hasattr(other, "url")

    def __eq__(self, other):
        if not self._is_valid_operand(other):
            return NotImplemented
        return self.url == other.url

    def __ne__(self, other):
        if not self._is_valid_operand(other):
            return NotImplemented
        return self.url != other.url

    def __hash__(self):
        return hash(self.url)

    def __repr__(self):
        return str(self.url)

    def __str__(self):
        # abbreviated form: canonical prefix and ".git" stripped
        if self.url is None:
            return ""
        if self.url.startswith(self.k_org_canon_prefix) and \
                self.url.endswith(self.ext):
            return self.url[len(self.k_org_canon_prefix):-len(self.ext)]
        return self.url
+
+
@functools.total_ordering
class Head(object):
    """
    A (repository URL, branch) pair naming one of the indexed remote heads.

    Ordering is defined by the head's position in the module-level
    "remotes" tuple (via _get_index()); a head whose RepoURL is None sorts
    after every indexed head.
    """

    def __init__(self, repo_url, rev="master"):
        self.repo_url = repo_url
        self.rev = rev

    def _is_valid_operand(self, other):
        return hasattr(other, "repo_url") and hasattr(other, "rev")

    def _get_index(self):
        """
        A head with no url is considered out of tree. Any other head with a
        url is upstream of it.
        """
        if self.repo_url == RepoURL(None):
            return len(remotes)
        return remote_index[self]

    def __eq__(self, other):
        if not self._is_valid_operand(other):
            return NotImplemented
        return (self.repo_url, self.rev) == (other.repo_url, other.rev)

    def __lt__(self, other):
        if not self._is_valid_operand(other):
            return NotImplemented
        return self._get_index() < other._get_index()

    def __hash__(self):
        return hash((self.repo_url, self.rev,))

    def __repr__(self):
        return "%s %s" % (repr(self.repo_url), self.rev,)

    def __str__(self):
        base = str(self.repo_url)
        if self.rev == "master":
            return base
        return ("%s %s" % (base, self.rev,)).strip()
+
+
+# a list of each remote head which is indexed by this script
+# If the working repository is a clone of linux.git (it fetches from mainline,
+# the first remote) and a commit does not appear in one of these remotes, it is
+# considered "not upstream" and cannot be sorted.
+# Repositories that come first in the list should be pulling/merging from
+# repositories lower down in the list. Said differently, commits should trickle
+# up from repositories at the end of the list to repositories higher up. For
+# example, network commits usually follow "net-next" -> "net" -> "linux.git".
+#
+# linux-next is not a good reference because it gets rebased. If a commit is in
+# linux-next, it comes from some other tree. Please tag the patch accordingly.
+#
+# Head(RepoURL(remote url), remote branch name)[]
+# Note that "remote url" can be abbreviated if it starts with one of the usual
+# kernel.org prefixes and "remote branch name" can be omitted if it is "master".
# Indexed remote heads, in sort order (see the comment block above).
remotes = (
    Head(RepoURL("torvalds/linux.git")),
    Head(RepoURL("davem/net.git")),
    Head(RepoURL("davem/net-next.git")),
    Head(RepoURL("rdma/rdma.git"), "for-rc"),
    Head(RepoURL("rdma/rdma.git"), "for-next"),
    Head(RepoURL("dledford/rdma.git"), "k.o/for-next"),
    Head(RepoURL("jejb/scsi.git"), "for-next"),
    Head(RepoURL("bp/bp.git"), "for-next"),
    Head(RepoURL("tiwai/sound.git")),
    Head(RepoURL("git://linuxtv.org/media_tree.git")),
    Head(RepoURL("powerpc/linux.git"), "fixes"),
    Head(RepoURL("powerpc/linux.git"), "next"),
    Head(RepoURL("tip/tip.git")),
    Head(RepoURL("shli/md.git"), "for-next"),
    Head(RepoURL("dhowells/linux-fs.git"), "keys-uefi"),
    Head(RepoURL("tytso/ext4.git"), "dev"),
    Head(RepoURL("s390/linux.git"), "for-linus"),
    Head(RepoURL("https://github.com/kdave/btrfs-devel.git"), "misc-next"),
    Head(RepoURL("git://people.freedesktop.org/~airlied/linux"), "drm-next"),
    Head(RepoURL("git://anongit.freedesktop.org/drm/drm-misc"), "drm-misc-next"),
    Head(RepoURL("gregkh/tty.git"), "tty-next"),
    Head(RepoURL("jj/linux-apparmor.git"), "apparmor-next"),
    Head(RepoURL("pablo/nf.git")),
    Head(RepoURL("pablo/nf-next.git")),
    Head(RepoURL("horms/ipvs.git")),
    Head(RepoURL("horms/ipvs-next.git")),
    Head(RepoURL("klassert/ipsec.git")),
    Head(RepoURL("klassert/ipsec-next.git")),
    Head(RepoURL("mkp/scsi.git"), "4.19/scsi-queue"),
    Head(RepoURL("git://git.kernel.dk/linux-block.git"), "for-next"),
    Head(RepoURL("git://git.kernel.org/pub/scm/virt/kvm/kvm.git"), "queue"),
    Head(RepoURL("git://git.infradead.org/nvme.git"), "nvme-4.18"),
    Head(RepoURL("git://git.infradead.org/nvme.git"), "nvme-4.19"),
    Head(RepoURL("dhowells/linux-fs.git")),
    Head(RepoURL("herbert/cryptodev-2.6.git")),
    Head(RepoURL("helgaas/pci.git"), "next"),
    Head(RepoURL("viro/vfs.git"), "for-linus"),
    Head(RepoURL("jeyu/linux.git"), "modules-next"),
    Head(RepoURL("nvdimm/nvdimm.git"), "libnvdimm-for-next"),
)


# position of each head within the remotes tuple; used by Head._get_index()
remote_index = dict(zip(remotes, list(range(len(remotes)))))
# pseudo-head representing patches that are in none of the indexed remotes
oot = Head(RepoURL(None), "out-of-tree patches")
+
+
def get_heads(repo):
    """
    Resolve the local sha1 of each indexed remote head in `repo`.

    repo -- presumably a pygit2.Repository (it exposes .remotes,
            .revparse_single and refspec objects) -- confirm with callers.

    Returns
        repo_heads[Head]
            sha1
    """
    result = collections.OrderedDict()
    # map each configured remote's (normalized) URL to its remote object
    repo_remotes = collections.OrderedDict(
        ((RepoURL(remote.url), remote,) for remote in repo.remotes))

    for head in remotes:
        if head in result:
            raise GSException("head \"%s\" is not unique." % (head,))

        try:
            remote = repo_remotes[head.repo_url]
        except KeyError:
            # this remote is not configured in the working repository
            continue

        # Translate the remote branch name through the remote's fetch
        # refspecs to find the local reference to read.
        lhs = "refs/heads/%s" % (head.rev,)
        rhs = None
        nb = len(remote.fetch_refspecs)
        if nb == 0:
            # `git clone --bare` case
            rhs = lhs
        else:
            for i in range(nb):
                r = remote.get_refspec(i)
                if r.src_matches(lhs):
                    rhs = r.transform(lhs)
                    break
            if rhs is None:
                raise GSError("No matching fetch refspec for head \"%s\"." %
                              (head,))
        try:
            commit = repo.revparse_single(rhs)
        except KeyError:
            raise GSError("Could not read revision \"%s\". Perhaps you need "
                          "to fetch from remote \"%s\"" % (rhs, remote.name,))
        result[head] = str(commit.id)

    if len(result) == 0 or list(result.keys())[0] != remotes[0]:
        # According to the urls in remotes, this is not a clone of linux.git
        # Sort according to commits reachable from the current head
        result = collections.OrderedDict(
            [(Head(RepoURL(None), "HEAD"),
              str(repo.revparse_single("HEAD").id),)])

    return result
+
+
def get_history(repo, repo_heads):
    """
    Index the commit history of each head in repo_heads.

    Returns
        history[Head][commit hash represented as string of 40 characters]
            index, an ordinal number such that
            commit a is an ancestor of commit b -> index(a) < index(b)
    """
    processed = []
    history = collections.OrderedDict()
    args = ["git", "log", "--topo-order", "--pretty=tformat:%H"]
    for head, rev in repo_heads.items():
        # "^rev" arguments exclude commits already attributed to earlier
        # heads. NOTE(review): env={} clears the whole environment (not
        # just GIT_* variables), including PATH; git is then looked up via
        # the OS default search path -- confirm this is intended.
        sp = subprocess.Popen(args + processed + [rev],
                              cwd=repo.path,
                              env={},
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)

        result = {}
        for l in sp.stdout:
            result[l.decode().strip()] = len(result)
        # reverse indexes: git log prints newest first, but ancestors must
        # get lower index values than their descendants
        history[head] = {commit : len(result) - val for commit, val in
                         result.items()}

        sp.communicate()
        if sp.returncode != 0:
            raise GSError("git log exited with an error:\n" +
                          "\n".join(history[head]))

        processed.append("^%s" % (rev,))

    return history
+
+
+class CException(BaseException):
+ pass
+
+
+class CError(CException):
+ pass
+
+
+class CNeedsRebuild(CException):
+ pass
+
+
+class CAbsent(CNeedsRebuild):
+ pass
+
+
+class CKeyError(CNeedsRebuild):
+ pass
+
+
+class CUnsupported(CNeedsRebuild):
+ pass
+
+
+class CInconsistent(CNeedsRebuild):
+ pass
+
+
+class Cache(object):
+ """
+ cache
+ version
+ history[]
+ (url, rev, sha1,
+ history[commit hash represented as string of 40 characters]
+ index (as described in get_history())
+ ,)
+
+ The cache is stored using basic types.
+ """
+ version = 3
+
+
+ def __init__(self, write_enable=False):
+ self.write_enable = write_enable
+ self.closed = True
+ try:
+ cache_dir = os.environ["XDG_CACHE_HOME"]
+ except KeyError:
+ cache_dir = os.path.expanduser("~/.cache")
+ cache_path = os.path.join(cache_dir, "git-sort")
+ try:
+ os.stat(cache_path)
+ except OSError as e:
+ if e.errno == 2:
+ if write_enable:
+ if not os.path.isdir(cache_dir):
+ try:
+ os.makedirs(cache_dir)
+ except OSError as err:
+ raise CError("Could not create cache directory:\n" +
+ str(err))
+ else:
+ raise CAbsent
+ else:
+ raise
+
+ if write_enable:
+ # In case there is already a database file of an unsupported format,
+ # one would hope that with flag="n" a new database would be created
+ # to overwrite the current one. Alas, that is not the case... :'(
+ try:
+ os.unlink(cache_path)
+ except OSError as e:
+ if e.errno != 2:
+ raise
+
+ flag_map = {False : "r", True : "n"}
+ try:
+ self.cache = shelve.open(cache_path, flag=flag_map[write_enable])
+ except dbm.error:
+ raise CUnsupported
+ self.closed = False
+ if write_enable:
+ self.cache["version"] = Cache.version
+
+
+ def __del__(self):
+ self.close()
+
+
+ def __enter__(self):
+ return self
+
+
+ def __exit__(self, *args):
+ self.close()
+
+
+ def close(self):
+ if not self.closed:
+ self.cache.close()
+ self.closed = True
+
+
+ def __getitem__(self, key):
+ """
+ Supported keys:
+ "version"
+ int
+ "history"
+ OrderedDict((Head, sha1) : history)
+ """
+ if self.closed:
+ raise ValueError
+
+ try:
+ version = self.cache["version"]
+ except KeyError:
+ key_error = True
+ except ValueError as err:
+ raise CUnsupported(str(err))
+ else:
+ key_error = False
+
+ if key == "version":
+ if key_error:
+ raise CKeyError
+ else:
+ return version
+ elif key == "history":
+ if key_error or version != Cache.version:
+ raise CUnsupported
+
+ try:
+ cache_history = self.cache["history"]
+ except KeyError:
+ raise CInconsistent
+
+ # This detailed check may be needed if an older git-sort (which
+ # didn't set a cache version) modified the cache.
+ if (not isinstance(cache_history, list) or
+ len(cache_history) < 1 or
+ len(cache_history[0]) != 4 or
+ not isinstance(cache_history[0][3], dict)):
+ raise CInconsistent
+
+ return collections.OrderedDict([
+ (
+ (Head(RepoURL(e[0]), e[1]), e[2],),
+ e[3],
+ ) for e in cache_history])
+ else:
+ raise KeyError
+
+
+ def __setitem__(self, key, value):
+ """
+ Supported keys:
+ "history"
+ OrderedDict((Head, sha1) : history)
+ """
+ if self.closed or not self.write_enable:
+ raise ValueError
+
+ if key == "history":
+ self.cache["history"] = [(
+ repr(desc[0].repo_url), desc[0].rev, desc[1], log,
+ ) for desc, log in value.items()]
+ else:
+ raise KeyError
+
+
+@functools.total_ordering
+class IndexedCommit(object):
+ def __init__(self, head, index):
+ self.head = head
+ self.index = index
+
+
+ def _is_valid_operand(self, other):
+ return hasattr(other, "head") and hasattr(other, "index")
+
+
+ def __eq__(self, other):
+ if not self._is_valid_operand(other):
+ return NotImplemented
+ return (self.head == other.head and self.index == other.index)
+
+
+ def __lt__(self, other):
+ if not self._is_valid_operand(other):
+ return NotImplemented
+ if self.head == other.head:
+ return self.index < other.index
+ else:
+ return self.head < other.head
+
+
+ def __hash__(self):
+ return hash((self.head, self.index,))
+
+
+ def __repr__(self):
+ return "%s %d" % (repr(self.head), self.index,)
+
+
+class SortIndex(object):
+ version_match = re.compile("refs/tags/v(2\.6\.\d+|\d\.\d+)(-rc\d+)?$")
+
+
+ def __init__(self, repo):
+ self.repo = repo
+ needs_rebuild = False
+ try:
+ with Cache() as cache:
+ try:
+ history = cache["history"]
+ except CNeedsRebuild:
+ needs_rebuild = True
+ except CNeedsRebuild:
+ needs_rebuild = True
+ except CError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ try:
+ repo_heads = get_heads(repo)
+ except GSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ if needs_rebuild or list(history.keys()) != list(repo_heads.items()):
+ try:
+ history = get_history(repo, repo_heads)
+ except GSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+ try:
+ with Cache(write_enable=True) as cache:
+ cache["history"] = collections.OrderedDict(
+ [((head, repo_heads[head],), log,)
+ for head, log in history.items()])
+ except CError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+ self.history = history
+ else:
+ # no more need for the head sha1
+ self.history = collections.OrderedDict(
+ [(key[0], log,) for key, log in history.items()])
+ self.version_indexes = None
+ self.repo_heads = repo_heads
+
+
+ def lookup(self, commit):
+ for head, log in self.history.items():
+ try:
+ index = log[commit]
+ except KeyError:
+ continue
+ else:
+ return IndexedCommit(head, index)
+
+ raise GSKeyError
+
+
+ def describe(self, index):
+ """
+ index must come from the mainline head (remotes[0]).
+ """
+ if self.version_indexes is None:
+ history = self.history[remotes[0]]
+ # Remove "refs/tags/"
+ # Mainline release tags are annotated tag objects attached to a
+ # commit object; do not consider other kinds of tags.
+ objects = [(obj_tag.get_object(), tag,)
+ for obj_tag, tag in [
+ (self.repo.revparse_single(tag), tag[10:],)
+ for tag in self.repo.listall_references()
+ if self.version_match.match(tag)
+ ] if obj_tag.type == pygit2.GIT_OBJ_TAG]
+ revs = [(history[str(obj.id)], tag,)
+ for obj, tag in objects
+ if obj.type == pygit2.GIT_OBJ_COMMIT]
+ revs.sort(key=operator.itemgetter(0))
+ self.version_indexes = list(zip(*revs))
+
+ if not self.version_indexes:
+ raise GSError("Cannot describe commit, did not find any mainline "
+ "release tags in repository.")
+
+ indexes, tags = self.version_indexes
+ i = bisect.bisect_left(indexes, index)
+ if i == len(tags):
+ # not yet part of a tagged release
+ m = re.search("v([0-9]+)\.([0-9]+)(|-rc([0-9]+))$", tags[-1])
+ if m:
+ # Post-release commit with no rc, it'll be rc1
+ if m.group(3) == "":
+ nexttag = "v%s.%d-rc1" % (m.group(1), int(m.group(2)) + 1)
+ else:
+ nexttag = "v%s.%d or v%s.%s-rc%d (next release)" % \
+ (m.group(1), int(m.group(2)), m.group(1),
+ m.group(2), int(m.group(4)) + 1)
+ return nexttag
+ else:
+ return tags[i]
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Sort input lines according to the upstream order of "
+ "commits that each line represents, with the first word on the line "
+ "taken to be a commit id.")
+ parser.add_argument("-d", "--dump-heads", action="store_true",
+ help="Print the branch heads used for sorting "
+ "(debugging).")
+ args = parser.parse_args()
+
+ try:
+ path = os.environ["GIT_DIR"]
+ except KeyError:
+ try:
+ path = pygit2.discover_repository(os.getcwd())
+ except KeyError:
+ print("Error: Not a git repository", file=sys.stderr)
+ sys.exit(1)
+ repo = pygit2.Repository(path)
+
+ if args.dump_heads:
+ needs_rebuild = False
+ try:
+ with Cache() as cache:
+ try:
+ print("Cached heads (version %d):" % cache["version"])
+ except CKeyError:
+ print("No usable cache")
+ needs_rebuild = True
+ else:
+ try:
+ history = cache["history"]
+ except CUnsupported:
+ print("Unsupported cache version")
+ needs_rebuild = True
+ except CInconsistent:
+ print("Inconsistent cache content")
+ needs_rebuild = True
+ else:
+ pprint.pprint(list(history.keys()))
+ except CAbsent:
+ print("No usable cache")
+ needs_rebuild = True
+ except CNeedsRebuild:
+ needs_rebuild = True
+ except CError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ try:
+ repo_heads = get_heads(repo)
+ except GSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+ if not needs_rebuild and list(history.keys()) != list(repo_heads.items()):
+ needs_rebuild = True
+ print("Current heads (version %d):" % Cache.version)
+ pprint.pprint(list(repo_heads.items()))
+ if needs_rebuild:
+ action = "Will"
+ else:
+ action = "Will not"
+ print("%s rebuild history" % (action,))
+ sys.exit(0)
+
+ index = SortIndex(repo)
+ dest = {}
+ oot = []
+ num = 0
+ for line in sys.stdin.readlines():
+ num = num + 1
+ tokens = line.strip().split(None, 1)
+ if not tokens:
+ continue
+ try:
+ commit = repo.revparse_single(tokens[0])
+ except ValueError:
+ print("Error: did not find a commit hash on line %d:\n%s" %
+ (num, line.strip(),), file=sys.stderr)
+ sys.exit(1)
+ except KeyError:
+ print("Error: commit hash on line %d not found in the repository:\n%s" %
+ (num, line.strip(),), file=sys.stderr)
+ sys.exit(1)
+ h = str(commit.id)
+ if h in dest:
+ dest[h][1].append(line)
+ else:
+ try:
+ ic = index.lookup(h)
+ except GSKeyError:
+ oot.append(line)
+ else:
+ dest[h] = (ic, [line],)
+
+ print("".join([line
+ for ic, lines in sorted(dest.values(),
+ key=operator.itemgetter(0))
+ for line in lines
+ ]), end="")
+
+ if oot:
+ print("Error: the following entries were not found in the indexed heads:",
+ file=sys.stderr)
+ print("".join(oot), end="")
+ sys.exit(1)
diff --git a/scripts/git_sort/lib.py b/scripts/git_sort/lib.py
new file mode 100644
index 0000000000..f029267b7e
--- /dev/null
+++ b/scripts/git_sort/lib.py
@@ -0,0 +1,550 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import collections
+import operator
+import os
+import os.path
+import pygit2
+import re
+import signal
+import subprocess
+import sys
+
+import exc
+import git_sort
+from patch import Patch
+import series_conf
+
+
+# https://stackoverflow.com/a/952952
+flatten = lambda l: [item for sublist in l for item in sublist]
+
+
+# http://stackoverflow.com/questions/1158076/implement-touch-using-python
+def touch(fname, times=None):
+ with open(fname, 'a'):
+ os.utime(fname, times)
+
+
+def libdir():
+ return os.path.dirname(os.path.realpath(__file__))
+
+
+def check_series():
+ """
+ Check that the "series" file used by quilt looks like a series.conf file and
+ not a simplified version. If using the modified quilt, it will be a symlink
+ to the actual "series.conf" file and doing things like `quilt import` will
+ automatically update series.conf.
+ """
+ def check():
+ return (open("series").readline().strip() ==
+ "# Kernel patches configuration file")
+
+ try:
+ retval = check()
+ except IOError as err:
+ print("Error: could not read series file: %s" % (err,), file=sys.stderr)
+ return False
+
+ if retval:
+ return True
+
+ try:
+ subprocess.check_output(("quilt", "--quiltrc", "-", "top",),
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ if err.output.decode() == "No patches applied\n":
+ pass
+ else:
+ raise
+ if check():
+ return True
+ else:
+ print("Error: series file does not look like series.conf. "
+ "Make sure you are using the modified `quilt`; see "
+ "scripts/git_sort/README.md.", file=sys.stderr)
+ return False
+
+
+def repo_path():
+ """
+ Get the path to the git_dir of the mainline linux git repository to use.
+ Typically obtained from the LINUX_GIT environment variable.
+ """
+ try:
+ search_path = subprocess.check_output(
+ os.path.join(libdir(), "..",
+ "linux_git.sh")).decode().strip()
+ except subprocess.CalledProcessError:
+ print("Error: Could not determine mainline linux git repository path.",
+ file=sys.stderr)
+ sys.exit(1)
+ return pygit2.discover_repository(search_path)
+
+
+def series_header(series):
+ """
+ Return the block of lines at the top of series that are not patch files
+ entries or automatically generated comments. These lines should be prepended
+ to the output.
+ """
+ header = []
+
+ for line in series:
+ if series_conf.filter_patches(line):
+ break
+
+ try:
+ parse_section_header(line)
+ except exc.KSNotFound:
+ pass
+ else:
+ break
+
+ header.append(line)
+
+ return header
+
+
+def series_footer(series):
+ return series_header(reversed(series))
+
+
+def parse_section_header(line):
+ """
+ Parse a series.conf line to identify if it's a comment denoting the
+ beginning of a subsystem section. In that case, return the Head object it
+ corresponds to.
+ """
+ oot_text = git_sort.oot.rev
+ line = line.strip()
+
+ if not line.startswith("# "):
+ raise exc.KSNotFound()
+ line = line[2:]
+ if line == oot_text:
+ return git_sort.oot
+ elif line.lower() == series_conf.start_text:
+ raise exc.KSNotFound()
+
+ words = line.split(None, 3)
+ if len(words) > 2:
+ raise exc.KSError(
+ "Section comment \"%s\" in series.conf could not be parsed. "
+ "series.conf is invalid." % (line,))
+ args = [git_sort.RepoURL(words[0])]
+ if len(words) == 2:
+ args.append(words[1])
+
+ head = git_sort.Head(*args)
+
+ if head not in git_sort.remotes:
+ raise exc.KSError(
+ "Section comment \"%s\" in series.conf does not match any Head in "
+ "variable \"remotes\". series.conf is invalid." % (line,))
+
+ return head
+
+
+def patches_per_section(inside_lines):
+ """
+ Returns an OrderedDict
+ result[Head][]
+ patch file name
+ """
+ result = collections.OrderedDict([
+ (head, [],)
+ for head in flatten((git_sort.remotes, (git_sort.oot,),))])
+
+ current_head = git_sort.remotes[0]
+ for line in inside_lines:
+ try:
+ current_head = parse_section_header(line)
+ except exc.KSNotFound:
+ pass
+
+ if not series_conf.filter_patches(line):
+ continue
+
+ name = series_conf.firstword(line)
+ result[current_head].append(name)
+
+ for head, names in list(result.items()):
+ if not names:
+ del result[head]
+
+ return result
+
+
+def parse_inside(index, inside_lines, move_upstream):
+ """
+ Parse series.conf lines to generate InputEntry objects.
+ """
+ result = []
+ for head, names in patches_per_section(inside_lines).items():
+ for name in names:
+ entry = InputEntry("\t%s\n" % (name,))
+ entry.from_patch(index, name, head, move_upstream)
+ result.append(entry)
+
+ return result
+
+
+def list_moved_patches(base_lines, remote_lines):
+ """
+ Return a list of patch file names which are in different subsystem sections
+ between base and remote.
+ """
+ base = {}
+ result = []
+
+ for head, names in patches_per_section(base_lines).items():
+ for name in names:
+ base[name] = head
+
+ for head, names in patches_per_section(remote_lines).items():
+ for name in names:
+ if name in base and head != base[name]:
+ result.append(name)
+
+ return result
+
+
+class InputEntry(object):
+ """
+ A patch line entry (usually from series.conf) and associated data about the
+ commit it backports.
+ """
+ commit_match = re.compile("[0-9a-f]{40}")
+
+
+ def __init__(self, value):
+ """
+ value is typically a series.conf line but can be anything.
+ """
+ self.value = value
+
+
+ def from_patch(self, index, name, current_head, move_upstream):
+ """
+ This is where we decide a patch line's fate in the sorted series.conf
+ The following factors determine how a patch is sorted:
+ * commit found in index
+ * patch's series.conf current_head is indexed (ie. the local repo
+ fetches from that remote)
+ * patch appears to have moved downstream/didn't move/upstream
+ * patch's tag is good ("Git-repo:" == current_head.url)
+ * patches may be moved upstream between subsystem sections
+ """
+ self.name = name
+ if not os.path.exists(name):
+ raise exc.KSError("Could not find patch \"%s\"" % (name,))
+
+ with Patch(open(name, mode="rb")) as patch:
+ commit_tags = patch.get("Git-commit")
+ repo_tags = patch.get("Git-repo")
+
+ if not commit_tags:
+ self.dest_head = git_sort.oot
+ return
+
+ class BadTag(Exception):
+ pass
+
+ def get_commit(value):
+ if not value:
+ raise BadTag(value)
+ tag = series_conf.firstword(value)
+ if not self.commit_match.match(tag):
+ raise BadTag(tag)
+ return tag
+
+ try:
+ self.revs = [get_commit(value) for value in commit_tags]
+ except BadTag as e:
+ raise exc.KSError("Git-commit tag \"%s\" in patch \"%s\" is not a "
+ "valid revision." % (e.args[0], name,))
+ rev = self.revs[0]
+
+ if len(repo_tags) > 1:
+ raise exc.KSError("Multiple Git-repo tags found. Patch \"%s\" is "
+ "tagged improperly." % (name,))
+ elif repo_tags:
+ repo = git_sort.RepoURL(repo_tags[0])
+ elif commit_tags:
+ repo = git_sort.remotes[0].repo_url
+ self.new_url = None
+
+ try:
+ ic = index.lookup(rev)
+ except git_sort.GSKeyError: # commit not found
+ if current_head not in index.repo_heads: # repo not indexed
+ if repo == current_head.repo_url: # good tag
+ self.dest_head = current_head
+ else: # bad tag
+ raise exc.KSError(
+ "There is a problem with patch \"%s\". "
+ "The Git-repo tag is incorrect or the patch is in the "
+ "wrong section of series.conf and (the Git-commit tag "
+ "is incorrect or the relevant remote is outdated or "
+ "not available locally) or an entry for this "
+ "repository is missing from \"remotes\". In the last "
+ "case, please edit \"remotes\" in "
+ "\"scripts/git_sort/git_sort.py\" and commit the "
+ "result. Manual intervention is required." % (name,))
+ else: # repo is indexed
+ if repo == current_head.repo_url: # good tag
+ raise exc.KSError(
+ "There is a problem with patch \"%s\". "
+ "Commit \"%s\" not found in git-sort index. "
+ "The remote fetching from \"%s\" needs to be fetched "
+ "or the Git-commit tag is incorrect or the patch is "
+ "in the wrong section of series.conf. Manual "
+ "intervention is required." % (
+ name, rev, current_head.repo_url,))
+ else: # bad tag
+ raise exc.KSError(
+ "There is a problem with patch \"%s\". "
+ "The Git-repo tag is incorrect or the patch is in the "
+ "wrong section of series.conf. Manual intervention is "
+ "required." % (name,))
+ else: # commit found
+ msg_bad_tag = "There is a problem with patch \"%s\". " \
+ "The Git-repo tag is incorrect or the patch is in " \
+ "the wrong section of series.conf. Manual " \
+ "intervention is required." % (name,)
+ if current_head not in index.repo_heads: # repo not indexed
+ if ic.head > current_head: # patch moved downstream
+ if repo == current_head.repo_url: # good tag
+ self.dest_head = current_head
+ else: # bad tag
+ raise exc.KSError(msg_bad_tag)
+ elif ic.head == current_head: # patch didn't move
+ raise exc.KSException(
+ "Head \"%s\" is not available locally but commit "
+ "\"%s\" found in patch \"%s\" was found in that head." %
+ (ic.head, rev, name,))
+ elif ic.head < current_head: # patch moved upstream
+ if move_upstream: # move patches between subsystem sections
+ self.dest_head = ic.head
+ self.dest = ic
+ if repo != ic.head.repo_url: # bad tag
+ self.new_url = ic.head.repo_url
+ else: # do not move patches between subsystem sections
+ if repo == current_head.repo_url: # good tag
+ self.dest_head = current_head
+ else: # bad tag
+ raise exc.KSError(msg_bad_tag)
+ else: # repo is indexed
+ if ic.head > current_head: # patch moved downstream
+ if repo == current_head.repo_url: # good tag
+ raise exc.KSError(
+ "There is a problem with patch \"%s\". "
+ "The patch is in the wrong section of series.conf "
+ "or the remote fetching from \"%s\" needs to be "
+ "fetched or the relative order of \"%s\" and "
+ "\"%s\" in \"remotes\" is incorrect. Manual "
+ "intervention is required." % (
+ name, current_head.repo_url, ic.head,
+ current_head,))
+ else: # bad tag
+ raise exc.KSError(
+ "There is a problem with patch \"%s\". "
+ "The patch is in the wrong section of series.conf "
+ "or the remote fetching from \"%s\" needs to be "
+ "fetched. Manual intervention is required." % (
+ name, current_head.repo_url,))
+ elif ic.head == current_head: # patch didn't move
+ self.dest_head = ic.head
+ self.dest = ic
+ if repo != ic.head.repo_url: # bad tag
+ self.new_url = ic.head.repo_url
+ elif ic.head < current_head: # patch moved upstream
+ if move_upstream: # move patches between subsystem sections
+ self.dest_head = ic.head
+ self.dest = ic
+ if repo != ic.head.repo_url: # bad tag
+ self.new_url = ic.head.repo_url
+ else: # do not move patches between subsystem sections
+ if repo == current_head.repo_url: # good tag
+ self.dest_head = current_head
+ self.dest = ic
+ else: # bad tag
+ raise exc.KSError(msg_bad_tag)
+
+
+def series_sort(index, entries):
+ """
+ entries is a list of InputEntry objects
+
+ Returns an OrderedDict
+ result[Head][]
+ patch file name
+
+ Note that Head may be a "virtual head" like "out-of-tree patches".
+ """
+ def container(head):
+ if head in index.repo_heads:
+ return collections.defaultdict(list)
+ else:
+ return []
+
+ result = collections.OrderedDict([
+ (head, container(head),)
+ for head in flatten((git_sort.remotes, (git_sort.oot,),))])
+
+ for entry in entries:
+ try:
+ result[entry.dest_head][entry.dest].append(entry.value)
+ except AttributeError:
+ # no entry.dest
+ result[entry.dest_head].append(entry.value)
+
+ for head in index.repo_heads:
+ result[head] = flatten([
+ e[1]
+ for e in sorted(result[head].items(), key=operator.itemgetter(0))])
+
+ for head, lines in list(result.items()):
+ if not lines:
+ del result[head]
+
+ return result
+
+
+def series_format(entries):
+ """
+ entries is an OrderedDict, typically the output of series_sort()
+ result[Head][]
+ patch file name
+ """
+ result = []
+
+ for head, lines in entries.items():
+ if head != git_sort.remotes[0]:
+ if result:
+ result.append("\n")
+ result.append("\t# %s\n" % (str(head),))
+ result.extend(lines)
+
+ return result
+
+
+def tag_needs_update(entry):
+ if entry.dest_head != git_sort.oot and entry.new_url is not None:
+ return True
+ else:
+ return False
+
+
+def update_tags(index, entries):
+ """
+ Update the Git-repo tag (possibly by removing it) of patches.
+ """
+ for entry in entries:
+ with Patch(open(entry.name, mode="r+b")) as patch:
+ message = "Failed to update tag \"%s\" in patch \"%s\". This " \
+ "tag is not found."
+ if entry.dest_head == git_sort.remotes[0]:
+ tag_name = "Patch-mainline"
+ try:
+ patch.change(tag_name, index.describe(entry.dest.index))
+ except KeyError:
+ raise exc.KSNotFound(message % (tag_name, entry.name,))
+ except git_sort.GSError as err:
+ raise exc.KSError("Failed to update tag \"%s\" in patch "
+ "\"%s\". %s" % (tag_name, entry.name,
+ str(err),))
+ patch.remove("Git-repo")
+ else:
+ tag_name = "Git-repo"
+ try:
+ patch.change(tag_name, repr(entry.new_url))
+ except KeyError:
+ raise exc.KSNotFound(message % (tag_name, entry.name,))
+
+
+def sequence_insert(series, rev, top):
+ """
+ top is the top applied patch, None if none are applied.
+
+ Caller must chdir to where the entries in series can be found.
+
+ Returns the name of the new top patch and how many must be applied/popped.
+ """
+ git_dir = repo_path()
+ repo = pygit2.Repository(git_dir)
+ index = git_sort.SortIndex(repo)
+
+ try:
+ commit = str(repo.revparse_single(rev).id)
+ except ValueError:
+ raise exc.KSError("\"%s\" is not a valid revision." % (rev,))
+ except KeyError:
+ raise exc.KSError("Revision \"%s\" not found in \"%s\"." % (
+ rev, git_dir,))
+
+ marker = "# new commit"
+ new_entry = InputEntry(marker)
+ try:
+ new_entry.dest = index.lookup(commit)
+ except git_sort.GSKeyError:
+ raise exc.KSError(
+ "Commit %s not found in git-sort index. If it is from a "
+ "repository and branch pair which is not listed in \"remotes\", "
+ "please add it and submit a patch." % (commit,))
+ new_entry.dest_head = new_entry.dest.head
+
+ try:
+ before, inside, after = series_conf.split(series)
+ except exc.KSNotFound as err:
+ raise exc.KSError(err)
+ before, after = map(series_conf.filter_series, (before, after,))
+ current_patches = flatten([before, series_conf.filter_series(inside), after])
+
+ if top is None:
+ top_index = 0
+ else:
+ top_index = current_patches.index(top) + 1
+
+ input_entries = parse_inside(index, inside, False)
+ input_entries.append(new_entry)
+
+ sorted_entries = series_sort(index, input_entries)
+ new_patches = flatten([
+ before,
+ [line.strip() for lines in sorted_entries.values() for line in lines],
+ after,
+ ])
+ commit_pos = new_patches.index(marker)
+ if commit_pos == 0:
+ # should be inserted first in series
+ name = ""
+ else:
+ name = new_patches[commit_pos - 1]
+ del new_patches[commit_pos]
+
+ if new_patches != current_patches:
+ raise exc.KSError("Subseries is not sorted. "
+ "Please run scripts/series_sort.py.")
+
+ return (name, commit_pos - top_index,)
diff --git a/scripts/git_sort/lib.sh b/scripts/git_sort/lib.sh
new file mode 100644
index 0000000000..a3b57200b3
--- /dev/null
+++ b/scripts/git_sort/lib.sh
@@ -0,0 +1,181 @@
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+bash_single_esc () {
+ sed "s/'/'\\\\''/g"
+}
+
+# var_override [options] <var name> <value> <source name>
+# Options:
+# -a, --allow-empty Allow an empty "value" to override the value of "var"
+var_override () {
+ local result=$(getopt -o a --long allow-empty -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
+ local opt_empty
+
+ if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ exit 1
+ fi
+
+ eval set -- "$result"
+
+ while true ; do
+ case "$1" in
+ -a|--allow-empty)
+ opt_empty=1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ exit 1
+ ;;
+ esac
+ shift
+ done
+
+ local name=$1
+ local value=$2
+ local value_esc=$(echo "$value" | bash_single_esc)
+ local src=$3
+ local src_esc=$(echo "$src" | bash_single_esc)
+
+ if [ -n "$value" -o "$opt_empty" ]; then
+ local name_src=_${name}src
+ if [ -z "${!name}" ]; then
+ eval "$name='$value_esc'"
+ eval "$name_src='$src_esc'"
+ elif [ "$value" != "${!name}" ]; then
+ if [ "${!name_src}" ]; then
+ echo "Warning: $src (\"$value\") and ${!name_src} (\"${!name}\") differ. Using $src." > /dev/stderr
+ fi
+ eval "$name='$value_esc'"
+ eval "$name_src='$src_esc'"
+ fi
+ fi
+}
+
+# expand_git_ref [options]
+# Options:
+# -q, --quiet Do not error out if a refspec is not found, just print an empty line
+expand_git_ref () {
+ local result=$(getopt -o q --long quiet -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
+ local opt_quiet
+
+ if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ exit 1
+ fi
+
+ eval set -- "$result"
+
+ while true ; do
+ case "$1" in
+ -q|--quiet)
+ opt_quiet=1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ exit 1
+ ;;
+ esac
+ shift
+ done
+
+ local commit rest
+ # take the first word only, which will discard cruft like "(partial)"
+ while read commit rest; do
+ local hash
+ local cmd="git log -n1 --pretty=format:%H '$commit' --"
+ if [ -z "$opt_quiet" ] && ! hash=$(eval "$cmd"); then
+ return 1
+ else
+ hash=$(eval "$cmd" 2>/dev/null || true)
+ fi
+ echo $hash
+ done
+}
+
+# remove_subject_annotation
+remove_subject_annotation () {
+ sed -re 's/^( *\[[^]]*\] *)+//'
+}
+
+# get_patch_num
+get_patch_num () {
+ sed -nre 's/.*\[.*\b0*([0-9]+)\/[0-9]+\].*/\1/p'
+}
+
+# format_sanitized_subject
+# Transform a subject into a file name
+format_sanitized_subject () {
+ sed -re '
+ s/\.+/./g
+ s/[^a-zA-Z0-9._]+/-/g
+ s/^-+//
+ s/[-.]+$//
+ s/(.{,52}).*/\1/
+ '
+}
+
+# cheat_diffstat
+# Adds fake content to a patch body so that diffstat will show something for
+# renames
+cheat_diffstat () {
+ awk '
+ BEGIN {
+ state = 0
+ percent = "unknown%"
+ }
+
+ state == 4 {
+ print "@@ -1 +1 @@"
+ print "-" percent " of the content"
+ print "+" percent " of the content"
+
+ percent = "unknown%"
+ state = 0
+ }
+
+ state == 3 && /^rename to/ {
+ state = 4
+ }
+
+ state == 2 && /^rename from/ {
+ state = 3
+ }
+
+ state == 1 && /^similarity index/ {
+ state = 2
+ percent = $3
+ }
+
+ state == 0 && /^diff --git/ {
+ state = 1
+ }
+
+ {
+ print
+ }
+ '
+}
diff --git a/scripts/git_sort/lib_from.sh b/scripts/git_sort/lib_from.sh
new file mode 100644
index 0000000000..b69fc86b7d
--- /dev/null
+++ b/scripts/git_sort/lib_from.sh
@@ -0,0 +1,39 @@
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+# from_get
+from_get () {
+ awk '
+ NR==1 && /^From [0-9a-f]+/ {
+ print $2
+ exit
+ }
+ '
+}
+
+# from_extract
+from_extract () {
+ awk '
+ NR==1 && /^From [0-9a-f]+/ {
+ next
+ }
+
+ {
+ print
+ }
+ '
+}
diff --git a/scripts/git_sort/lib_tag.sh b/scripts/git_sort/lib_tag.sh
new file mode 100644
index 0000000000..e64c302131
--- /dev/null
+++ b/scripts/git_sort/lib_tag.sh
@@ -0,0 +1,510 @@
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
# countkeys <key>
# Count on stdout how many times tag <key> occurs in the patch header read
# from stdin. The cherry-picked pseudo-tags are written "(<key> ...)" on a
# line of their own; all other tags are "<key>: ..." (case-insensitive).
countkeys () {
	local key=$1
	local count

	# grep -c replaces the former "grep | wc -l" pipeline (one less
	# process); echo keeps the function's exit status at 0 even when the
	# count is 0 (grep -c exits non-zero on no match).
	case "${key,,*}" in
	"cherry picked from commit" | "cherry picked for")
		count=$(grep -c "^($key .*)$")
		;;
	*)
		count=$(grep -ic "^$key: ")
		;;
	esac
	echo "$count"
}
+
+# tag_get [options] <key>
+# Options:
+# -l, --last Do not error out if a tag is present more than once,
+# return the last occurrence
+tag_get () {
+ local result=$(getopt -o l --long last -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
+ local opt_last
+
+ if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ exit 1
+ fi
+
+ eval set -- "$result"
+
+ while true ; do
+ case "$1" in
+ -l|--last)
+ opt_last=1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ exit 1
+ ;;
+ esac
+ shift
+ done
+
+ local key=$1
+
+ local header=$(cat)
+ local nb=$(countkeys "$key" <<< "$header")
+ if [ $nb -gt 1 -a -z "$opt_last" ]; then
+ echo "Error: key \"$key\" present more than once." > /dev/stderr
+ exit 1
+ fi
+
+ case "${key,,*}" in
+ subject)
+ awk --assign nb="$nb" '
+ BEGIN {
+ insubject = 0
+ }
+
+ tolower($1) ~ /subject:/ {
+ nb--
+ if (nb > 0) {
+ next
+ }
+ insubject = 1
+ split($0, array, FS, seps)
+ result = substr($0, 1 + length(seps[0]) + length(array[1]) + length(seps[1]))
+ next
+ }
+
+ insubject && /^[ \t]/ {
+ sub("[ \t]", " ")
+ result = result $0
+ next
+ }
+
+ insubject {
+ print result
+ exit
+ }
+ ' <<< "$header"
+ ;;
+ "cherry picked from commit" | "cherry picked for")
+ awk --assign nb="$nb" '
+ /^\('"$key"' .*\)$/ {
+ nb--
+ if (nb > 0) {
+ next
+ }
+ match($0, "^\\('"$key"' (.*)\\)$", a)
+ print a[1]
+ exit
+ }
+ ' <<< "$header"
+ ;;
+ *)
+ awk --assign nb="$nb" '
+ tolower($1) ~ /'"${key,,*}"':/ {
+ nb--
+ if (nb > 0) {
+ next
+ }
+ split($0, array, FS, seps)
+ print substr($0, 1 + length(seps[0]) + length(array[1]) + length(seps[1]))
+ exit
+ }
+ ' <<< "$header"
+ ;;
+ esac
+}
+
+# tag_remove [options] <key>
+# Options:
+# -l, --last Do not error out if a tag is present more than once,
+# extract the last occurrence
+tag_remove () {
+ local result=$(getopt -o l --long last -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
+ local opt_last
+
+ if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ exit 1
+ fi
+
+ eval set -- "$result"
+
+ while true ; do
+ case "$1" in
+ -l|--last)
+ opt_last=1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ exit 1
+ ;;
+ esac
+ shift
+ done
+
+ local key=$1
+
+ local header=$(cat && echo ---)
+ local nb=$(countkeys "$key" <<< "$header")
+ if [ $nb -gt 1 -a -z "$opt_last" ]; then
+ echo "Error: key \"$key\" present more than once." > /dev/stderr
+ exit 1
+ fi
+
+ case "${key,,*}" in
+ subject)
+ echo -n "${header%---}" | awk --assign nb="$nb" '
+ BEGIN {
+ insubject = 0
+ }
+
+ tolower($1) ~ /subject:/ {
+ nb--
+ if (nb == 0) {
+ insubject = 1
+ next
+ }
+ }
+
+ insubject && /^ / {
+ next
+ }
+
+ insubject {
+ insubject = 0
+ }
+
+ {
+ print
+ }
+ '
+ ;;
+ "cherry picked from commit" | "cherry picked for")
+ echo -n "${header%---}" | awk --assign nb="$nb" '
+ /^\('"$key"' .*\)$/ {
+ nb--
+ if (nb == 0) {
+ next
+ }
+ }
+
+ {
+ print
+ }
+ '
+ ;;
+ *)
+ echo -n "${header%---}" | awk --assign nb="$nb" '
+ tolower($1) ~ /'"${key,,*}"':/ {
+ nb--
+ if (nb == 0) {
+ next
+ }
+ }
+
+ {
+ print
+ }
+ '
+ ;;
+ esac
+}
+
# tag_add [options] <key> <value>
# Read a patch header on stdin and write it to stdout with a new tag for
# <key> inserted at the conventional position for that key.
# Options:
#     -l, --last	Do not error out if a tag is already present, add it
# 			after the last occurrence
tag_add () {
	local result
	local opt_last

	# Assign separately from `local` so that $? below is getopt's exit
	# status, not `local`'s (which is always 0).
	result=$(getopt -o l --long last -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
	if [ $? != 0 ]; then
		echo "Error: getopt error" >&2
		exit 1
	fi

	eval set -- "$result"

	while true ; do
		case "$1" in
			-l|--last)
				opt_last=1
				;;
			--)
				shift
				break
				;;
			*)
				echo "Error: could not parse arguments" >&2
				exit 1
				;;
		esac
		shift
	done

	local key=$1
	local value=$2

	case "${key,,*}" in
	from)
		# "From:" goes at the very top, except after a leading
		# "From <sha>" line as produced by git format-patch.
		local header=$(cat && echo ---)
		local nb=$(countkeys "$key" <<< "$header")
		if [ $nb -gt 0 -a -z "$opt_last" ]; then
			echo "Error: key \"$key\" already present." > /dev/stderr
			exit 1
		fi

		echo -n "${header%---}" | awk --assign key="$key" --assign value="$value" --assign nb="$nb" '
		BEGIN {
			inserted = 0
		}

		NR == 1 && /^From [0-9a-f]+/ {
			print
			next
		}

		nb == 0 && !inserted {
			print key ": " value
			print
			inserted = 1
			next
		}

		tolower($1) ~ /'"${key,,*}"':/ {
			nb--
		}

		{
			print
		}
		'
		;;
	date | subject)
		local header=$(cat && echo ---)
		local nb=$(countkeys "$key" <<< "$header")
		if [ $nb -gt 0 ]; then
			echo "Error: key \"$key\" already present." > /dev/stderr
			exit 1
		fi

		# These tags go right after a fixed predecessor tag.
		local -A prevkey=(["date"]="from" ["subject"]="date")

		nb=$(countkeys "${prevkey[${key,,*}]}" <<< "$header")

		echo -n "${header%---}" | awk --assign key="$key" --assign value="$value" --assign nb="$nb" '
		{
			print
		}

		tolower($1) ~ /'"${prevkey[${key,,*}]}"':/ {
			nb--
			if (nb == 0) {
				print key ": " value
			}
		}
		'
		;;
	patch-mainline | git-repo | git-commit | references)
		local header=$(cat && echo ---)
		local nb=$(countkeys "$key" <<< "$header")
		if [ $nb -gt 0 -a -z "$opt_last" ]; then
			echo "Error: key \"$key\" already present." > /dev/stderr
			exit 1
		fi

		# Keep these tags grouped in canonical order; fall back to
		# inserting before the first empty line of the header.
		echo -n "${header%---}" | awk '
		BEGIN {
			added = 0
			keys["Patch-mainline:"] = 1
			keys["Git-repo:"] = 2
			keys["Git-commit:"] = 3
			keys["References:"] = 4
		}

		function keycmp(key1, key2) {
			return keys[key1] - keys[key2]
		}

		$1 in keys && !added {
			if (keycmp("'"$key"':", $1) < 0) {
				print "'"$key"': '"$value"'"
				print
				added = 1
				next
			}
		}

		/^$/ && !added {
			print "'"$key"': '"$value"'"
			print
			added = 1
			next
		}

		{
			print
		}
		'
		;;
	acked-by | signed-off-by)
		local line="$key: $value"
		local header=$(cat && echo ---)

		echo -n "${header%---}" | _append_attribution "$line"
		;;
	"cherry picked from commit" | "cherry picked for")
		local line
		local header=$(cat && echo ---)
		local nb=$(countkeys "$key" <<< "$header")

		if [ $nb -gt 0 ]; then
			echo "Error: key \"$key\" already present." > /dev/stderr
			exit 1
		fi

		# This pseudo-tag is written "(<key> <value>)" on its own line.
		line="($key $value)"

		echo -n "${header%---}" | _append_attribution "$line"
		;;
	*)
		echo "Error: I don't know where to add a tag of type \"$key\"." > /dev/stderr
		exit 1
		;;
	esac
}
+
# get_attributions
# Print every attribution line (Acked-by / Reviewed-by / Signed-off-by,
# case-insensitive) from the patch header read on stdin.
get_attributions () {
	awk 'tolower($1) ~ /^(acked|reviewed|signed-off)-by:$/'
}
+
+# get_attribution_names
+get_attribution_names () {
+ get_attributions | awk '
+ {
+ split($0, array, FS, seps)
+ print substr($0, 1 + length(seps[0]) + length(array[1]) + length(seps[1]))
+ }
+ '
+}
+
+# _append_attribution <attribution line>
+_append_attribution () {
+ local line=$1
+
+ awk --assign line="$line" '
+ BEGIN {
+ added = 0
+ empty_line_nb = 0
+ attribseen = 0
+ }
+
+ function print_attribution(attribseen, line, before_diffstat)
+ {
+ if (!attribseen) {
+ print ""
+ }
+ print line
+ if (!before_diffstat) {
+ print ""
+ }
+
+ added = 1
+ empty_line_nb = 0
+ }
+
+ function playback_empty_lines()
+ {
+ for (; empty_line_nb > 0; empty_line_nb--) {
+ print ""
+ }
+ }
+
+ /^$/ {
+ empty_line_nb++
+ next
+ }
+
+ tolower($1) ~ /^[^ ]+-by:$/ {
+ attribseen = 1
+ }
+
+ /^\(cherry picked from commit [[:xdigit:]]{6,})$/ {
+ attribseen = 1
+ }
+
+ /^\(cherry picked for .*)$/ {
+ attribseen = 1
+ }
+
+ !added && /^---$/ {
+ print_attribution(attribseen, line, 1)
+ }
+
+ # from quilt, patchfns
+ !added && /^(---|\*\*\*|Index:)[ \t][^ \t]|^diff -/ {
+ print_attribution(attribseen, line, 0)
+ }
+
+ {
+ playback_empty_lines()
+ print
+ }
+
+ END {
+ if (!added) {
+ print_attribution(attribseen, line, 1)
+ } else {
+ playback_empty_lines()
+ }
+ }
+ '
+}
+
# insert_attributions <attribution lines>
# Add multiple attribution lines
insert_attributions () {
	local attrs=$1
	# The "---" sentinel keeps trailing newlines of the header alive
	# across the command substitutions below; ${header%---} strips it.
	local header=$(cat && echo ---)

	# NOTE(review): the herestring below is unquoted, so the header is
	# word-split before being passed on; verify quoting is not needed here.
	if [ "$(get_attributions <<< ${header%---})" ]; then
		# An attribution block already exists: print the new lines
		# before each existing attribution line.
		# NOTE(review): with more than one existing attribution the new
		# lines are inserted multiple times -- confirm this is intended.
		echo -n "${header%---}" | awk --assign attr="$1" '
		tolower($1) ~ /^[^ ]+-by:$/ {
			print attr
		}

		{
			print
		}
		'
	else
		# No attribution block yet: append the lines one at a time,
		# letting _append_attribution pick the right position.
		while read attribution; do
			header=$(echo -n "${header%---}" | _append_attribution "$attribution" && echo ---)
		done <<< "$attrs"

		echo -n "${header%---}"
	fi
}
diff --git a/scripts/git_sort/merge_tool.py b/scripts/git_sort/merge_tool.py
new file mode 100755
index 0000000000..f3a148d946
--- /dev/null
+++ b/scripts/git_sort/merge_tool.py
@@ -0,0 +1,149 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+"""
+Depends on `merge` from rcs
+
+Add a section like this to git config:
+
+[mergetool "git-sort"]
+ cmd = scripts/git_sort/merge_tool.py $LOCAL $BASE $REMOTE $MERGED
+ trustExitCode = true
+
+Then call
+git mergetool --tool=git-sort series.conf
+
+"""
+
+import os.path
+import pygit2
+import shutil
+import subprocess
+import sys
+
+import exc
+import lib
+import series_conf
+
+
def splice(series, inside, output_path):
    """
    Write a new series.conf to output_path: the part before the sorted
    section, the section header, the given sorted lines, the section footer,
    and the part after the sorted section.

    series is a (before, inside, after,) split as produced by
    series_conf.split(); inside is the new content of the sorted section.
    """
    parts = (
        series[0],
        lib.series_header(series[1]),
        inside,
        lib.series_footer(series[1]),
        series[2],
    )
    with open(output_path, mode="w") as f:
        for chunk in parts:
            f.writelines(chunk)
+
+
if __name__ == "__main__":
    # Paths as substituted by `git mergetool` for $LOCAL $BASE $REMOTE $MERGED.
    local_path, base_path, remote_path, merged_path = sys.argv[1:5]

    repo_path = lib.repo_path()
    repo = pygit2.Repository(repo_path)
    index = lib.git_sort.SortIndex(repo)

    # (before, inside, after, set(inside),)
    local, base, remote = (
        (s[0], s[1], s[2], set([series_conf.firstword(l)
                                for l in s[1]
                                if series_conf.filter_patches(l)]),)
        for s in [
            series_conf.split(open(s_path))
            for s_path in (local_path, base_path, remote_path,)
        ]
    )

    # What the remote side added/removed/moved relative to the merge base.
    added = remote[3] - base[3]
    removed = base[3] - remote[3]
    moved = set(lib.list_moved_patches(base[1], remote[1]))

    if added or removed:
        print("%d commits added, %d commits removed from base to remote." %
              (len(added), len(removed),))
    if moved:
        print("%d commits changed section from base to remote." % (len(moved),))
    dup_add_nb = len(local[3] & added)
    dup_rem_nb = len(removed) - len(local[3] & removed)
    if dup_add_nb:
        print("Warning: %d commits added in remote and already present in "
              "local, ignoring." % (dup_add_nb,))
    if dup_rem_nb:
        print("Warning: %d commits removed in remote but not present in local, "
              "ignoring." % (dup_rem_nb,))

    # Drop from local's sorted section what remote removed or moved, then add
    # what remote added or moved; the sort below places everything correctly.
    filter_set = removed | moved
    inside = [line for line in local[1] if not line.strip() in filter_set]
    try:
        input_entries = lib.parse_inside(index, inside, False)
    except exc.KSError as err:
        print("Error: %s" % (err,), file=sys.stderr)
        sys.exit(1)
    for name in added - local[3] | moved:
        entry = lib.InputEntry("\t%s\n" % (name,))
        entry.from_patch(index, name, lib.git_sort.oot, True)
        input_entries.append(entry)

    try:
        sorted_entries = lib.series_sort(index, input_entries)
    except exc.KSError as err:
        print("Error: %s" % (err,), file=sys.stderr)
        sys.exit(1)
    output = lib.series_format(sorted_entries)

    # If there were no conflicts outside of the sorted section, then it would be
    # sufficient to splice the sorted result into local
    splice(local, output, merged_path)

    # ... but we don't know, so splice them all and call `merge` so that the
    # lines outside the sorted section get conflict markers if needed
    splice(base, output, base_path)
    splice(remote, output, remote_path)

    result = 0
    try:
        cmd = "merge"
        retval = subprocess.call([cmd, merged_path, base_path, remote_path])
    except OSError as e:
        if e.errno == 2:
            print("Error: could not run `%s`. Please make sure it is "
                  "installed (from the \"rcs\" package)." % (cmd,),
                  file=sys.stderr)
            sys.exit(1)
        else:
            raise
    if retval != 0:
        name = "%s.merged%d" % (merged_path, os.getpid(),)
        print("Warning: conflicts outside of sorted section, leaving merged "
              "result in %s" % (name,))
        shutil.copy(merged_path, name)
        result = 1

    # Bug fix: filter() returns a one-shot iterator in Python 3; it was
    # exhausted by update_tags() below, so the `git add` loop in the else
    # branch never ran. Materialize the selection so it can be iterated twice.
    to_update = [entry for entry in input_entries if lib.tag_needs_update(entry)]
    try:
        lib.update_tags(index, to_update)
    except exc.KSError as err:
        print("Error: %s" % (err,), file=sys.stderr)
        result = 1
    else:
        for entry in to_update:
            subprocess.check_call(["git", "add", entry.name])

    sys.exit(result)
diff --git a/scripts/git_sort/patch.py b/scripts/git_sort/patch.py
new file mode 100644
index 0000000000..1890d4fb91
--- /dev/null
+++ b/scripts/git_sort/patch.py
@@ -0,0 +1,123 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import io
+import re
+import sys
+
+import exc
+
+
class Patch(object):
    """
    Parse and edit the header tags of a patch file.

    Wraps a binary file object positioned at the start: the header (everything
    before the first diff marker line) is kept in memory as decoded lines, the
    rest of the file stays on disk and is only read back by writeback().

    Usable as a context manager; modifications are written back when the
    `with` block exits (or, as a fallback, when the object is finalized).
    """
    # This pattern was copied from quilt/scripts/patchfns.in:patch_header() in
    # the quilt sources. A raw byte string (rb"") expresses the same regex
    # without relying on "\*" being passed through as an invalid escape
    # sequence (which raises DeprecationWarning on newer Pythons).
    break_matcher = re.compile(rb"(---|\*\*\*|Index:)[ \t][^ \t]|^diff -")

    def __init__(self, f):
        # The whole stream must be available, from the beginning.
        assert(f.tell() == 0)
        assert(isinstance(f, io.BufferedIOBase)) # binary (bytes) io object

        self.modified = False
        self.f = f
        self.head = []
        self.body = b""
        for line in f:
            if self.break_matcher.match(line):
                # First body line; the remainder of the file stays unread
                # until writeback().
                self.body = line
                break
            self.head.append(line.decode())


    def __del__(self):
        self.writeback()


    def __enter__(self):
        return self


    def __exit__(self, *args):
        self.writeback()


    def writeback(self):
        """Rewrite the underlying file if the header was modified."""
        if not self.modified:
            return

        self.body = self.body + self.f.read()
        self.f.seek(0)
        self.f.writelines([line.encode() for line in self.head])
        self.f.write(self.body)
        self.f.truncate()

        self.modified = False


    def get(self, tag):
        """
        tag does not contain the terminal ": ". It is case insensitive.

        Returns a list with the value for each instance of the tag.
        """
        start = "%s: " % (tag.lower(),)
        return [line[len(start):].strip()
                for line in self.head
                if line.lower().startswith(start)]


    def remove(self, tag):
        """
        Removes all instances of the tag.

        tag does not contain the terminal ": ". It is case insensitive.
        """
        if not self.f.writable():
            raise exc.KSException("Modification of read-only Patch")

        if len(self.get(tag)):
            self.modified = True
            start = "%s: " % (tag.lower(),)
            self.head = [line
                         for line in self.head
                         if not line.lower().startswith(start)]


    def change(self, tag, value):
        """
        Changes the value of all instances of the tag.

        tag does not contain the terminal ": ". It is case insensitive.

        Raises KeyError if the tag is not present.
        """
        if not self.f.writable():
            raise exc.KSException("Modification of read-only Patch")

        if len(self.get(tag)):
            self.modified = True
            start = "%s: " % (tag.lower(),)

            def change_value(line):
                # Keep the original capitalization of the tag itself.
                if line.lower().startswith(start):
                    return "%s%s\n" % (line[:len(start)], value.strip(),)
                else:
                    return line

            self.head = [change_value(line) for line in self.head]
        else:
            raise KeyError("Tag \"%s\" not found" % (tag,))
diff --git a/scripts/git_sort/patch_body.awk b/scripts/git_sort/patch_body.awk
new file mode 100644
index 0000000000..ea04ee5729
--- /dev/null
+++ b/scripts/git_sort/patch_body.awk
@@ -0,0 +1,28 @@
+#!/usr/bin/awk -f
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
# from quilt's patchfns

# Flag the start of the patch body: the first line that looks like a diff
# header ("--- file", "*** file", "Index: file" or "diff -...").
!body && /^(---|\*\*\*|Index:)[ \t][^ \t]|^diff -/ {
	body = 1
}

# Print the body (including the marker line), suppressing the header.
body {
	print
}
diff --git a/scripts/git_sort/patch_header.awk b/scripts/git_sort/patch_header.awk
new file mode 100644
index 0000000000..008161975a
--- /dev/null
+++ b/scripts/git_sort/patch_header.awk
@@ -0,0 +1,29 @@
+#!/usr/bin/awk -f
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
# from quilt's patchfns

# Stop at the first line that looks like a diff header ("--- file",
# "*** file", "Index: file" or "diff -..."); everything printed before it
# is the patch header.
/^(---|\*\*\*|Index:)[ \t][^ \t]|^diff -/ {
	exit
}

{
	print
}
+
diff --git a/scripts/git_sort/pre-commit.sh b/scripts/git_sort/pre-commit.sh
new file mode 100755
index 0000000000..b6ecb8f7e0
--- /dev/null
+++ b/scripts/git_sort/pre-commit.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+_libdir=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+
# sorted_section_changed
# Return 0 if the staged series.conf changes the sorted section compared to
# HEAD, 1 if it does not, 2 on error.
sorted_section_changed () {
	# Bug fix: `status` was not declared local and leaked into the
	# caller's environment.
	local status

	status=$(git diff-index --cached --name-status --diff-filter=AM HEAD \
		 -- series.conf | awk '{print $1}')
	case "$status" in
	"")
		# series.conf is not part of this commit.
		return 1
		;;
	A)
		# Newly added: the whole sorted section is new.
		return 0
		;;
	M)
		# Compare the sorted section extracted from HEAD with the one
		# extracted from the index.
		diff -q <(
			git cat-file blob HEAD:series.conf |
				"$_libdir"/series_conf.py
		) <(
			git cat-file blob :series.conf |
				"$_libdir"/series_conf.py
		) > /dev/null
		if [ $? -eq 1 ]; then
			return 0
		else
			# 0 means identical; 2 means diff trouble.
			return 1
		fi
		;;
	esac

	echo "Error detecting changes in series.conf sorted section." \
		> /dev/stderr
	return 2
}
+
# sorted_patches_changed
# Return 0 if any file staged for commit is one of the patches listed in the
# sorted section of the staged series.conf, 1 if none is, 2 on error.
sorted_patches_changed () {
	# Bug fix: `common` was not declared local and leaked into the
	# caller's environment.
	local common

	common=$(comm -12 <(
		git diff-index --cached --name-only --diff-filter=AMD HEAD | sort
	) <(
		git cat-file blob :series.conf |
		"$_libdir"/series_conf.py --name-only | sort
	) | wc -l)

	# "$common" -eq "$common" fails unless $common is an integer.
	if ! [ "$common" -eq "$common" ] 2>/dev/stderr; then
		# not an integer
		echo "Error detecting changes in series.conf sorted patches." \
			> /dev/stderr
		return 2
	fi

	if [ $common -gt 0 ]; then
		return 0
	else
		return 1
	fi
}
+
# Run the check only when the commit touches the sorted section of
# series.conf or one of the patches listed in it.
if sorted_section_changed || sorted_patches_changed; then
	# Remove the temporary checkout directory on any exit path.
	trap '[ -n "$tmpdir" -a -d "$tmpdir" ] && rm -r "$tmpdir"' EXIT
	tmpdir=$(mktemp --directory --tmpdir gs_pc.XXXXXXXXXX)

	# series_sort.py should examine the patches in the index, not the
	# working tree. Check them out.
	git cat-file blob :series.conf |
		"$_libdir"/series_conf.py --name-only |
		git checkout-index --quiet --prefix="$tmpdir/" --stdin

	git cat-file blob :series.conf |
		"$_libdir"/series_sort.py --check --prefix="$tmpdir"
	retval=$?

	rm -r "$tmpdir"
	unset tmpdir
	trap - EXIT

	# Non-zero means the sorted section is out of order: refuse the commit.
	if [ $retval -ne 0 ]; then
		echo "\"sorted patches\" section of series.conf failed check. Please read \"scripts/git_sort/README.md\", in particular the section \"Refreshing the order of patches in series.conf\"."
		exit 1
	fi
fi
diff --git a/scripts/git_sort/qcp.py b/scripts/git_sort/qcp.py
new file mode 100755
index 0000000000..1b69943372
--- /dev/null
+++ b/scripts/git_sort/qcp.py
@@ -0,0 +1,138 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import argparse
+import io
+import os
+import os.path
+import pygit2
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import exc
+import lib
+from patch import Patch
+import series_conf
+
+
def format_import(references, tmpdir, dstdir, rev, poi=[]):
    """
    Generate a patch file for commit `rev` with git format-patch into tmpdir,
    clean up its header, and import it into the quilt series under dstdir.

    Returns 0 on success; subprocess errors propagate as exceptions.
    """
    # Limiting the patch to paths of interest is not implemented yet.
    # NOTE: the mutable default for poi is safe here since poi is never
    # modified, only its length is checked.
    assert len(poi) == 0 # todo
    args = ("git", "format-patch", "--output-directory", tmpdir, "--notes",
            "--max-count=1", "--subject-prefix=", "--no-numbered", rev,)
    src = subprocess.check_output(args).decode().strip()
    # remove number prefix ("0001-" added by git format-patch)
    name = os.path.basename(src)[5:]
    dst = os.path.join(dstdir, name)
    # Avoid clobbering an existing patch with the same name by appending an
    # abbreviated commit id before the ".patch" suffix.
    if os.path.exists(os.path.join("patches", dst)):
        name = "%s-%s.patch" % (name[:-6], rev[:8],)
        dst = os.path.join(dstdir, name)

    subprocess.check_call((os.path.join(lib.libdir(), "clean_header.sh"),
                           "--commit=%s" % rev, "--reference=%s" % references,
                           src,))
    subprocess.check_call(("quilt", "import", "-P", dst, src,))
    # This will remind the user to run refresh_patch.sh
    lib.touch(".pc/%s~refresh" % (dst,))

    return 0
+
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Generate a patch from a git commit and import it into quilt.")
    parser.add_argument("-r", "--references",
                        help="bsc# or FATE# number used to tag the patch file.")
    parser.add_argument("-d", "--destination",
                        help="Destination \"patches.xxx\" directory.")
    parser.add_argument("-f", "--followup", action="store_true",
                        help="Reuse references and destination from the patch "
                        "containing the commit specified in the first "
                        "\"Fixes\" tag in the commit log of the commit to "
                        "import.")
    parser.add_argument("rev", help="Upstream commit id to import.")
    parser.add_argument("poi", help="Limit patch to specified paths.",
                        nargs=argparse.REMAINDER)
    args = parser.parse_args()

    # Either both --references and --destination, or --followup (which
    # derives both), must be given.
    if not (args.references and args.destination or args.followup):
        print("Error: you must specify --references and --destination or "
              "--followup.", file=sys.stderr)
        sys.exit(1)

    if (args.references or args.destination) and args.followup:
        print("Warning: --followup overrides information from --references and "
              "--destination.", file=sys.stderr)

    if not lib.check_series():
        sys.exit(1)

    repo_path = lib.repo_path()
    # Make sure git commands spawned later (format-patch) use the same repo.
    if "GIT_DIR" not in os.environ:
        os.environ["GIT_DIR"] = repo_path
    repo = pygit2.Repository(repo_path)
    try:
        commit = repo.revparse_single(args.rev)
    except ValueError:
        print("Error: \"%s\" is not a valid revision." % (args.rev,),
              file=sys.stderr)
        sys.exit(1)
    except KeyError:
        print("Error: revision \"%s\" not found in \"%s\"." %
              (args.rev, repo_path), file=sys.stderr)
        sys.exit(1)

    if args.followup:
        # Find the first "Fixes:" tag in the commit message and resolve the
        # commit it names.
        with Patch(io.BytesIO(commit.message.encode())) as patch:
            try:
                fixes = series_conf.firstword(patch.get("Fixes")[0])
            except IndexError:
                print("Error: no \"Fixes\" tag found in commit \"%s\"." %
                      (str(commit.id)[:12]), file=sys.stderr)
                sys.exit(1)
        fixes = str(repo.revparse_single(fixes).id)

        # Reuse destination and references of the patch which backports the
        # fixed commit. Patch names in series are relative to "patches".
        series = open("series")
        cwd = os.getcwd()
        os.chdir("patches")
        try:
            with series_conf.find_commit(fixes, series) as (name, patch,):
                destination = os.path.dirname(name)
                references = " ".join(patch.get("References"))
        except exc.KSNotFound:
            print("Error: no patch found which contains commit %s." %
                  (fixes[:12],), file=sys.stderr)
            sys.exit(1)
        os.chdir(cwd)

        print("Info: using references \"%s\" from patch \"%s\" which contains "
              "commit %s." % (references, name, fixes[:12]))
    else:
        destination = args.destination
        references = args.references

    # git format-patch writes into a throwaway directory which is removed
    # even if the import fails.
    tmpdir = tempfile.mkdtemp(prefix="qcp.")
    try:
        result = format_import(references, tmpdir, destination, str(commit.id),
                               args.poi)
    finally:
        shutil.rmtree(tmpdir)
    sys.exit(result)
diff --git a/scripts/git_sort/qdupcheck.py b/scripts/git_sort/qdupcheck.py
new file mode 100755
index 0000000000..d8c7920feb
--- /dev/null
+++ b/scripts/git_sort/qdupcheck.py
@@ -0,0 +1,76 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import argparse
+import os
+import os.path
+import pygit2
+import subprocess
+import sys
+
+import exc
+import lib
+import series_conf
+
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Check if a commit id is already backported by a patch in "
        "series.conf.")
    parser.add_argument("rev", help="Upstream commit id.")
    args = parser.parse_args()

    if not lib.check_series():
        sys.exit(1)

    # Resolve the user-supplied revision to a full commit id.
    repo_path = lib.repo_path()
    repo = pygit2.Repository(repo_path)
    try:
        commit = str(repo.revparse_single(args.rev).id)
    except KeyError:
        print("Error: revision \"%s\" not found in \"%s\"." %
              (args.rev, repo_path), file=sys.stderr)
        sys.exit(1)

    # Patch names in series are relative to the "patches" directory.
    series = open("series")
    cwd = os.getcwd()
    os.chdir("patches")
    try:
        with series_conf.find_commit(commit, series) as (name, patch,):
            print("Commit %s already present in patch\n\t%s" % (
                commit[:12], name,))
            references = " ".join(patch.get("References"))
            if references:
                print("for\n\t%s" % (references,))

            # Exit non-zero if the duplicate is the currently applied top
            # patch; quilt is queried from the original working directory.
            try:
                top = subprocess.check_output(
                    ("quilt", "--quiltrc", "-", "top",),
                    cwd=cwd, stderr=subprocess.STDOUT,).decode().strip()
            except subprocess.CalledProcessError as err:
                if err.output.decode() == "No patches applied\n":
                    top = None
                else:
                    raise
            if top == name:
                print("This is the top patch.")
                sys.exit(1)
    except exc.KSNotFound:
        # The commit is not backported by any patch: stay silent, exit 0.
        pass
diff --git a/scripts/git_sort/qgoto.py b/scripts/git_sort/qgoto.py
new file mode 100755
index 0000000000..a164432dd2
--- /dev/null
+++ b/scripts/git_sort/qgoto.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import argparse
+import os
+import subprocess
+import sys
+
+import exc
+import lib
+
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Print the quilt push or pop command required to reach the "
        "position where the specified commit should be imported.")
    parser.add_argument("rev", help="Upstream commit id.")
    args = parser.parse_args()

    if not lib.check_series():
        sys.exit(1)

    # Current top patch, or None when no patches are applied.
    try:
        top = subprocess.check_output(
            ("quilt", "--quiltrc", "-", "top",),
            stderr=subprocess.STDOUT).decode().strip()
    except subprocess.CalledProcessError as err:
        if err.output.decode() == "No patches applied\n":
            top = None
        else:
            raise

    series = open("series")
    os.chdir("patches")

    # Where in the series should the commit go, relative to the top patch?
    try:
        (name, delta,) = lib.sequence_insert(series, args.rev, top)
    except exc.KSException as err:
        print("Error: %s" % (err,), file=sys.stderr)
        sys.exit(1)

    # delta > 0: pushes needed; delta < 0: pops needed; 0: already in place.
    if delta > 0:
        print("push %d" % (delta,))
    elif delta < 0:
        print("pop %d" % (-1 * delta,))
diff --git a/scripts/git_sort/quilt-mode.sh b/scripts/git_sort/quilt-mode.sh
new file mode 100644
index 0000000000..b6ab09c380
--- /dev/null
+++ b/scripts/git_sort/quilt-mode.sh
@@ -0,0 +1,412 @@
+# Contains a set of shell functions to assist in backporting upstream commits
+# to SUSE's kernel-source.git.
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+_libdir=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+. "$_libdir"/lib.sh
+. "$_libdir"/lib_tag.sh
+
+alias q=quilt
+
+_switcheroo () {
+ if [ -r series ] &&
+ head -n1 series | grep -qv "^# Kernel patches configuration file$" &&
+ [ -r patches/series.conf ] &&
+ head -n1 patches/series.conf | grep -q "^# Kernel patches configuration file$"; then
+ ln -sf patches/series.conf series
+ fi
+}
+_switcheroo
+
+
+qfmake () {
+ local i
+ local doit=1
+ while true ; do
+ case "$1" in
+ -h|--help)
+ echo "Usage: ${FUNCNAME[1]} [options] [extra arguments passed to make]"
+ echo ""
+ echo "Build targets that have been modified by top patch (using a simple heuristic)."
+ echo ""
+ echo "Options:"
+ echo " -x, --exclude <target|dir> Exclude target or targets under directory from automatic building."
+ echo " -X, --no-exclude <target|dir> Remove previously set exclusion."
+ echo " -r, --reset Reset exclusion list."
+ echo " -s, --show Show exclusion list."
+ echo " -h, --help Print this help"
+ return
+ ;;
+ -x|--exclude)
+ if printf "%s\n" "${qfm_excludes[@]}" | grep -qv "$2"; then
+ qfm_excludes+=("$2")
+ fi
+ shift
+ doit=
+ ;;
+ -X|--no-exclude)
+ for i in $(seq 0 $((${#qfm_excludes[@]} - 1))); do
+ if [ "${qfm_excludes[i]}" = "$2" ]; then
+ qfm_excludes=("${qfm_excludes[@]:0:$i}" "${qfm_excludes[@]:$((i + 1))}")
+ break
+ fi
+ done
+ shift
+ doit=
+ ;;
+ -r|--reset)
+ qfm_excludes=()
+ return
+ ;;
+ -s|--show)
+ if [ "${#qfm_excludes[@]}" -gt 0 ]; then
+ printf "%s\n" "${qfm_excludes[@]}"
+ fi
+ return
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+ done
+
+ if [ -z "$doit" ]; then
+ return
+ fi
+
+ local targets new_target
+ for new_target in "$@" $(quilt --quiltrc - files | sed -n -e 's/.c$/.o/p'); do
+ local exclude
+ local add=1
+ for exclude in "${qfm_excludes[@]}"; do
+ # new_target is under exclude
+ if echo "$new_target" | grep -q '^'"$exclude"; then
+ add=
+ break
+ fi
+ done
+ if [ -z "$add" ]; then
+ continue
+ fi
+
+ # filter targets to remove elements that are included under
+ # other elements
+ for i in $(seq 0 $((${#targets[@]} - 1))); do
+ local target=${targets[i]}
+ # new_target is under target
+ if echo "$new_target" | grep -q '^'"$target"; then
+ add=
+ break
+ # target is under new_target
+ elif echo "$target" | grep -q '^'"$new_target"; then
+ # remove targets[i]
+ targets=("${targets[@]:0:$i}" "${targets[@]:$((i + 1))}")
+ fi
+ done
+ if [ "$add" ]; then
+ targets+=("$new_target")
+ fi
+ done
+
+ if [ ${#targets[@]} -gt 0 ]; then
+ make "${targets[@]}"
+ fi
+}
+
+
+qf1 () {
+ cat $(quilt --quiltrc "$_libdir"/quiltrc.qf1 top)
+}
+
+
+qgoto () {
+ if command=$("$_libdir"/qgoto.py "$@") && [ "$command" ]; then
+ quilt $command
+ fi
+}
+
+
+qdupcheck () {
+ "$_libdir"/qdupcheck.py "$@"
+}
+
+
+qdiffcheck () {
+ local git_dir
+ git_dir=$("$_libdir"/../linux_git.sh) || return 1
+ local rev=$(tag_get git-commit < $(q top) | GIT_DIR=$git_dir expand_git_ref)
+ interdiff <(GIT_DIR=$git_dir $_libdir/git-f1 $rev) $(q top)
+}
+
+
+#unset _references _destination
+qcp () {
+ # capture and save some options
+ local r_set d_set
+ local args
+ while [ "$1" ] ; do
+ case "$1" in
+ -r|--references)
+ _references=$2
+ args+=($1 "$2")
+ r_set=1
+ shift
+ ;;
+ -d|--destination)
+ _destination=$2
+ args+=($1 "$2")
+ d_set=1
+ shift
+ ;;
+ *)
+ args+=($1)
+ ;;
+ esac
+ shift
+ done
+
+ if [ -z "$r_set" -a "$_references" ]; then
+ args=(-r "$_references" "${args[@]}")
+ fi
+
+ if [ -z "$d_set" -a "$_destination" ]; then
+ args=(-d "$_destination" "${args[@]}")
+ fi
+
+ "$_libdir"/qcp.py "${args[@]}"
+}
+
+
+# Save -r and -d for later use by qcp
+_saveopts () {
+ local result=$(getopt -o hr:d: --long help,references:,destination: -n "${BASH_SOURCE[0]}:${FUNCNAME[0]}()" -- "$@")
+ if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ return 1
+ fi
+
+ eval set -- "$result"
+
+ while true ; do
+ case "$1" in
+ -h|--help)
+ echo "Usage: ${FUNCNAME[1]} [options]"
+ echo ""
+ echo "Options:"
+ echo " -r, --references <value> bsc# or FATE# number used to tag the patch file."
+ echo " -d, --destination <dir> Destination \"patches.xxx\" directory."
+ echo " -h, --help Print this help"
+ return 1
+ ;;
+ -r|--references)
+ _references=$2
+ shift
+ ;;
+ -d|--destination)
+ _destination=$2
+ shift
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ return 1
+ ;;
+ esac
+ shift
+ done
+}
+
+
+#unset series
+qadd () {
+ local git_dir
+ git_dir=$("$_libdir"/../linux_git.sh) || return 1
+
+ if [ $BASH_SUBSHELL -gt 0 ]; then
+ echo "Error: it looks like this function is being run in a subshell. It will not be effective because its purpose is to set an environment variable. You could run it like this instead: \`${FUNCNAME[0]} <<< \$(<cmd>)\`." > /dev/stderr
+ return 1
+ fi
+
+ if ! _saveopts "$@"; then
+ return
+ fi
+
+ local _series=$(grep .)
+
+ mapfile -t series <<< "$(
+ (
+ [ ${#series[@]} -gt 0 ] && printf "%s\n" "${series[@]}"
+ [ -n "$_series" ] && echo "$_series"
+ ) | GIT_DIR=$git_dir "$_libdir"/git-sort
+ )"
+
+ if [ -z "${series[0]}" ]; then
+ unset series[0]
+ fi
+}
+
+
+qedit () {
+ local git_dir
+ git_dir=$("$_libdir"/../linux_git.sh) || return 1
+
+ if [ "${tmpfile+set}" = "set" ]; then
+ local _tmpfile=$tmpfile
+ fi
+
+ trap '[ -n "$tmpfile" -a -f "$tmpfile" ] && rm "$tmpfile"' EXIT
+ tmpfile=$(mktemp --tmpdir qedit.XXXXXXXXXX)
+ [ ${#series[@]} -gt 0 ] && printf "%s\n" "${series[@]}" > "$tmpfile"
+
+ ${EDITOR:-${VISUAL:-vi}} "$tmpfile"
+
+ mapfile -t series <<< "$(grep . "$tmpfile" |
+ GIT_DIR=$git_dir $_libdir/git-sort)"
+
+ if [ -z "${series[0]}" ]; then
+ unset series[0]
+ fi
+
+ rm "$tmpfile"
+ if [ "${_tmpfile+set}" = "set" ]; then
+ tmpfile=$_tmpfile
+ else
+ unset tmpfile
+ fi
+ trap - EXIT
+}
+
+
+qcat () {
+ [ ${#series[@]} -gt 0 ] && printf "%s\n" "${series[@]}"
+}
+
+
+_strip_begin () {
+ sed -re 's/^[[:space:]]+//'
+}
+
+
+qnext () {
+ [ ${#series[@]} -gt 0 ] && echo "${series[0]}" | _strip_begin
+}
+
+
+qskip () {
+ if [ ${#series[@]} -gt 0 ]; then
+ echo "Skipped: $(echo "${series[0]}" | _strip_begin)"
+ series=("${series[@]:1}")
+ if [ ${#series[@]} -gt 0 ]; then
+ echo "Next: $(echo "${series[0]}" | _strip_begin)"
+ else
+ echo "No more entries"
+ fi
+ else
+ return 1
+ fi
+}
+
+
+_stablecheck () {
+ local entry=$1
+ local patch=$2
+ local git_dir
+ git_dir=$("$_libdir"/../linux_git.sh) || return 1
+
+ local rev=$(echo "$patch" | awk '{
+ match($0, "patch-([[:digit:]]+\\.[[:digit:]]+)\\.([[:digit:]]+)(-([[:digit:]]+))?", a)
+ if (a[3]) {
+ print "v" a[1] "." a[2] "..v" a[1] "." a[4]
+ } else {
+ print "v" a[1] "..v" a[1] "." a[2]
+ }
+ }')
+ local output=$(GIT_DIR=$git_dir git log "$rev" --pretty=tformat:%H --grep "$entry")
+ local nb=$(echo "$output" | wc -l)
+ if [ "$output" -a $nb -eq 1 ]; then
+ echo -en "This commit was backported to a stable branch as\n\t"
+ GIT_DIR=$git_dir $_libdir/git-overview -m "$output"
+ echo
+ elif [ $nb -gt 1 ]; then
+ echo "Warning: $nb potential stable commits found:" > /dev/stderr
+ GIT_DIR=$git_dir git log "$rev" --oneline --grep "$entry" > /dev/stderr
+ else
+ echo "Warning: no potential stable commit found." > /dev/stderr
+ fi
+}
+
+
+qdoit () {
+ local entry=$(qnext | awk '{print $1}')
+ while [ "$entry" ]; do
+ local command
+ if ! command=$("$_libdir"/qgoto.py "$entry"); then
+ echo "Error: qgoto.py exited with an error" > /dev/stderr
+ return 1
+ fi
+ while [ "$command" ]; do
+ if ! quilt $command; then
+            echo "\`quilt $command\` did not complete successfully. Please examine the situation." > /dev/stderr
+ return 1
+ fi
+
+ if ! command=$("$_libdir"/qgoto.py "$entry"); then
+ echo "Error: qgoto.py exited with an error" > /dev/stderr
+ return 1
+ fi
+ done
+
+ local output
+ if ! output=$(qdupcheck $entry); then
+ echo
+ echo "$output"
+ echo
+ local patch=$(echo "$output" | awk '/patches.kernel.org\/patch-/ {print $1}')
+ if [ "$patch" ]; then
+ _stablecheck "$entry" "$patch"
+ fi
+ echo "The next commit is already present in the series. Please examine the situation." > /dev/stderr
+ return 1
+ fi
+
+ if ! qcp $entry; then
+            echo "\`qcp $entry\` did not complete successfully. Please examine the situation." > /dev/stderr
+ return 1
+ fi
+ series=("${series[@]:1}")
+
+ if ! quilt push; then
+ echo "The last commit did not apply successfully. Please examine the situation." > /dev/stderr
+ return 1
+ fi
+
+ ./refresh_patch.sh
+
+ if ! qfmake "$@"; then
+ echo "The last applied commit results in a build failure. Please examine the situation." > /dev/stderr
+ return 1
+ fi
+
+ entry=$(qnext | awk '{print $1}')
+ done
+}
diff --git a/scripts/git_sort/quiltrc.qf1 b/scripts/git_sort/quiltrc.qf1
new file mode 100644
index 0000000000..d272df16ee
--- /dev/null
+++ b/scripts/git_sort/quiltrc.qf1
@@ -0,0 +1 @@
+QUILT_PATCHES_PREFIX=yes
diff --git a/scripts/git_sort/refs_in_series.sh b/scripts/git_sort/refs_in_series.sh
new file mode 100755
index 0000000000..43ddc05167
--- /dev/null
+++ b/scripts/git_sort/refs_in_series.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+# Check if a commit is already part of a patch in SUSE's kernel-source.git
+# Useful to check if a list of commits have already been backported.
+
+
+progname=$(basename "$0")
+libdir=$(dirname "$(readlink -f "$0")")
+git_dir=$("$libdir"/../linux_git.sh) || exit 1
+
+usage () {
+ echo "Usage: $progname [paths of interest...]"
+ echo ""
+ echo "Read git references from stdin and check if a patch (present in "
+ echo "\"series.conf\") applies the part of the related commit that is below"
+ echo "the \"paths of interest\"."
+ echo ""
+}
+
+result=$(getopt -o h --long help -n "$progname" -- "$@")
+
+if [ $? != 0 ]; then
+ echo "Error: getopt error" >&2
+ exit 1
+fi
+
+eval set -- "$result"
+
+while true ; do
+ case "$1" in
+ -h|--help)
+ usage
+ exit 0
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ echo "Error: could not parse arguments" >&2
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+if [ ! -r "series.conf" ]; then
+ echo "Error: \"series.conf\" file could not be read. Are you at the base of a kernel-source.git tree?" > /dev/stderr
+ exit 1
+fi
+
+for arg in "$@"; do
+ includeargs+="--include=\"$arg\" "
+done
+
+series=$(
+ while read file rest; do
+ if [ -r "$file" ]; then
+ echo "$file"
+ fi
+ done < "series.conf"
+)
+
+while read line; do
+ set $line
+ ref=$1
+ orig_stat_nb=$(GIT_DIR=$git_dir git format-patch --stdout -n1 $ref | eval git apply --numstat "$includeargs" | wc -l)
+ found=
+ while read patch; do
+ if [ ! "$patch" ]; then
+ continue
+ fi
+ patch_stat_nb=$(eval git apply --numstat "$includeargs" < "$patch" | wc -l)
+ if grep -q "$patch" <<< "$series" && [ "$orig_stat_nb" = "$patch_stat_nb" ]; then
+ found=1
+ break
+ fi
+ done <<< "$(git grep -li "^git-commit: $ref")"
+
+ if [ "$found" ]; then
+ echo -n "* "
+ else
+ echo -n " "
+ fi
+ echo "$line"
+done
diff --git a/scripts/git_sort/sequence-insert.py b/scripts/git_sort/sequence-insert.py
new file mode 100755
index 0000000000..78f753912a
--- /dev/null
+++ b/scripts/git_sort/sequence-insert.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+"""
+When we want to backport a specific commit at its right position in the sorted
+sub-series, it is most efficient to use sequence-patch.sh to expand the tree up
+to the patch just before where the new commit will be added. The current script
+prints out which patch that is. Use in conjunction with sequence-patch.sh:
+ kernel-source$ ./scripts/sequence-patch.sh $(./scripts/git_sort/sequence-insert.py 5c8227d0d3b1)
+"""
+
+import argparse
+import os
+import sys
+
+import exc
+import lib
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Print the name of the patch over which the specified "
+ "commit should be imported.")
+ parser.add_argument("rev", help="Upstream commit id.")
+ args = parser.parse_args()
+
+ try:
+ (name, delta,) = lib.sequence_insert(open("series.conf"), args.rev,
+ None)
+ except exc.KSException as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ print(name)
diff --git a/scripts/git_sort/series_conf.py b/scripts/git_sort/series_conf.py
new file mode 100755
index 0000000000..86cb451c4b
--- /dev/null
+++ b/scripts/git_sort/series_conf.py
@@ -0,0 +1,169 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+# This script is used by the commit hook to detect if there are changes in the
+# sorted section. Developers may commit to kernel-source without having changed
+# the sorted section and used the git-sort tools, therefore without having the
+# pygit2 module available. Therefore, this script should avoid a dependency on
+# pygit2 since it's not present on a default python install and we don't want to
+# force developers to install pygit2 just to commit unrelated changes to
+# kernel-source.
+
+import argparse
+import contextlib
+import errno
+import sys
+
+import exc
+from patch import Patch
+
+
+start_text = "sorted patches"
+end_text = "end of sorted patches"
+
+
+def split(series):
+ before = []
+ inside = []
+ after = []
+
+ whitespace = []
+ comments = []
+
+ current = before
+ for line in series:
+ l = line.strip()
+
+ if l == "":
+ if comments:
+ current.extend(comments)
+ comments = []
+ whitespace.append(line)
+ continue
+ elif l.startswith("#"):
+ if whitespace:
+ current.extend(whitespace)
+ whitespace = []
+ comments.append(line)
+
+ if current == before and l.lower() == "# %s" % (start_text,):
+ current = inside
+ elif current == inside and l.lower() == "# %s" % (end_text,):
+ current = after
+ else:
+ if comments:
+ current.extend(comments)
+ comments = []
+ if whitespace:
+ current.extend(whitespace)
+ whitespace = []
+ current.append(line)
+ if comments:
+ current.extend(comments)
+ comments = []
+ if whitespace:
+ current.extend(whitespace)
+ whitespace = []
+
+ if current is before:
+ raise exc.KSNotFound("Sorted subseries not found.")
+
+ current.extend(comments)
+ current.extend(whitespace)
+
+ return (before, inside, after,)
+
+
+def filter_patches(line):
+ line = line.strip()
+
+ if line == "" or line.startswith(("#", "-", "+",)):
+ return False
+ else:
+ return True
+
+
+def firstword(value):
+ return value.split(None, 1)[0]
+
+
+filter_series = lambda lines : [firstword(line) for line in lines
+ if filter_patches(line)]
+
+
+@contextlib.contextmanager
+def find_commit(commit, series, mode="rb"):
+ """
+ commit: unabbreviated git commit id
+ series: list of lines from series.conf
+ mode: mode to open the patch files in, should be "rb" or "r+b"
+
+ Caller must chdir to where the entries in series can be found.
+
+ Returns patch.Patch instances
+ """
+ for name in filter_series(series):
+ patch = Patch(open(name, mode=mode))
+ found = False
+ if commit in [firstword(value)
+ for value in patch.get("Git-commit")
+ if value]:
+ found = True
+ yield name, patch
+ patch.writeback()
+ if found:
+ return
+ raise exc.KSNotFound()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Extract the sorted patches section of a series.conf file.")
+ parser.add_argument("-n", "--name-only", action="store_true",
+ help="Print only patch names.")
+ parser.add_argument("series", nargs="?", metavar="series.conf",
+ help="series.conf file. Default: read input from stdin.")
+ args = parser.parse_args()
+
+ if args.series is not None:
+ f = open(args.series)
+ else:
+ f = sys.stdin
+ lines = f.readlines()
+
+ try:
+ before, inside, after = split(lines)
+ except exc.KSNotFound:
+ pass
+ else:
+ if args.name_only:
+ inside = filter_series(inside)
+ inside = [line + "\n" for line in inside]
+
+ try:
+ sys.stdout.writelines(inside)
+ # Avoid an unsightly error that may occur when not all output is
+ # read:
+ # Exception ignored in: <_io.TextIOWrapper name='<stdout>' mode='w' encoding='UTF-8'>
+ # BrokenPipeError: [Errno 32] Broken pipe
+ sys.stdout.flush()
+ except BrokenPipeError:
+ sys.stderr.close()
+ sys.exit()
diff --git a/scripts/git_sort/series_insert.py b/scripts/git_sort/series_insert.py
new file mode 100755
index 0000000000..d55d7d0d79
--- /dev/null
+++ b/scripts/git_sort/series_insert.py
@@ -0,0 +1,143 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+"""
+Script to insert new patches in series.conf according to the upstream order of
+commits that the patches backport.
+"""
+
+import argparse
+import collections
+import pygit2
+import sys
+
+import exc
+import git_sort
+import lib
+import series_conf
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Insert new patches in series.conf according to the "
+ "upstream order of commits that the patches backport.")
+ parser.add_argument("patches", nargs="+", metavar="PATCH",
+ help="Patch file to insert in series.conf")
+ args = parser.parse_args()
+
+ repo_path = lib.repo_path()
+ repo = pygit2.Repository(repo_path)
+ index = lib.git_sort.SortIndex(repo)
+
+ try:
+ with open("series.conf") as f:
+ lines = f.readlines()
+ except IOError as err:
+ print("Error: could not open series file, %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ try:
+ before, inside, after = series_conf.split(lines)
+ current_entries = lib.parse_inside(index, inside, False)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ if list(filter(lib.tag_needs_update, current_entries)):
+ print("Error: Some Git-repo tags for patches currently in series.conf "
+ "are outdated. Please run series_sort.py first and commit the "
+ "result before adding new patches.", file=sys.stderr)
+ sys.exit(1)
+
+ current_names = set([entry.name for entry in current_entries])
+ current_revs = {rev : entry.name
+ for entry in current_entries
+ if entry.dest_head != git_sort.oot
+ for rev in entry.revs}
+ new_lines = set()
+ new_entries = []
+ for name in args.patches:
+ if name in current_names:
+ print("Error: patch \"%s\" is already in series.conf." % (name,),
+ file=sys.stderr)
+ sys.exit(1)
+ entry = lib.InputEntry("\t%s\n" % (name,))
+ new_lines.add(entry.value)
+ try:
+ entry.from_patch(index, name, git_sort.oot, True)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+ if entry.dest_head != git_sort.oot:
+ for rev in entry.revs:
+ try:
+ match = current_revs[rev]
+ except KeyError:
+ continue
+ else:
+ print("Warning: commit %s in new patch \"%s\" is already "
+ "present in patch \"%s\" from series.conf." % (
+ rev[12:], entry.name, match,), file=sys.stderr)
+ new_entries.append(entry)
+
+ try:
+ sorted_entries = lib.series_sort(index, current_entries + new_entries)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ cur_sorted_entries = collections.OrderedDict()
+ for head, lines in list(sorted_entries.items()):
+ current_lines = [line for line in lines if line not in new_lines]
+ if current_lines:
+ cur_sorted_entries[head] = current_lines
+
+ cur_inside = lib.flatten([
+ lib.series_header(inside),
+ lib.series_format(cur_sorted_entries),
+ lib.series_footer(inside),
+ ])
+
+ if inside != cur_inside:
+ print("Error: Current series.conf is not sorted. "
+ "Please run series_sort.py first and commit the result before "
+ "adding new patches.", file=sys.stderr)
+ sys.exit(1)
+
+ new_inside = lib.flatten([
+ lib.series_header(inside),
+ lib.series_format(sorted_entries),
+ lib.series_footer(inside),
+ ])
+
+ output = lib.flatten([
+ before,
+ new_inside,
+ after,
+ ])
+
+ with open("series.conf", mode="w") as f:
+ f.writelines(output)
+
+ try:
+ lib.update_tags(index, list(filter(lib.tag_needs_update, new_entries)))
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
diff --git a/scripts/git_sort/series_sort.py b/scripts/git_sort/series_sort.py
new file mode 100755
index 0000000000..ae50761a97
--- /dev/null
+++ b/scripts/git_sort/series_sort.py
@@ -0,0 +1,149 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+"""
+Script to sort series.conf lines according to the upstream order of commits that
+the patches backport.
+
+The script can either read series.conf lines (or a subset thereof) from stdin or
+from the file named in the first argument.
+
+A convenient way to use series_sort.py to filter a subset of lines
+within series.conf when using the vim text editor is to visually
+select the lines and filter them through the script:
+ shift-v
+ j j j j [...] # or ctrl-d or /pattern<enter>
+ :'<,'>! ~/<path>/series_sort.py
+"""
+
+import argparse
+import os
+import sys
+
+try:
+ import pygit2
+except ImportError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ print("Please install the \"pygit2\" python3 module. For more details, "
+ "please refer to the \"Installation Requirements\" section of "
+ "\"scripts/git_sort/README.md\".", file=sys.stderr)
+ sys.exit(1)
+
+import exc
+import git_sort
+import lib
+import series_conf
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Sort series.conf lines according to the upstream order of "
+ "commits that the patches backport.")
+ parser.add_argument("-p", "--prefix", metavar="DIR",
+ help="Search for patches in this directory. Default: "
+ "current directory.")
+ parser.add_argument("-c", "--check", action="store_true",
+ help="Report via exit status 2 if the series is not "
+ "sorted. Default: false.")
+ parser.add_argument("-u", "--upstream", action="store_true",
+ help="Move patches upstream between subsystem sections "
+ "as appropriate. Default: false.")
+ parser.add_argument("series", nargs="?", metavar="series.conf",
+ help="series.conf file which will be modified in "
+ "place. Default: read input from stdin.")
+ args = parser.parse_args()
+
+ repo_path = lib.repo_path()
+ repo = pygit2.Repository(repo_path)
+ index = git_sort.SortIndex(repo)
+
+ if args.series is not None:
+ try:
+ f = open(args.series)
+ except FileNotFoundError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+ series = os.path.abspath(args.series)
+ else:
+ f = sys.stdin
+ lines = f.readlines()
+
+ if args.prefix is not None:
+ os.chdir(args.prefix)
+
+ try:
+ before, inside, after = series_conf.split(lines)
+ except exc.KSNotFound as err:
+ if args.series is None:
+ before = []
+ inside = lines
+ after = []
+ elif args.check:
+ # no sorted section
+ sys.exit(0)
+ else:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ try:
+ input_entries = lib.parse_inside(index, inside, args.upstream)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ try:
+ sorted_entries = lib.series_sort(index, input_entries)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
+
+ new_inside = lib.flatten([
+ lib.series_header(inside),
+ lib.series_format(sorted_entries),
+ lib.series_footer(inside),
+ ])
+
+ to_update = list(filter(lib.tag_needs_update, input_entries))
+ if args.check:
+ result = 0
+ if inside != new_inside:
+ print("Input is not sorted.")
+ result = 2
+ if len(to_update):
+ print("Git-repo tags are outdated.")
+ result = 2
+ sys.exit(result)
+ else:
+ output = lib.flatten([
+ before,
+ new_inside,
+ after,
+ ])
+
+ if args.series is not None:
+ f = open(series, mode="w")
+ else:
+ f = sys.stdout
+ f.writelines(output)
+ try:
+ lib.update_tags(index, to_update)
+ except exc.KSError as err:
+ print("Error: %s" % (err,), file=sys.stderr)
+ sys.exit(1)
diff --git a/scripts/git_sort/tests/__init__.py b/scripts/git_sort/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/scripts/git_sort/tests/__init__.py
diff --git a/scripts/git_sort/tests/opensuse-15.0/Dockerfile b/scripts/git_sort/tests/opensuse-15.0/Dockerfile
new file mode 100644
index 0000000000..0959660370
--- /dev/null
+++ b/scripts/git_sort/tests/opensuse-15.0/Dockerfile
@@ -0,0 +1,25 @@
+# https://hub.docker.com/r/opensuse/leap/
+FROM opensuse/leap:15.0 AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm rcs
+
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/Kernel:/tools/openSUSE_Leap_15.0/Kernel:tools.repo
+RUN zypper -n in python3-pygit2
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/openSUSE_Leap_15.0/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/opensuse-42.3/Dockerfile b/scripts/git_sort/tests/opensuse-42.3/Dockerfile
new file mode 100644
index 0000000000..cb369809ac
--- /dev/null
+++ b/scripts/git_sort/tests/opensuse-42.3/Dockerfile
@@ -0,0 +1,25 @@
+# https://hub.docker.com/r/opensuse/leap/
+FROM opensuse/leap:42.3 AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm rcs
+
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/Kernel:/tools/openSUSE_Leap_42.3/Kernel:tools.repo
+RUN zypper -n in python3-pygit2
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/openSUSE_Leap_42.3/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/opensuse-tumbleweed/Dockerfile b/scripts/git_sort/tests/opensuse-tumbleweed/Dockerfile
new file mode 100644
index 0000000000..182c8d0689
--- /dev/null
+++ b/scripts/git_sort/tests/opensuse-tumbleweed/Dockerfile
@@ -0,0 +1,22 @@
+# https://hub.docker.com/r/opensuse/tumbleweed/
+FROM opensuse/tumbleweed AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm python3-pygit2 rcs
+
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/openSUSE_Tumbleweed/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/run_all.sh b/scripts/git_sort/tests/run_all.sh
new file mode 100755
index 0000000000..12235a28d2
--- /dev/null
+++ b/scripts/git_sort/tests/run_all.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+libdir=$(dirname "$(readlink -f "$0")")
+
+# The sle12-sp2 image is not picked up by registry.suse.de, import it manually
+if [ $(docker image ls -q benjamin_poirier/docker_images/sle-12-sp2:latest | wc -l) -ne 1 ]; then
+ echo "Fetching base image for sle12-sp2..."
+ wget -q -O - http://download.suse.de/ibs/home:/benjamin_poirier:/docker_images:/SLE-12-SP2/images/x86_64/sles12sp2-docker-image.rpm | \
+ rpm2cpio - | cpio -i --quiet --to-stdout *.tar.xz | xzcat | \
+ docker import - benjamin_poirier/docker_images/sle-12-sp2
+fi
+
+for release in \
+ sle12-sp2 \
+ sle12-sp3 \
+ sle15 \
+ opensuse-42.3 \
+ opensuse-15.0 \
+ opensuse-tumbleweed \
+ ; do
+ echo "Building container image for $release..."
+ docker build -q -t gs-test-$release "$libdir/$release"
+ echo "Running tests in $release:"
+ docker run --rm --name=gs-test-$release \
+ --mount type=bind,source="$libdir/../../",target=/scripts,readonly \
+ gs-test-$release
+done
diff --git a/scripts/git_sort/tests/sle12-sp2/Dockerfile b/scripts/git_sort/tests/sle12-sp2/Dockerfile
new file mode 100644
index 0000000000..9de8830e78
--- /dev/null
+++ b/scripts/git_sort/tests/sle12-sp2/Dockerfile
@@ -0,0 +1,27 @@
+# git_sort test environment for SLE 12 SP2.
+# The base image is imported manually by tests/run_all.sh (not on registry.suse.de).
+FROM benjamin_poirier/docker_images/sle-12-sp2:latest AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm rcs
+
+# Commit identity for the git repositories created by the test suite.
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/Kernel:/tools/SLE_12_SP2/Kernel:tools.repo
+RUN zypper -n in python3-pygit2
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/SLE_12_SP2/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/sle12-sp3/Dockerfile b/scripts/git_sort/tests/sle12-sp3/Dockerfile
new file mode 100644
index 0000000000..df097b6b9f
--- /dev/null
+++ b/scripts/git_sort/tests/sle12-sp3/Dockerfile
@@ -0,0 +1,25 @@
+# Base image pulled from the internal registry: http://registry.suse.de/
+FROM registry.suse.de/home/benjamin_poirier/docker_images/sle-12-sp3/images/suse/sles12sp3:2.0.3 AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm rcs
+
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/Kernel:/tools/SLE_12_SP3/Kernel:tools.repo
+RUN zypper -n in python3-pygit2
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/SLE_12_SP3/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/sle15/Dockerfile b/scripts/git_sort/tests/sle15/Dockerfile
new file mode 100644
index 0000000000..3b08991cd6
--- /dev/null
+++ b/scripts/git_sort/tests/sle15/Dockerfile
@@ -0,0 +1,25 @@
+# Base image pulled from the internal registry: http://registry.suse.de/
+FROM registry.suse.de/home/david_chang/branches/devel/docker/images/sle15/images/suse/sles15:2.0.3 AS base
+
+RUN zypper -n ref
+
+FROM base AS packages
+
+RUN zypper -n in git python3 python3-dbm rcs awk
+
+RUN git config --global user.email "you@example.com"
+RUN git config --global user.name "Your Name"
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/Kernel:/tools/SLE_15/Kernel:tools.repo
+RUN zypper -n in python3-pygit2
+
+RUN zypper -n ar -G https://download.opensuse.org/repositories/home:/benjamin_poirier:/series_sort/SLE_15/home:benjamin_poirier:series_sort.repo
+RUN zypper -n in --from home_benjamin_poirier_series_sort quilt
+
+FROM packages
+
+VOLUME /scripts
+
+WORKDIR /scripts/git_sort
+
+CMD python3 -m unittest discover -v
diff --git a/scripts/git_sort/tests/support.py b/scripts/git_sort/tests/support.py
new file mode 100644
index 0000000000..dfff2b855f
--- /dev/null
+++ b/scripts/git_sort/tests/support.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import datetime
+import os.path
+import pygit2
+
+
+# Adapted from the "git show" recipe: http://www.pygit2.org/recipes/git-show.html
+class FixedOffset(datetime.tzinfo):
+    """tzinfo with a fixed offset in minutes east of UTC (no DST, no name)."""
+
+    def __init__(self, offset):
+        self.__offset = datetime.timedelta(minutes = offset)
+
+    def utcoffset(self, dt):
+        return self.__offset
+
+    def tzname(self, dt):
+        return None # zone name unknown; returning None is permitted here
+
+    def dst(self, dt):
+        return datetime.timedelta(0) # no DST information available
+
+
+def format_sanitized_subject(message):
+    """Build a patch file name fragment from a commit message's first line.
+    Reimplemented from the similarly named function in the git source:
+    keeps [a-zA-Z0-9._], collapses other runs into "-", caps at 52 chars."""
+    def is_title_char(c):
+        if ((c >= 'a' and c <= 'z') or (c >= 'A' and c <= 'Z') or
+            (c >= '0' and c <= '9') or c == '.' or c == '_'):
+            return True
+        else:
+            return False
+
+    result = []
+    space = False
+    i = 0
+    end = message.find("\n")
+    if end == -1:
+        end = len(message)
+    while i < end:
+        c = message[i]
+        if is_title_char(c):
+            if space and result:
+                result.append("-")
+            result.append(c)
+            space = False
+            if c == ".":
+                while i + 1 < end and message[i + 1] == ".":
+                    i = i + 1
+        else:
+            space = True
+        i = i + 1
+    return "".join(result[:52])
+
+
+def format_patch(commit, mainline=None, repo=None, references=None,
+ directory=""):
+ name = os.path.join(directory, format_sanitized_subject(commit.message) +
+ ".patch")
+
+ with open(name, mode="w") as f:
+ f.write("From: %s <%s>\n" % (commit.author.name, commit.author.email,))
+ tzinfo = FixedOffset(commit.author.offset)
+ dt = datetime.datetime.fromtimestamp(float(commit.author.time), tzinfo)
+ f.write("Date: %s\n" % (dt.strftime("%c %z"),))
+ if mainline and repo is None:
+ f.write("Patch-mainline: %s\n" % (mainline,))
+ f.write("Git-commit: %s\n" % (str(commit.id),))
+ elif mainline is None and repo:
+ f.write("Patch-mainline: Queued in subsystem maintainer repository\n")
+ f.write("Git-repo: %s\n" % (repo,))
+ f.write("Git-commit: %s\n" % (str(commit.id),))
+ else:
+ f.write("Patch-mainline: No\n")
+ if references is not None:
+ f.write("References: %s\n" % (references,))
+ f.write("Subject: %s" % (commit.message,))
+ if not commit.message.endswith("\n"):
+ f.write("\n")
+ if commit.message.find("\n") == -1:
+ f.write("\n")
+ else:
+ if commit.message.count("\n") == 1:
+ # ends with a newline but consists only of a subject.
+ f.write("\n")
+ f.write("---\n")
+ args = []
+ if len(commit.parents):
+ args.append(commit.parents[0].tree)
+ diff = commit.tree.diff_to_tree(*args, swap=True)
+ f.write(diff.stats.format(pygit2.GIT_DIFF_STATS_FULL, width=79))
+ f.write("\n")
+ patch = diff.patch
+ if patch is not None:
+ f.write(diff.patch)
+ f.write("--\ngs-tests\n")
+
+ return name
+
+
+def format_series(content):
+ def format_section(section):
+ if section[0] is not None:
+ header = "\t# %s\n" % (section[0],)
+ else:
+ header = ""
+ return "%s%s" % (header,
+ "\n".join(["\t%s" % (name,) for name in section[1]]),)
+ return \
+""" ########################################################
+ # sorted patches
+ ########################################################
+%s
+ ########################################################
+ # end of sorted patches
+ ########################################################
+""" % (
+ "\n\n".join(map(format_section, content)))
+
diff --git a/scripts/git_sort/tests/test_git_sort.py b/scripts/git_sort/tests/test_git_sort.py
new file mode 100755
index 0000000000..187b38cfbd
--- /dev/null
+++ b/scripts/git_sort/tests/test_git_sort.py
@@ -0,0 +1,388 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import collections
+import os
+import os.path
+import pygit2
+import shelve
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+
+import git_sort
+import lib
+
+
+class TestRepoURL(unittest.TestCase):
+ def test_eq(self):
+ self.assertEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"),
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ )
+
+ self.assertEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux-2.6.git"),
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ )
+
+ self.assertEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"),
+ git_sort.RepoURL("http://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ )
+
+ self.assertNotEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"),
+ git_sort.RepoURL("git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ )
+
+ self.assertEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"),
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux")
+ )
+
+ self.assertEqual(
+ git_sort.RepoURL("git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"),
+ git_sort.RepoURL("torvalds/linux.git")
+ )
+
+ self.assertEqual(
+ git_sort.RepoURL("torvalds/linux.git"),
+ git_sort.RepoURL("torvalds/linux")
+ )
+
+ self.assertNotEqual(
+ git_sort.RepoURL("torvalds/linux.git"),
+ git_sort.RepoURL("davem/net.git")
+ )
+
+
+ def test_repr(self):
+ url_canon = "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git"
+ url_non_canon = "git://linuxtv.org/media_tree.git"
+
+ self.assertEqual(
+ repr(git_sort.RepoURL(url_canon)),
+ url_canon
+ )
+
+ self.assertEqual(
+ str(git_sort.RepoURL(url_canon)),
+ "torvalds/linux"
+ )
+
+ self.assertEqual(
+ repr(git_sort.RepoURL(url_non_canon)),
+ url_non_canon
+ )
+
+ self.assertEqual(
+ str(git_sort.RepoURL(url_non_canon)),
+ url_non_canon
+ )
+
+ self.assertEqual(
+ str(git_sort.RepoURL(None)),
+ ""
+ )
+
+
+class TestHead(unittest.TestCase):
+ def test_eq(self):
+ self.assertEqual(
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git")),
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git")),
+ )
+
+ self.assertEqual(
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git")),
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git"), "master"),
+ )
+
+ self.assertNotEqual(
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git")),
+ git_sort.Head(git_sort.RepoURL("davem/net.git")),
+ )
+
+ self.assertTrue(
+ git_sort.Head(git_sort.RepoURL("torvalds/linux.git")) <
+ git_sort.Head(git_sort.RepoURL("davem/net.git"))
+ )
+
+
+class TestIndex(unittest.TestCase):
+ def setUp(self):
+ os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+ self.repo_dir = tempfile.mkdtemp(prefix="gs_repo")
+ self.repo = pygit2.init_repository(self.repo_dir)
+
+ author = pygit2.Signature('Alice Author', 'alice@authors.tld')
+ committer = pygit2.Signature('Cecil Committer', 'cecil@committers.tld')
+ tree = self.repo.TreeBuilder().write()
+
+ parent = []
+ self.commits = []
+ for i in range(3):
+ subject = "commit %d" % (i,)
+ cid = self.repo.create_commit(
+ "refs/heads/master",
+ author,
+ committer,
+ "%s\n\nlog" % (subject,),
+ tree,
+ parent
+ )
+ parent = [cid]
+ self.commits.append(str(cid))
+
+ self.index = git_sort.SortIndex(self.repo)
+
+
+ def tearDown(self):
+ shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+ shutil.rmtree(self.repo_dir)
+
+
+ def test_heads(self):
+ self.assertEqual(
+ git_sort.get_heads(self.repo),
+ collections.OrderedDict([
+ (git_sort.Head(git_sort.RepoURL(None), "HEAD"),
+ self.commits[-1],)])
+ )
+
+
+ def test_lookup(self):
+ ci0 = self.index.lookup(self.commits[0])
+ ci1 = self.index.lookup(self.commits[1])
+
+ self.assertEqual(ci0, ci0)
+ self.assertTrue(ci0 < ci1)
+
+
+ def test_empty_input(self):
+ os.chdir(self.repo_dir)
+ gs_path = os.path.join(lib.libdir(), "git_sort.py")
+ subprocess.check_output(gs_path, input="\n".encode())
+
+
+class TestIndexLinux(unittest.TestCase):
+ def setUp(self):
+ os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+ self.repo_dir = tempfile.mkdtemp(prefix="gs_repo")
+ self.repo = pygit2.init_repository(self.repo_dir)
+
+ author = pygit2.Signature('Alice Author', 'alice@authors.tld')
+ committer = pygit2.Signature('Cecil Committer', 'cecil@committers.tld')
+ tree = self.repo.TreeBuilder().write()
+
+ self.commits = []
+ m0 = self.repo.create_commit(
+ "refs/heads/mainline",
+ author,
+ committer,
+ "mainline 0\n\nlog",
+ tree,
+ []
+ )
+ self.commits.append(self.repo.get(m0))
+ self.repo.create_reference_direct("refs/tags/v4.8", m0, False)
+ self.repo.create_tag("v4.9", m0, pygit2.GIT_REF_OID, committer,
+ "Linux 4.9")
+
+ n0 = self.repo.create_commit(
+ "refs/heads/net",
+ author,
+ committer,
+ "net 0\n\nlog",
+ tree,
+ [m0]
+ )
+ self.commits.append(self.repo.get(n0))
+
+ self.repo.checkout("refs/heads/mainline")
+
+ m1 = self.repo.create_commit(
+ "refs/heads/mainline",
+ author,
+ committer,
+ "mainline 1, merge net\n\nlog",
+ tree,
+ [m0, n0]
+ )
+ self.repo.create_tag("v4.10", m1, pygit2.GIT_REF_OID, committer,
+ "Linux 4.10")
+
+ self.repo.remotes.create("origin",
+ "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ self.repo.references.create("refs/remotes/origin/master", m1)
+
+ self.repo.remotes.create("net",
+ "git://git.kernel.org/pub/scm/linux/kernel/git/davem/net.git")
+ self.repo.references.create("refs/remotes/net/master", n0)
+
+ self.heads = {"mainline" : str(m1),
+ "net" : str(n0)}
+ self.index = git_sort.SortIndex(self.repo)
+
+ #sys.stdin.readline()
+
+
+ def tearDown(self):
+ shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+ shutil.rmtree(self.repo_dir)
+
+
+ def test_heads(self):
+ self.assertEqual(
+ git_sort.get_heads(self.repo),
+ collections.OrderedDict([
+ (git_sort.Head(git_sort.RepoURL("torvalds/linux.git")),
+ self.heads["mainline"]),
+ (git_sort.Head(git_sort.RepoURL("davem/net.git")),
+ self.heads["net"]),
+ ])
+ )
+
+
+ def test_describe(self):
+ ic = self.index.lookup(str(self.commits[1].id))
+ self.assertEqual(self.index.describe(ic.index), "v4.10")
+
+
+class TestCache(unittest.TestCase):
+ def setUp(self):
+ os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+ self.repo_dir = tempfile.mkdtemp(prefix="gs_repo")
+ self.repo = pygit2.init_repository(self.repo_dir)
+
+ author = pygit2.Signature('Alice Author', 'alice@authors.tld')
+ committer = pygit2.Signature('Cecil Committer', 'cecil@committers.tld')
+ tree = self.repo.TreeBuilder().write()
+
+ parent = []
+ commits = []
+ for i in range(3):
+ subject = "commit %d" % (i,)
+ cid = self.repo.create_commit(
+ "refs/heads/master",
+ author,
+ committer,
+ "%s\n\nlog" % (subject,),
+ tree,
+ parent
+ )
+ parent = [cid]
+ commits.append("%s %s" % (str(cid), subject,))
+ self.commits = commits
+
+
+ def tearDown(self):
+ shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+ shutil.rmtree(self.repo_dir)
+
+
+    def test_cache(self):  # exercises git-sort's on-disk cache lifecycle
+        gs_path = os.path.join(lib.libdir(), "git_sort.py")
+        cache_path = os.path.join(os.environ["XDG_CACHE_HOME"], "git-sort")
+
+        input_text = "\n".join(self.commits)
+
+        os.chdir(self.repo_dir)
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[-1], "Will rebuild history")
+
+        # "-d" only reports what would happen; it must not create a cache
+        retval = 0
+        try:
+            os.stat(cache_path)
+        except OSError as e:
+            retval = e.errno
+        self.assertEqual(retval, 2)  # errno 2 == ENOENT: cache file absent
+
+        output_ref = subprocess.check_output(
+            gs_path, input=input_text.encode()).decode()
+        time1 = os.stat(cache_path).st_mtime
+
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[-1], "Will not rebuild history")
+
+        # "-d" must not modify an existing cache either
+        self.assertEqual(os.stat(cache_path).st_mtime, time1)
+
+        # test that git-sort action is the same as "-d" states (no cache
+        # rebuild)
+        output = subprocess.check_output(
+            gs_path, input=input_text.encode()).decode()
+        self.assertEqual(output, output_ref)
+        self.assertEqual(os.stat(cache_path).st_mtime, time1)
+
+        # a cache recorded with an unsupported version number must be rebuilt
+        shelve.open(cache_path)["version"] = 1
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[1], "Unsupported cache version")
+        self.assertEqual(output[-1], "Will rebuild history")
+
+        output = subprocess.check_output(
+            gs_path, input=input_text.encode()).decode()
+        self.assertEqual(output, output_ref)
+
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[-1], "Will not rebuild history")
+
+        # corrupt the cache structure; the next run must detect and rebuild it
+        shelve.open(cache_path)["history"] = {
+            "linux.git" : ["abc", "abc", "abc"],
+            "net" : ["abc", "abc", "abc"],
+            "net-next" : ["abc", "abc", "abc"],
+        }
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[1], "Inconsistent cache content")
+        self.assertEqual(output[-1], "Will rebuild history")
+
+        output = subprocess.check_output(
+            gs_path, input=input_text.encode()).decode()
+        self.assertEqual(output, output_ref)
+
+        output = subprocess.check_output([gs_path, "-d"]).decode().splitlines()
+        self.assertEqual(output[-1], "Will not rebuild history")
+
+
+class TestErrors(unittest.TestCase):
+ def setUp(self):
+ self.repo_dir = tempfile.mkdtemp(prefix="gs_repo")
+
+
+ def tearDown(self):
+ shutil.rmtree(self.repo_dir)
+
+
+ def test_errors(self):
+ gs_path = os.path.join(lib.libdir(), "git_sort.py")
+ os.chdir(self.repo_dir)
+
+ try:
+ subprocess.check_output([gs_path, "-d"], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertEqual(err.output.decode().strip(),
+ "Error: Not a git repository")
+ else:
+ self.assertTrue(False)
+
+ try:
+ subprocess.check_output([gs_path], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertEqual(err.output.decode().strip(),
+ "Error: Not a git repository")
+ else:
+ self.assertTrue(False)
+
+
+if __name__ == '__main__':
+ # Run a single testcase
+ suite = unittest.TestLoader().loadTestsFromTestCase(TestIndexLinux)
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/git_sort/tests/test_quilt_mode.py b/scripts/git_sort/tests/test_quilt_mode.py
new file mode 100755
index 0000000000..95f35a74b4
--- /dev/null
+++ b/scripts/git_sort/tests/test_quilt_mode.py
@@ -0,0 +1,726 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os
+import os.path
+import pygit2
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+
+import git_sort
+import lib
+import series_conf
+import tests.support
+
+
+class TestQuiltMode(unittest.TestCase):
+ def setUp(self):
+ os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+ # setup stub linux repository
+ os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+ self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+ self.repo.config["user.email"] = "agraf@suse.de"
+ self.repo.config["user.name"] = "Alexander Graf"
+
+ author = pygit2.Signature("Alice Author", "alice@authors.tld")
+ committer = pygit2.Signature("Cecil Committer", "cecil@committers.tld")
+ tree = self.repo.TreeBuilder()
+
+ tree.insert("README",
+ self.repo.create_blob("NAME = Roaring Lionus\n"),
+ pygit2.GIT_FILEMODE_BLOB)
+ self.commits = []
+ self.commits.append(self.repo.create_commit(
+ "refs/heads/mainline",
+ author,
+ committer,
+ "Linux 4.9",
+ tree.write(),
+ []
+ ))
+ self.repo.create_tag("v4.9", self.commits[-1], pygit2.GIT_REF_OID,
+ committer, "Linux 4.9")
+
+ tree.insert("README",
+ self.repo.create_blob("NAME = Anniversary Edition\n"),
+ pygit2.GIT_FILEMODE_BLOB)
+ self.commits.append(self.repo.create_commit(
+ "refs/heads/mainline",
+ author,
+ committer,
+ "Linux 4.10-rc5",
+ tree.write(),
+ [self.commits[-1]]
+ ))
+ self.repo.create_tag("v4.10-rc5", self.commits[-1], pygit2.GIT_REF_OID,
+ committer, "Linux 4.10-rc5")
+
+ tree.insert("driver.c",
+ self.repo.create_blob("#include <linux/module.h>\n"),
+ pygit2.GIT_FILEMODE_BLOB)
+ author2 = pygit2.Signature("Marc Zyngier", "marc.zyngier@arm.com")
+ self.commits.append(self.repo.create_commit(
+ "refs/heads/mainline",
+ author2,
+ author2,
+ """KVM: arm/arm64: vgic-v3: Add accessors for the ICH_APxRn_EL2 registers
+
+As we're about to access the Active Priority registers a lot more,
+let's define accessors that take the register number as a parameter.
+
+Tested-by: Alexander Graf <agraf@suse.de>
+Acked-by: David Daney <david.daney@cavium.com>
+Reviewed-by: Eric Auger <eric.auger@redhat.com>
+Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
+Signed-off-by: Christoffer Dall <cdall@linaro.org>
+""",
+ tree.write(),
+ [self.commits[-1]]
+ ))
+
+ tree.insert("core.c",
+ self.repo.create_blob("#include <linux/kernel.h>\n"),
+ pygit2.GIT_FILEMODE_BLOB)
+ author3 = pygit2.Signature("Peter Zijlstra", "peterz@infradead.org")
+ self.commits.append(self.repo.create_commit(
+ "refs/heads/mainline",
+ author3,
+ author3,
+ """sched/debug: Ignore TASK_IDLE for SysRq-W
+
+Markus reported that tasks in TASK_IDLE state are reported by SysRq-W,
+which results in undesirable clutter.
+
+Reported-by: Markus Trippelsdorf <markus@trippelsdorf.de>
+Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
+Cc: Linus Torvalds <torvalds@linux-foundation.org>
+Cc: Peter Zijlstra <peterz@infradead.org>
+Cc: Thomas Gleixner <tglx@linutronix.de>
+Cc: linux-kernel@vger.kernel.org
+Signed-off-by: Ingo Molnar <mingo@kernel.org>
+""",
+ tree.write(),
+ [self.commits[-1]]
+ ))
+
+ tree.insert("README",
+ self.repo.create_blob("NAME = Fearless Coyote\n"),
+ pygit2.GIT_FILEMODE_BLOB)
+ self.commits.append(self.repo.create_commit(
+ "refs/heads/mainline",
+ author,
+ committer,
+ "Linux 4.10-rc6",
+ tree.write(),
+ [self.commits[-1]]
+ ))
+ self.repo.create_tag("v4.10-rc6", self.commits[-1], pygit2.GIT_REF_OID,
+ committer, "Linux 4.10-rc6")
+
+ self.repo.remotes.create(
+ "origin",
+ "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+ self.repo.references.create("refs/remotes/origin/master",
+ self.commits[-1])
+
+ # setup stub kernel-source content
+ self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+ patch_dir = os.path.join(self.ks_dir, "patches.suse")
+ os.mkdir(patch_dir)
+ os.chdir(patch_dir)
+ with open(os.path.join(self.ks_dir, "series.conf"), mode="w") as f:
+ f.write(
+"""# Kernel patches configuration file
+
+ ########################################################
+ # sorted patches
+ ########################################################
+""")
+ for commit, tag in (
+ (self.commits[0], "v4.9",),
+ (self.commits[1], "v4.10-rc5",),
+ ):
+ f.write("\tpatches.suse/%s\n" % (
+ tests.support.format_patch(self.repo.get(commit),
+ mainline=tag),))
+ f.write(
+"""
+ ########################################################
+ # end of sorted patches
+ ########################################################
+""")
+
+ ss_path = os.path.join(lib.libdir(), "series_sort.py")
+ os.chdir(self.ks_dir)
+
+ # This overlaps what is tested by test_series_sort, hence, not put in a
+ # test of its own.
+ subprocess.check_call([ss_path, "-c", "series.conf"])
+ with open("series.conf") as f:
+ content1 = f.read()
+ subprocess.check_call([ss_path, "series.conf"])
+ with open("series.conf") as f:
+ content2 = f.read()
+ self.assertEqual(content2, content1)
+
+ subprocess.check_call(("git", "init", "./",), stdout=subprocess.DEVNULL)
+ subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(("git", "commit", "-m", "import",),
+ stdout=subprocess.DEVNULL)
+
+ os.makedirs("tmp/current")
+ os.chdir("tmp/current")
+ subprocess.check_call(
+ ["quilt", "setup", "--sourcedir", "../../", "../../series.conf"])
+
+
+ def tearDown(self):
+ shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+ shutil.rmtree(os.environ["LINUX_GIT"])
+ shutil.rmtree(self.ks_dir)
+
+
+ def test_quilt_mode(self):
+ qm_path = os.path.join(lib.libdir(), "quilt-mode.sh")
+
+ # test series file replacement
+ with open("series") as f:
+ entries = ["%s\n" % (l,) for l in
+ [line.strip() for line in f.readlines()]
+ if l and not l.startswith("#")]
+ # remove the symlink
+ os.unlink("series")
+ with open("series", mode="w") as f:
+ f.writelines(entries)
+ subprocess.check_call(
+ (os.path.join(lib.libdir(), "qgoto.py"), str(self.commits[0]),),
+ stdout=subprocess.DEVNULL)
+
+ # test qgoto
+ subprocess.check_call(
+ ". %s; qgoto %s" % (qm_path, str(self.commits[0])), shell=True,
+ stdout=subprocess.DEVNULL, executable="/bin/bash")
+
+ # test qdupcheck
+ try:
+ subprocess.check_output(
+ ". %s; qdupcheck %s" % (qm_path, str(self.commits[1])),
+ shell=True, executable="/bin/bash")
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertEqual(err.output.decode().splitlines()[-1].strip(),
+ "patches.suse/Linux-4.10-rc5.patch")
+ else:
+ self.assertTrue(False)
+
+ subprocess.check_call(
+ ". %s; qgoto %s" % (qm_path, str(self.commits[1])), shell=True,
+ stdout=subprocess.DEVNULL, executable="/bin/bash")
+
+ try:
+ subprocess.check_output(
+ ". %s; qdupcheck %s" % (qm_path, str(self.commits[1])),
+ shell=True, executable="/bin/bash")
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertEqual(err.output.decode().splitlines()[-1].strip(),
+ "This is the top patch.")
+ else:
+ self.assertTrue(False)
+
+ # import commits[2]
+ subprocess.check_call(
+ ". %s; qgoto %s" % (qm_path, str(self.commits[2])), shell=True,
+ executable="/bin/bash")
+ subprocess.check_call(
+ """. %s; qcp -r "bsc#1077761" -d patches.suse %s""" % (
+ qm_path, str(self.commits[2])),
+ shell=True, stdout=subprocess.DEVNULL, executable="/bin/bash")
+
+ retval = subprocess.check_output(("quilt", "--quiltrc", "-", "next",))
+ name = "patches.suse/KVM-arm-arm64-vgic-v3-Add-accessors-for-the-ICH_APxR.patch"
+ self.assertEqual(retval.decode().strip(), name)
+
+ try:
+ with open(os.path.join(self.ks_dir, name)) as f:
+ retval = f.readlines().index(
+ "Acked-by: Alexander Graf <agraf@suse.de>\n")
+ except ValueError:
+ retval = -1
+ self.assertNotEqual(retval, -1)
+
+ subprocess.check_call(("quilt", "--quiltrc", "-", "push",),
+ stdout=subprocess.DEVNULL)
+
+ try:
+ subprocess.check_output(("quilt", "--quiltrc", "-", "pop",),
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertTrue(err.output.decode().endswith(
+ "needs to be refreshed first.\n"))
+ else:
+ self.assertTrue(False)
+
+ subprocess.check_call(("quilt", "--quiltrc", "-", "refresh",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(("quilt", "--quiltrc", "-", "pop",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(("quilt", "--quiltrc", "-", "push",),
+ stdout=subprocess.DEVNULL)
+
+ # prepare repository
+ os.chdir(self.ks_dir)
+ subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(
+ ("git", "commit", "-m",
+ "KVM: arm/arm64: vgic-v3: Add accessors for the ICH_APxRn_EL2 registers",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(("git", "checkout", "-q", "-b", "other",
+ "HEAD^",))
+ shutil.rmtree("tmp/current")
+ os.makedirs("tmp/current")
+ os.chdir("tmp/current")
+ subprocess.check_call(("quilt", "setup", "--sourcedir", "../../",
+ "../../series.conf",),)
+
+ # import commits[3]
+ subprocess.check_call(
+ ". %s; qgoto %s" % (qm_path, str(self.commits[3])), shell=True,
+ stdout=subprocess.DEVNULL, executable="/bin/bash")
+ subprocess.check_call(
+ """. %s; qcp -r "bsc#123" -d patches.suse %s""" % (
+ qm_path, str(self.commits[3])),
+ shell=True, stdout=subprocess.DEVNULL, executable="/bin/bash")
+
+ subprocess.check_call(("quilt", "--quiltrc", "-", "push",),
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(("quilt", "--quiltrc", "-", "refresh",),
+ stdout=subprocess.DEVNULL)
+ name = subprocess.check_output(
+ ("quilt", "--quiltrc", "-", "top",)).decode().strip()
+
+ os.chdir(self.ks_dir)
+ subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+ stdout=subprocess.DEVNULL)
+
+ # test pre-commit.sh
+ pc_path = os.path.join(lib.libdir(), "pre-commit.sh")
+
+ subprocess.check_call(pc_path, stdout=subprocess.DEVNULL)
+
+ with open("series.conf") as f:
+ content = f.readlines()
+
+ content2 = list(content)
+ middle = int(len(content2) / 2)
+ content2[middle], content2[middle + 1] = \
+ content2[middle + 1], content2[middle]
+
+ with open("series.conf", mode="w") as f:
+ f.writelines(content2)
+
+ # check should be done against index, not working tree
+ subprocess.check_call(pc_path, stdout=subprocess.DEVNULL)
+
+ subprocess.check_call(("git", "add", "series.conf",),
+ stdout=subprocess.DEVNULL)
+
+ # ... test a bad sorted section
+ try:
+ subprocess.check_output(pc_path, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertTrue(err.output.decode().startswith(
+ "Input is not sorted."))
+ else:
+ self.assertTrue(False)
+
+ with open("series.conf", mode="w") as f:
+ f.writelines(content)
+
+ subprocess.check_call(("git", "add", "series.conf",),
+ stdout=subprocess.DEVNULL)
+
+ subprocess.check_call(("git", "commit", "-m",
+ "sched/debug: Ignore TASK_IDLE for SysRq-W",),
+ stdout=subprocess.DEVNULL)
+
+ # ... test a bad sorted patch
+ with open(name) as f:
+ content = f.readlines()
+ content2 = list(content)
+ for i in range(len(content2)):
+ if content2[i].startswith("Git-commit: "):
+ content2[i] = "Git-commit: cb329c2e40cf6cfc7bcd7c36ce5547f95e972ea5\n"
+ break
+ with open(name, mode="w") as f:
+ f.writelines(content2)
+ subprocess.check_call(("git", "add", name,), stdout=subprocess.DEVNULL)
+
+ try:
+ subprocess.check_output(pc_path, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as err:
+ self.assertEqual(err.returncode, 1)
+ self.assertTrue(err.output.decode().startswith(
+ "Error: There is a problem with patch \"%s\"." % (name,)))
+ else:
+ self.assertTrue(False)
+
+ with open(name, mode="w") as f:
+ f.writelines(content)
+ subprocess.check_call(("git", "add", name,), stdout=subprocess.DEVNULL)
+
+ # test merge_tool.py
+ subprocess.check_call(("git", "checkout", "-q", "master",))
+ shutil.rmtree("tmp/current")
+ subprocess.check_call(
+ ("git", "config", "--add", "mergetool.git-sort.cmd",
+ "%s $LOCAL $BASE $REMOTE $MERGED" % (
+ os.path.join(lib.libdir(), "merge_tool.py"),),))
+ subprocess.check_call(("git", "config", "--add",
+ "mergetool.git-sort.trustexitcode", "true",))
+ retval = subprocess.call(("git", "merge", "other",),
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ self.assertEqual(retval, 1)
+ retval = subprocess.check_output(
+ ("git", "mergetool", "--tool=git-sort", "series.conf",))
+ self.assertEqual(
+ retval.decode().splitlines()[-1].strip(),
+ "1 commits added, 0 commits removed from base to remote.")
+ with open("series.conf") as f:
+ entries = series_conf.filter_series(f.readlines())
+ self.assertEqual(entries,
+ ["patches.suse/%s.patch" %
+ (tests.support.format_sanitized_subject(
+ self.repo.get(commit).message),)
+ for commit in self.commits[:4]])
+ retval = subprocess.check_output(("git", "status", "--porcelain",
+ "series.conf",))
+ self.assertEqual(retval.decode().strip(), "M series.conf")
+
+
+class TestMergeTool(unittest.TestCase):
+    """Integration tests for scripts/git_sort/merge_tool.py used as a
+    `git mergetool` backend on series.conf merge conflicts.  Runs against
+    throw-away repositories created in setUp(); requires git, pygit2 and
+    the git_sort helper modules."""
+
+    def setUp(self):
+        """Create a stub upstream linux.git (three linear commits) and a
+        stub kernel-source work tree with the git-sort mergetool
+        configured."""
+        # Private git-sort cache directory; removed again in tearDown().
+        os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+        # setup stub linux repository
+        os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+        self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+
+        author = pygit2.Signature("Alice Author", "alice@authors.tld")
+        committer = pygit2.Signature("Cecil Committer", "cecil@committers.tld")
+        tree = self.repo.TreeBuilder()
+
+        k_org_canon_prefix = "git://git.kernel.org/pub/scm/linux/kernel/git/"
+        self.mainline_repo = k_org_canon_prefix + "torvalds/linux.git"
+        self.repo.remotes.create("origin", self.mainline_repo)
+
+        # Three linear commits on "mainline"; origin/master is set to the
+        # tip so git_sort indexes all of them as mainline commits.
+        self.commits = {}
+        self.commits["mainline 0"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 0\n\nlog",
+            tree.write(),
+            []
+        )
+
+        self.commits["mainline 1"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 1\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+
+        self.commits["mainline 2"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 2\n\nlog",
+            tree.write(),
+            [self.commits["mainline 1"]]
+        )
+        self.repo.references.create("refs/remotes/origin/master",
+                                    self.commits["mainline 2"])
+
+        self.repo.checkout("refs/heads/mainline")
+
+        # setup stub kernel-source content
+        self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+        os.chdir(self.ks_dir)
+
+        # Register merge_tool.py as the "git-sort" mergetool for this repo.
+        subprocess.check_call(("git", "init", "./",), stdout=subprocess.DEVNULL)
+        subprocess.check_call(
+            ("git", "config", "--add", "mergetool.git-sort.cmd",
+             "%s $LOCAL $BASE $REMOTE $MERGED" % (
+                 os.path.join(lib.libdir(), "merge_tool.py"),),))
+        subprocess.check_call(("git", "config", "--add",
+                               "mergetool.git-sort.trustexitcode", "true",))
+
+        self.patch_dir = "patches.suse"
+        os.mkdir(self.patch_dir)
+
+
+    def tearDown(self):
+        """Remove every temporary directory created by setUp()."""
+        shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+        shutil.rmtree(os.environ["LINUX_GIT"])
+        shutil.rmtree(self.ks_dir)
+
+
+    def test_moved(self):
+        """Resolve a conflict where a patch changed section on the remote
+        branch: "master" adds "mainline 2" to the sorted section while
+        "other" refreshes "mainline 1" out of the out-of-tree section.
+        The merge conflicts on series.conf; the git-sort mergetool must
+        resolve it and report the one commit whose section changed."""
+        names = {}
+
+        # local branch
+        names["mainline 0"] = tests.support.format_patch(
+            self.repo.get(self.commits["mainline 0"]), mainline="v0",
+            directory=self.patch_dir)
+        names["mainline 1"] = tests.support.format_patch(
+            self.repo.get(self.commits["mainline 1"]),
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    names["mainline 0"],
+                )),
+                ("out-of-tree patches", (
+                    names["mainline 1"],
+                )),
+            )))
+
+        subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+                              stdout=subprocess.DEVNULL)
+        subprocess.check_call(("git", "commit", "-m", "mainline 0",),
+                              stdout=subprocess.DEVNULL)
+
+        names["mainline 2"] = tests.support.format_patch(
+            self.repo.get(self.commits["mainline 2"]), mainline="v0",
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    names["mainline 0"],
+                    names["mainline 2"],
+                )),
+                ("out-of-tree patches", (
+                    names["mainline 1"],
+                )),
+            )))
+
+        subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+                              stdout=subprocess.DEVNULL)
+        subprocess.check_call(("git", "commit", "-m", "mainline 2",),
+                              stdout=subprocess.DEVNULL)
+
+        # remote branch
+        subprocess.check_call(("git", "checkout", "-q", "-b", "other",
+                               "HEAD^",))
+        # Refresh "mainline 1" with a proper mainline tag; on this branch it
+        # moves into the sorted section.
+        names["mainline 1"] = tests.support.format_patch(
+            self.repo.get(self.commits["mainline 1"]), mainline="v0",
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    names["mainline 0"],
+                    names["mainline 1"],
+                )),
+            )))
+
+        subprocess.check_call(("git", "add", "series.conf", "patches.suse",),
+                              stdout=subprocess.DEVNULL)
+        subprocess.check_call(("git", "commit", "-m", "Refresh mainline 1",),
+                              stdout=subprocess.DEVNULL)
+
+        # test merge_tool.py
+        subprocess.check_call(("git", "checkout", "-q", "master",))
+        # The merge is expected to conflict (git merge exits 1).
+        retval = subprocess.call(("git", "merge", "other",),
+                                 stdout=subprocess.DEVNULL,
+                                 stderr=subprocess.DEVNULL)
+        self.assertEqual(retval, 1)
+        #sys.stdin.readline()
+        retval = subprocess.check_output(
+            ("git", "mergetool", "--tool=git-sort", "series.conf",),
+            stdin=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        self.assertEqual(
+            "1 commits changed section from base to remote.",
+            retval.decode().splitlines()[-1].strip())
+        with open("series.conf") as f:
+            self.assertEqual(
+                tests.support.format_series((
+                    (None, (
+                        names["mainline 0"],
+                        names["mainline 1"],
+                        names["mainline 2"],
+                    )),
+                )),
+                f.read())
+        # After a successful mergetool run series.conf must show as staged
+        # modified ("M") in porcelain status.
+        retval = subprocess.check_output(("git", "status", "--porcelain",
+                                          "series.conf",))
+        self.assertEqual(retval.decode().strip(), "M series.conf")
+
+
+class TestQCP(unittest.TestCase):
+    """Exercise the qgoto/qcp helpers from quilt-mode.sh: importing a
+    commit from a stub upstream repository into a quilt-managed series."""
+
+    def setUp(self):
+        """Create a two-commit stub linux.git (the second commit carries a
+        Fixes: tag naming the first), a kernel-source tree whose sorted
+        series already contains the first commit, and a quilt work area
+        under tmp/current."""
+        os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+        # setup stub linux repository
+        os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+        self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+        self.repo.config["user.email"] = "author1@example.com"
+        self.repo.config["user.name"] = "Author One"
+
+        author = pygit2.Signature("Author One", "author1@example.com")
+        committer = pygit2.Signature("Maintainer One", "maintainer1@example.com")
+        tree = self.repo.TreeBuilder()
+
+        tree.insert("driver.c",
+                    self.repo.create_blob("#include <bad.h>\n"),
+                    pygit2.GIT_FILEMODE_BLOB)
+        self.commits = []
+        self.commits.append(self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            """Add a very small module
+
+... which was not tested.
+
+Signed-off-by: Author One <author1@example.com>
+Signed-off-by: Maintainer One <maintainer@example.com>
+""",
+            tree.write(),
+            []
+        ))
+
+        tree.insert("driver.c",
+                    self.repo.create_blob("#include <linux/module.h>\n"),
+                    pygit2.GIT_FILEMODE_BLOB)
+        # NOTE(review): below, in `% (str(self.commits[-1],))`, the comma
+        # sits inside the str() call, so this is `% str(...)` (one value,
+        # which works for the single %s) rather than a 1-tuple — confirm
+        # the comma placement was intended.
+        self.commits.append(self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            """Fix the very small module
+
+syzbot is reporting deadlocks at __blkdev_get() [1].
+
+----------------------------------------
+[   92.493919] systemd-udevd   D12696   525      1 0x00000000
+[   92.495891] Call Trace:
+[   92.501560]  schedule+0x23/0x80
+[   92.502923]  schedule_preempt_disabled+0x5/0x10
+[   92.504645]  __mutex_lock+0x416/0x9e0
+[   92.510760]  __blkdev_get+0x73/0x4f0
+[   92.512220]  blkdev_get+0x12e/0x390
+[   92.518151]  do_dentry_open+0x1c3/0x2f0
+[   92.519815]  path_openat+0x5d9/0xdc0
+[   92.521437]  do_filp_open+0x7d/0xf0
+[   92.527365]  do_sys_open+0x1b8/0x250
+[   92.528831]  do_syscall_64+0x6e/0x270
+[   92.530341]  entry_SYSCALL_64_after_hwframe+0x42/0xb7
+
+[   92.931922] 1 lock held by systemd-udevd/525:
+[   92.933642]  #0: 00000000a2849e25 (&bdev->bd_mutex){+.+.}, at: __blkdev_get+0x73/0x4f0
+----------------------------------------
+
+The reason of deadlock turned out that wait_event_interruptible() in
+
+Reported-by: Markus Trippelsdorf <markus@trippelsdorf.de>
+Fixes: %s ("Add a very small module")
+Signed-off-by: Author One <author1@example.com>
+Signed-off-by: Maintainer One <maintainer@example.com>
+""" % (str(self.commits[-1],)),
+            tree.write(),
+            [self.commits[-1]]
+        ))
+
+        self.repo.create_tag("v4.10-rc6", self.commits[-1], pygit2.GIT_REF_OID,
+                             committer, "Linux 4.10-rc6")
+
+        self.repo.remotes.create(
+            "origin",
+            "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+        self.repo.references.create("refs/remotes/origin/master",
+                                    self.commits[-1])
+
+        # setup stub kernel-source content
+        self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+        patch_dir = os.path.join(self.ks_dir, "patches.suse")
+        os.mkdir(patch_dir)
+        os.chdir(patch_dir)
+        # The string literals below are deliberately dedented to column 0 so
+        # that series.conf receives them verbatim.
+        with open(os.path.join(self.ks_dir, "series.conf"), mode="w") as f:
+            f.write(
+"""# Kernel patches configuration file
+
+	########################################################
+	# sorted patches
+	########################################################
+""")
+            f.write("\tpatches.suse/%s\n" % (
+                tests.support.format_patch(self.repo.get(self.commits[0]),
+                                           mainline="v4.9",
+                                           references="bsc#123"),))
+            f.write(
+"""
+	########################################################
+	# end of sorted patches
+	########################################################
+""")
+
+        ss_path = os.path.join(lib.libdir(), "series_sort.py")
+        os.chdir(self.ks_dir)
+
+        # This overlaps what is tested by test_series_sort, hence, not put in a
+        # test of its own.
+        subprocess.check_call([ss_path, "-c", "series.conf"])
+        with open("series.conf") as f:
+            content1 = f.read()
+        subprocess.check_call([ss_path, "series.conf"])
+        with open("series.conf") as f:
+            content2 = f.read()
+        self.assertEqual(content2, content1)
+
+        os.makedirs("tmp/current")
+        os.chdir("tmp/current")
+        subprocess.check_call(
+            ["quilt", "setup", "--sourcedir", "../../", "../../series.conf"])
+
+
+    def tearDown(self):
+        """Remove every temporary directory created by setUp()."""
+        shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+        shutil.rmtree(os.environ["LINUX_GIT"])
+        shutil.rmtree(self.ks_dir)
+
+
+    def test_fixup(self):
+        """qgoto + qcp -f must import commits[1] as a patch; afterwards it
+        must be the next patch in the quilt series."""
+        qm_path = os.path.join(lib.libdir(), "quilt-mode.sh")
+
+        # import commits[1]
+        subprocess.check_call(
+            ". %s; qgoto %s" % (qm_path, str(self.commits[1])), shell=True,
+            stdout=subprocess.DEVNULL, executable="/bin/bash")
+        subprocess.check_call(
+            """. %s; qcp -f %s""" % (
+                qm_path, str(self.commits[1])),
+            shell=True, stdout=subprocess.DEVNULL, executable="/bin/bash")
+
+        retval = subprocess.check_output(("quilt", "--quiltrc", "-", "next",))
+        name = "patches.suse/Fix-the-very-small-module.patch"
+        self.assertEqual(retval.decode().strip(), name)
+
+
+if __name__ == '__main__':
+    # Run a single testcase
+    # (use `python -m unittest` on the module to run every class instead)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestQCP)
+    unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/git_sort/tests/test_series_insert.py b/scripts/git_sort/tests/test_series_insert.py
new file mode 100755
index 0000000000..c93489a0c9
--- /dev/null
+++ b/scripts/git_sort/tests/test_series_insert.py
@@ -0,0 +1,118 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os
+import pygit2
+import shutil
+import subprocess
+import tempfile
+import unittest
+import sys
+
+import git_sort
+import lib
+import tests.support
+
+
+class TestSeriesInsert(unittest.TestCase):
+    """Exercise series_insert.py: inserting a patch into the sorted section
+    of series.conf, and the error path for an invalid Git-commit tag."""
+
+    def setUp(self):
+        """Create a three-commit stub linux.git and format each commit as a
+        patch in a stub kernel-source tree (series.conf is written per
+        test)."""
+        os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+        # setup stub linux repository
+        os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+        self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+
+        author = pygit2.Signature('Alice Author', 'alice@authors.tld')
+        committer = pygit2.Signature('Cecil Committer', 'cecil@committers.tld')
+        tree = self.repo.TreeBuilder().write()
+
+        # Linear history "mainline 0".."mainline 2" on master.
+        parent = []
+        commits = []
+        for i in range(3):
+            subject = "mainline %d" % (i,)
+            cid = self.repo.create_commit(
+                "refs/heads/master",
+                author,
+                committer,
+                "%s\n\nlog" % (subject,),
+                tree,
+                parent
+            )
+            parent = [cid]
+            commits.append(cid)
+        self.commits = commits
+
+        self.repo.remotes.create(
+            "origin",
+            "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git")
+        self.repo.references.create("refs/remotes/origin/master", commits[-1])
+
+        self.index = git_sort.SortIndex(self.repo)
+
+        # setup stub kernel-source content
+        self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+        patch_dir = os.path.join(self.ks_dir, "patches.suse")
+        os.mkdir(patch_dir)
+        os.chdir(patch_dir)
+        for commit in commits:
+            tests.support.format_patch(self.repo.get(commit),
+                                       mainline="v3.45-rc6")
+
+    def tearDown(self):
+        """Remove every temporary directory created by setUp()."""
+        shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+        shutil.rmtree(os.environ["LINUX_GIT"])
+        shutil.rmtree(self.ks_dir)
+
+
+    def test_simple(self):
+        """Insert mainline-1 between mainline-0 and mainline-2, then verify
+        that an invalid Git-commit tag makes series_insert.py exit 1 with a
+        specific error message."""
+        si_path = os.path.join(lib.libdir(), "series_insert.py")
+        os.chdir(self.ks_dir)
+
+        series = "series.conf"
+        series1 = tests.support.format_series((
+            (None,
+             ("patches.suse/mainline-%d.patch" % (i,) for i in (0, 2,))),
+        ))
+        with open(series, mode="w") as f:
+            f.write(series1)
+
+        subprocess.check_call([si_path, "patches.suse/mainline-1.patch"])
+        with open(series) as f:
+            content = f.read()
+        self.assertEqual(content,
+                         tests.support.format_series((
+                             (None,
+                              ("patches.suse/mainline-%d.patch" % (i,) for i in range(3))),
+                         )))
+
+        # Corrupt the patch's Git-commit tag in place.
+        content = []
+        with open("patches.suse/mainline-1.patch") as f:
+            for line in f:
+                if line.startswith("Git-commit: "):
+                    line = "Git-commit: invalid\n"
+                content.append(line)
+        with open("patches.suse/mainline-1.patch", mode="w+") as f:
+            f.writelines(content)
+
+        with open(series, mode="w") as f:
+            f.write(series1)
+
+        try:
+            subprocess.check_output([si_path, "patches.suse/mainline-1.patch"],
+                                    stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as err:
+            self.assertEqual(err.returncode, 1)
+            self.assertEqual(
+                err.output.decode(),
+                "Error: Git-commit tag \"invalid\" in patch "
+                "\"patches.suse/mainline-1.patch\" is not a valid revision.\n")
+        else:
+            # check_output unexpectedly succeeded.
+            self.assertTrue(False)
+
+        os.unlink(series)
+
+
+if __name__ == '__main__':
+    # Run a single testcase
+    # (use `python -m unittest` on the module to run every class instead)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestSeriesInsert)
+    unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/git_sort/tests/test_series_sort.py b/scripts/git_sort/tests/test_series_sort.py
new file mode 100755
index 0000000000..66317c2025
--- /dev/null
+++ b/scripts/git_sort/tests/test_series_sort.py
@@ -0,0 +1,931 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os
+import os.path
+import pygit2
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+
+import git_sort
+import lib
+import tests.support
+
+
+class TestSeriesSort(unittest.TestCase):
+    """Exercise series_sort.py (check mode "-c" and in-place sort) against
+    a stub repository with mainline, net and out-of-tree branches."""
+
+    def setUp(self):
+        """Build the stub history: mainline merges net at m1 and continues
+        to m2; net gains n1/n2 after the merge; an out-of-tree branch
+        (oot0, oot1) hangs off m0.  Remotes exist for mainline (origin)
+        and net."""
+        self.ss_path = os.path.join(lib.libdir(), "series_sort.py")
+
+        os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+        # setup stub linux repository
+        os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+        self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+
+        author = pygit2.Signature('Alice Author', 'alice@authors.tld')
+        committer = pygit2.Signature('Cecil Committer', 'cecil@committers.tld')
+        tree = self.repo.TreeBuilder().write()
+
+        m0 = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 0\n\nlog",
+            tree,
+            []
+        )
+
+        n0 = self.repo.create_commit(
+            "refs/heads/net",
+            author,
+            committer,
+            "net 0\n\nlog",
+            tree,
+            [m0]
+        )
+
+        self.repo.checkout("refs/heads/mainline")
+        m1 = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 1, merge net\n\nlog",
+            tree,
+            [m0, n0]
+        )
+
+        m2 = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 2\n\nlog",
+            tree,
+            [m1]
+        )
+
+        n1 = self.repo.create_commit(
+            "refs/heads/net",
+            author,
+            committer,
+            "net 1\n\nlog",
+            tree,
+            [n0]
+        )
+
+        n2 = self.repo.create_commit(
+            "refs/heads/net",
+            author,
+            committer,
+            "net 2\n\nlog",
+            tree,
+            [n1]
+        )
+
+        oot0 = self.repo.create_commit(
+            "refs/heads/oot",
+            author,
+            committer,
+            "oot 0\n\nlog",
+            tree,
+            [m0]
+        )
+
+        oot1 = self.repo.create_commit(
+            "refs/heads/oot",
+            author,
+            committer,
+            "oot 1\n\nlog",
+            tree,
+            [oot0]
+        )
+
+        k_org_canon_prefix = "git://git.kernel.org/pub/scm/linux/kernel/git/"
+        origin_repo = k_org_canon_prefix + "torvalds/linux.git"
+        self.repo.remotes.create("origin", origin_repo)
+        self.repo.references.create("refs/remotes/origin/master", m2)
+
+        net_repo = k_org_canon_prefix + "davem/net.git"
+        self.repo.remotes.create("net", net_repo)
+        self.repo.references.create("refs/remotes/net/master", n2)
+
+        self.index = git_sort.SortIndex(self.repo)
+
+        # setup stub kernel-source content
+        self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+        patch_dir = os.path.join(self.ks_dir, "patches.suse")
+        os.mkdir(patch_dir)
+        os.chdir(patch_dir)
+        tests.support.format_patch(self.repo.get(m0), mainline="v3.45-rc6")
+        tests.support.format_patch(self.repo.get(n0), mainline="v3.45-rc6")
+        tests.support.format_patch(self.repo.get(n1), repo=net_repo)
+        tests.support.format_patch(self.repo.get(n2), repo=net_repo)
+        tests.support.format_patch(self.repo.get(oot0))
+        tests.support.format_patch(self.repo.get(oot1))
+        os.chdir(self.ks_dir)
+
+
+    def tearDown(self):
+        """Remove every temporary directory created by setUp()."""
+        shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+        shutil.rmtree(os.environ["LINUX_GIT"])
+        shutil.rmtree(self.ks_dir)
+
+
+    def test_nofile(self):
+        """series_sort.py must exit 1 with a clear message on a missing
+        series file."""
+        try:
+            subprocess.check_output([self.ss_path, "aaa"],
+                                    stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as err:
+            self.assertEqual(err.returncode, 1)
+            self.assertEqual(
+                err.output.decode(),
+                "Error: [Errno 2] No such file or directory: 'aaa'\n")
+        else:
+            self.assertTrue(False)
+
+
+    def test_absent(self):
+        """A series file without a sorted subseries is rejected."""
+        # NOTE(review): mkstemp() returns an open fd (`tmp`) which is never
+        # closed (same in the tests below), and unlike test_nofile the
+        # returncode is not asserted here — confirm both are intentional.
+        (tmp, series,) = tempfile.mkstemp(dir=self.ks_dir)
+        with open(series, mode="w") as f:
+            f.write(
+"""
+	patches.suse/unsorted-before.patch
+""")
+
+        try:
+            subprocess.check_output([self.ss_path, series],
+                                    stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as err:
+            self.assertEqual(err.output.decode(), "Error: Sorted subseries not found.\n")
+        else:
+            self.assertTrue(False)
+
+        os.unlink(series)
+
+
+    def test_sort_small(self):
+        """Check mode then sort mode on an already-sorted two-patch series;
+        sorting must leave the file unchanged."""
+        (tmp, series,) = tempfile.mkstemp(dir=self.ks_dir)
+        with open(series, mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    "patches.suse/mainline-0.patch",
+                    "patches.suse/net-0.patch",
+                )),
+            )))
+
+        subprocess.check_call([self.ss_path, "-c", series])
+        with open(series) as f:
+            content1 = f.read()
+        subprocess.check_call([self.ss_path, series])
+        with open(series) as f:
+            content2 = f.read()
+        self.assertEqual(content2, content1)
+
+        os.unlink(series)
+
+
+    def test_sort(self):
+        """Same as test_sort_small but with subsystem sections and unsorted
+        patches before and after the sorted subseries."""
+        (tmp, series,) = tempfile.mkstemp(dir=self.ks_dir)
+        with open(series, mode="w") as f:
+            f.write(
+"""
+	patches.suse/unsorted-before.patch
+
+	########################################################
+	# sorted patches
+	########################################################
+	patches.suse/mainline-0.patch
+	patches.suse/net-0.patch
+
+	# davem/net
+	patches.suse/net-1.patch
+	patches.suse/net-2.patch
+
+	# out-of-tree patches
+	patches.suse/oot-0.patch
+	patches.suse/oot-1.patch
+
+	########################################################
+	# end of sorted patches
+	########################################################
+
+	patches.suse/unsorted-after.patch
+""")
+
+        subprocess.check_call([self.ss_path, "-c", series])
+        with open(series) as f:
+            content1 = f.read()
+        subprocess.check_call([self.ss_path, series])
+        with open(series) as f:
+            content2 = f.read()
+        self.assertEqual(content2, content1)
+
+        os.unlink(series)
+
+
+class TestFromPatch(unittest.TestCase):
+ """
+ The naming of these tests stems from the following factors which determine
+ how a patch is sorted:
+ * commit found in index
+ * patch's series.conf current_head is indexed (ie. the local repo
+ fetches from that remote)
+ * patch appears to have moved downstream/didn't move/upstream
+ * patch's tag is good ("Git-repo:" == current_head.url)
+ * patches may be moved upstream between subsystem sections
+ """
+
+    def setUp(self):
+        """Build a stub history with several subsystem branches (net-next,
+        rdma, dledford/rdma, pablo/nf, net) merged into mainline up to a
+        tagged v4.1.  No remote is created for self.net_repo on purpose:
+        that URL backs the "notindexed" test cases, which rely on the repo
+        not being fetched.  Signatures use time=0/offset=0 so formatted
+        patches are deterministic."""
+        self.maxDiff = None
+        self.ss_path = os.path.join(lib.libdir(), "series_sort.py")
+
+        os.environ["XDG_CACHE_HOME"] = tempfile.mkdtemp(prefix="gs_cache")
+
+        # setup stub linux repository
+        os.environ["LINUX_GIT"] = tempfile.mkdtemp(prefix="gs_repo")
+        self.repo = pygit2.init_repository(os.environ["LINUX_GIT"])
+
+        author = pygit2.Signature("Alice Author", "alice@authors.tld", time=0,
+                                  offset=0)
+        committer = pygit2.Signature("Cecil Committer", "cecil@committers.tld",
+                                     time=0, offset=0)
+        tree = self.repo.TreeBuilder()
+
+        k_org_canon_prefix = "git://git.kernel.org/pub/scm/linux/kernel/git/"
+        self.mainline_repo = k_org_canon_prefix + "torvalds/linux.git"
+        self.repo.remotes.create("origin", self.mainline_repo)
+        self.net_next_repo = k_org_canon_prefix + "davem/net-next.git"
+        self.repo.remotes.create("net-next", self.net_next_repo)
+        self.net_repo = k_org_canon_prefix + "davem/net.git"
+        self.rdma_repo = k_org_canon_prefix + "rdma/rdma.git"
+        self.repo.remotes.create("rdma", self.rdma_repo)
+        self.dledford_repo = k_org_canon_prefix + "dledford/rdma.git"
+        self.repo.remotes.create("dledford/rdma", self.dledford_repo)
+        self.nf_repo = k_org_canon_prefix + "pablo/nf.git"
+        self.repo.remotes.create("pablo/nf", self.nf_repo)
+
+        self.commits = {}
+        self.commits["mainline 0"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 0\n\nlog",
+            tree.write(),
+            []
+        )
+
+        self.commits["net-next 0"] = self.repo.create_commit(
+            "refs/heads/net-next",
+            author,
+            committer,
+            "net-next 0\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+        self.repo.references.create("refs/remotes/net-next/master",
+                                    self.commits["net-next 0"])
+
+        # "other 0" belongs to no indexed remote head at all.
+        self.commits["other 0"] = self.repo.create_commit(
+            "refs/heads/other",
+            author,
+            committer,
+            "other 0\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+
+        self.commits["rdma for-next 0"] = self.repo.create_commit(
+            "refs/heads/rdma-next",
+            author,
+            committer,
+            "rdma for-next 0\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+
+        self.commits["mainline 1"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 1, merge rdma\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"], self.commits["rdma for-next 0"]]
+        )
+
+        self.commits["dledford/rdma k.o/for-next 0"] = self.repo.create_commit(
+            "refs/heads/dledford-next",
+            author,
+            committer,
+            "dledford/rdma k.o/for-next 0\n\nlog",
+            tree.write(),
+            [self.commits["rdma for-next 0"]]
+        )
+        # The same commit is reachable from three remote heads.
+        self.repo.references.create(
+            "refs/remotes/dledford/rdma/k.o/for-next",
+            self.commits["dledford/rdma k.o/for-next 0"])
+        self.repo.references.create("refs/remotes/rdma/for-next",
+                                    self.commits["dledford/rdma k.o/for-next 0"])
+        self.repo.references.create("refs/remotes/rdma/for-rc",
+                                    self.commits["dledford/rdma k.o/for-next 0"])
+
+        self.commits["net 0"] = self.repo.create_commit(
+            "refs/heads/net",
+            author,
+            committer,
+            "net 0\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+
+        self.commits["nf 0"] = self.repo.create_commit(
+            "refs/heads/nf",
+            author,
+            committer,
+            "nf 0\n\nlog",
+            tree.write(),
+            [self.commits["mainline 0"]]
+        )
+        self.repo.references.create("refs/remotes/pablo/nf/master",
+                                    self.commits["nf 0"])
+
+        self.commits["mainline 2"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "mainline 2, merge net\n\nlog",
+            tree.write(),
+            [self.commits["mainline 1"], self.commits["net 0"]]
+        )
+
+        self.commits["net 1"] = self.repo.create_commit(
+            "refs/heads/net",
+            author,
+            committer,
+            "net 1\n\nlog",
+            tree.write(),
+            [self.commits["net 0"]]
+        )
+
+        tree.insert("README",
+                    self.repo.create_blob("NAME = v4.1 release\n"),
+                    pygit2.GIT_FILEMODE_BLOB)
+        self.commits["v4.1"] = self.repo.create_commit(
+            "refs/heads/mainline",
+            author,
+            committer,
+            "Linux 4.1",
+            tree.write(),
+            [self.commits["mainline 2"]]
+        )
+        self.repo.references.create("refs/remotes/origin/master",
+                                    self.commits["v4.1"])
+        self.repo.create_tag("v4.1", self.commits["v4.1"], pygit2.GIT_REF_OID,
+                             committer, "Linux 4.1")
+
+        self.repo.checkout("refs/heads/mainline")
+
+        # setup stub kernel-source content
+        self.ks_dir = tempfile.mkdtemp(prefix="gs_ks")
+        os.chdir(self.ks_dir)
+        self.patch_dir = "patches.suse"
+        os.mkdir(self.patch_dir)
+
+
+    def tearDown(self):
+        """Remove every temporary directory created by setUp()."""
+        shutil.rmtree(os.environ["XDG_CACHE_HOME"])
+        shutil.rmtree(os.environ["LINUX_GIT"])
+        shutil.rmtree(self.ks_dir)
+
+
+    def check_tag(self, patch, tag, value):
+        """Assert that every line of `patch` starting with `tag` carries
+        exactly `value` (trailing newline stripped).
+        NOTE(review): passes silently if the tag is absent altogether."""
+        with open(patch) as f:
+            for line in f:
+                if line.startswith(tag):
+                    self.assertEqual(line[len(tag):-1], value)
+
+
+    def _transform_arg(move_upstream):
+        # Map the tri-state `move_upstream` flag to the series_sort.py
+        # argument lists to exercise: None -> both without and with "-u".
+        # NOTE(review): defined without `self`/@staticmethod; it works in
+        # Python 3 because callers go through self.__class__, but an
+        # explicit @staticmethod would make the intent clearer.
+        if move_upstream is None:
+            return [[], ["-u"]]
+        elif move_upstream:
+            return [["-u"]]
+        else:
+            return [[]]
+
+
+    def check_failure(self, msg, move_upstream=None):
+        """Run series_sort.py in check ("-c") and sort mode; both must
+        exit 1 and emit `msg`."""
+        for extra_arg in self.__class__._transform_arg(move_upstream):
+            try:
+                subprocess.check_output(
+                    [self.ss_path] + extra_arg + ["-c", "series.conf"],
+                    stderr=subprocess.STDOUT)
+            except subprocess.CalledProcessError as err:
+                self.assertEqual(err.returncode, 1)
+                # NOTE(review): assertTrue(x, msg) only checks that x is
+                # truthy — the second argument is the failure message, not a
+                # comparison.  The sort-mode branch below uses assertEqual;
+                # this line likely should as well.  Confirm before changing.
+                self.assertTrue(err.output.decode(), msg)
+            else:
+                self.assertTrue(False)
+
+            try:
+                subprocess.check_output(
+                    [self.ss_path] + extra_arg + ["series.conf"],
+                    stderr=subprocess.STDOUT)
+            except subprocess.CalledProcessError as err:
+                self.assertEqual(err.returncode, 1)
+                self.assertEqual(err.output.decode(), msg)
+            else:
+                self.assertTrue(False)
+
+
+    def check_constant(self, name, move_upstream=None):
+        """Check mode must pass and sort mode must leave both series.conf
+        and the patch file `name` unchanged."""
+        for extra_arg in self.__class__._transform_arg(move_upstream):
+            subprocess.check_call(
+                [self.ss_path] + extra_arg + ["-c", "series.conf"])
+
+            with open("series.conf") as f:
+                series1 = f.read()
+            with open(name) as f:
+                patch1 = f.read()
+            subprocess.check_call([self.ss_path] + extra_arg + ["series.conf"])
+            with open("series.conf") as f:
+                series2 = f.read()
+            with open(name) as f:
+                patch2 = f.read()
+            self.assertEqual(series2, series1)
+            self.assertEqual(patch2, patch1)
+
+
+    def check_outdated(self, name, msg, series2, move_upstream=None):
+        """Check mode must exit 2 with `msg`; sorting a copy of series.conf
+        must then yield `series2`.  The patch `name` is restored from a
+        pristine copy before each argument combination so every iteration
+        sees identical input."""
+        (tmp, series,) = tempfile.mkstemp(dir=self.ks_dir)
+        (tmp, patch,) = tempfile.mkstemp(dir=self.ks_dir)
+        shutil.copy(name, patch)
+
+        for extra_arg in self.__class__._transform_arg(move_upstream):
+            shutil.copy(patch, name)
+            try:
+                subprocess.check_output(
+                    [self.ss_path] + extra_arg + ["-c", "series.conf"],
+                    stderr=subprocess.STDOUT)
+            except subprocess.CalledProcessError as err:
+                self.assertEqual(err.returncode, 2)
+                self.assertEqual(err.output.decode(), msg)
+            else:
+                self.assertTrue(False)
+
+            shutil.copy("series.conf", series)
+            subprocess.check_call([self.ss_path] + extra_arg + [series])
+            with open(series) as f:
+                content2 = f.read()
+            self.assertEqual(content2, series2)
+
+        os.unlink(series)
+        os.unlink(patch)
+
+
+    def test_found_indexed_downstream_good(self):
+        """
+        patch sorted in mainline, commit found in net-next
+        error, possible causes:
+            mainline repo is outdated
+                because it was not found in mainline index/appears to have
+                moved downstream
+            order in remotes is wrong
+                because it appears to have moved downstream
+            section and Git-repo are wrong
+                because it appears to have moved downstream and tag is good
+        """
+
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), mainline="v0",
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    name,
+                )),
+            )))
+
+        # Expected message dedented to column 0 to keep the literal on one
+        # physical line.
+        self.check_failure(
+"Error: There is a problem with patch \"%s\". The patch is in the wrong section of series.conf or the remote fetching from \"torvalds/linux\" needs to be fetched or the relative order of \"davem/net-next\" and \"torvalds/linux\" in \"remotes\" is incorrect. Manual intervention is required.\n" % (name,))
+
+
+    def test_found_indexed_downstream_bad(self):
+        """
+        patch sorted in mainline, commit found in net-next
+        error, possible causes:
+            mainline repo is outdated
+                because it was not found in mainline index/appears to have
+                moved downstream
+            section is wrong or Git-repo is wrong
+                because it appears to have moved downstream and the two
+                differ
+        """
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), repo=self.net_next_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                (None, (
+                    name,
+                )),
+            )))
+
+        self.check_failure(
+"Error: There is a problem with patch \"%s\". The patch is in the wrong section of series.conf or the remote fetching from \"torvalds/linux\" needs to be fetched. Manual intervention is required.\n" % (name,))
+
+
+    def test_found_indexed_nomove_good(self):
+        """
+        patch sorted in net-next
+        stays there
+        """
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), repo=self.net_next_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("davem/net-next", (
+                    name,
+                )),
+            )))
+
+        self.check_constant(name)
+
+
+    def test_found_indexed_nomove_bad(self):
+        """
+        patch sorted in net-next, tagged with mainline
+        stays there
+        update tag
+        """
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), repo=self.net_repo,
+            directory=self.patch_dir)
+
+        series1 = tests.support.format_series((
+            ("davem/net-next", (
+                name,
+            )),
+        ))
+        with open("series.conf", mode="w") as f:
+            f.write(series1)
+
+        # series.conf is already correct; only the Git-repo tag is rewritten.
+        self.check_outdated(name, "Git-repo tags are outdated.\n", series1)
+        self.check_tag(name, "Git-repo: ", self.net_next_repo)
+
+
+    def prepare_found_indexed_upstream_good(self):
+        # Shared fixture: a patch tagged rdma/rdma whose commit was merged
+        # into mainline; returns the patch name and the expected re-sorted
+        # series.
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["rdma for-next 0"]), repo=self.rdma_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("rdma/rdma for-next", (
+                    name,
+                )),
+            )))
+
+        series2 = tests.support.format_series((
+            (None, (
+                name,
+            )),
+        ))
+
+        return name, series2
+
+
+    def test_found_indexed_upstream_good_moveupstream(self):
+        """
+        patch sorted in rdma for-next, commit found in mainline
+        moves to mainline
+        tag is updated
+        """
+        name, series2 = self.prepare_found_indexed_upstream_good()
+
+        self.check_outdated(name,
+            "Input is not sorted.\nGit-repo tags are outdated.\n", series2,
+            True)
+        self.check_tag(name, "Git-repo: ", self.mainline_repo)
+
+
+    def test_found_indexed_upstream_good_nomoveupstream(self):
+        """
+        patch sorted in rdma for-next, commit found in mainline
+        stays there
+        """
+        name, series2 = self.prepare_found_indexed_upstream_good()
+
+        self.check_constant(name, False)
+
+
+    def prepare_found_indexed_upstream_bad2(self):
+        # Shared fixture: the Git-repo tag uses an alternate (https) URL of
+        # the rdma repo while the commit sits in dledford/rdma.
+        alt_repo = self.rdma_repo.replace("git://", "https://")
+
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["dledford/rdma k.o/for-next 0"]),
+            repo=alt_repo, directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("dledford/rdma k.o/for-next", (
+                    name,
+                )),
+            )))
+
+        series2 = tests.support.format_series((
+            ("rdma/rdma for-rc", (
+                name,
+            )),
+        ))
+
+        return name, series2, alt_repo
+
+
+    def test_found_indexed_upstream_bad2_moveupstream(self):
+        """
+        patch sorted in dledford/rdma k.o/for-next, tagged with rdma/rdma,
+        commit found in rdma/rdma for-rc
+        moves to rdma/rdma for-rc
+        tag is NOT updated
+
+        This is a special case. See the log of commit 0ac6457e94e8
+        ("scripts/git_sort/lib.py: Rewrite Git-repo only if it differs.")
+        """
+        name, series2, alt_repo = self.prepare_found_indexed_upstream_bad2()
+
+        self.check_outdated(name, "Input is not sorted.\n", series2, True)
+        self.check_tag(name, "Git-repo: ", alt_repo)
+
+
+    def test_found_indexed_upstream_bad2_nomoveupstream(self):
+        """
+        patch sorted in dledford/rdma k.o/for-next, tagged with rdma/rdma,
+        commit found in rdma/rdma for-rc
+        error, possible causes:
+            section is wrong or Git-repo is wrong
+                because they differ and there is no way to know which head
+                the user intended.
+        """
+        name, series2, alt_repo = self.prepare_found_indexed_upstream_bad2()
+
+        self.check_failure(
+"Error: There is a problem with patch \"%s\". The Git-repo tag is incorrect or the patch is in the wrong section of series.conf. Manual intervention is required.\n" % (name,), False)
+
+
+    def test_found_notindexed_downstream_good(self):
+        """
+        patch sorted in net (not fetched), commit found in net-next
+        stays there
+        """
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), repo=self.net_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("davem/net", (
+                    name,
+                )),
+            )))
+
+        self.check_constant(name)
+
+
+    def test_found_notindexed_downstream_bad(self):
+        """
+        patch sorted in net (not fetched), commit found in net-next,
+        git-repo tag is bad
+        error, possible causes:
+            section is wrong or Git-repo is wrong
+                because they differ and there is no (usual) scenario where
+                commits move downstream
+        """
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net-next 0"]), repo=self.rdma_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("davem/net", (
+                    name,
+                )),
+            )))
+
+        self.check_failure(
+"Error: There is a problem with patch \"%s\". The Git-repo tag is incorrect or the patch is in the wrong section of series.conf. Manual intervention is required.\n" % (name,))
+
+
+    # test_found_notindexed_nomove_NA()
+    # cannot be tested (without stubbing some code to return invalid data)
+
+
+    def prepare_found_notindexed_upstream_good(self):
+        # Shared fixture: a patch tagged davem/net (not fetched) whose
+        # commit was merged into mainline.
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["net 0"]), repo=self.net_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("davem/net", (
+                    name,
+                )),
+            )))
+
+        series2 = tests.support.format_series((
+            (None, (
+                name,
+            )),
+        ))
+
+        return name, series2
+
+
+    @staticmethod
+    def filter_out_tags(name):
+        """Return the lines of patch `name` minus any Git-repo /
+        Patch-mainline tag lines, for before/after comparisons."""
+        with open(name) as f:
+            result = [line
+                      for line in f
+                      if not line.startswith(("Git-repo", "Patch-mainline",))]
+
+        return result
+
+
+    def test_found_notindexed_upstream_good_moveupstream(self):
+        """
+        patch sorted in net (not fetched), commit found in mainline
+        moves to mainline
+        tag is updated
+        """
+        name, series2 = self.prepare_found_notindexed_upstream_good()
+        before = self.filter_out_tags(name)
+
+        self.check_outdated(name,
+            "Input is not sorted.\nGit-repo tags are outdated.\n", series2,
+            True)
+        self.check_tag(name, "Git-repo: ", self.mainline_repo)
+
+        # check that only the expected tags changed
+        after = self.filter_out_tags(name)
+        self.assertEqual(before, after)
+
+
+    def test_found_notindexed_upstream_good_nomoveupstream(self):
+        """
+        patch sorted in net (not fetched), commit found in mainline
+        stays there
+        """
+        name, series2 = self.prepare_found_notindexed_upstream_good()
+
+        self.check_constant(name, False)
+
+    def prepare_found_notindexed_upstream_bad2(self):
+        # Shared fixture: the Git-repo tag uses an alternate (https) URL of
+        # the nf repo while the series section says nf-next.
+        alt_repo = self.nf_repo.replace("git://", "https://")
+
+        name = tests.support.format_patch(
+            self.repo.get(self.commits["nf 0"]), repo=alt_repo,
+            directory=self.patch_dir)
+
+        with open("series.conf", mode="w") as f:
+            f.write(tests.support.format_series((
+                ("pablo/nf-next", (
+                    name,
+                )),
+            )))
+
+        series2 = tests.support.format_series((
+            ("pablo/nf", (
+                name,
+            )),
+        ))
+
+        return name, series2, alt_repo
+
+
+    def test_found_notindexed_upstream_bad2_moveupstream(self):
+        """
+        patch sorted in pablo nf-next (not fetched), commit found in pablo nf,
+        git-repo tag is bad
+        moves to pablo nf
+        tag is NOT updated
+
+        This is a special case. See the log of commit 0ac6457e94e8
+        ("scripts/git_sort/lib.py: Rewrite Git-repo only if it differs.")
+        """
+        name, series2, alt_repo = self.prepare_found_notindexed_upstream_bad2()
+
+        self.check_outdated(name, "Input is not sorted.\n", series2, True)
+        self.check_tag(name, "Git-repo: ", alt_repo)
+
+
+    def test_found_notindexed_upstream_bad2_nomoveupstream(self):
+        """
+        patch sorted in pablo nf-next (not fetched), commit found in pablo nf,
+        git-repo tag is bad
+        error, possible causes:
+            section is wrong or Git-repo is wrong
+                because they differ and there is no way to know which head
+                the user intended.
+        """
+        name, series2, alt_repo = self.prepare_found_notindexed_upstream_bad2()
+
+        self.check_failure(
+"Error: There is a problem with patch \"%s\". The Git-repo tag is incorrect or the patch is in the wrong section of series.conf. Manual intervention is required.\n" % (name,), False)
+
+
+ def test_notfound_indexed_NA_good(self):
+ """
+ patch sorted in net-next
+ error, possible causes:
+ net-next repo is outdated
+ Git-commit is wrong
+ section and Git-repo are wrong
+ """
+ commit = self.repo.get(self.commits["other 0"])
+ name = tests.support.format_patch(commit, repo=self.net_next_repo,
+ directory=self.patch_dir)
+
+ with open("series.conf", mode="w") as f:
+ f.write(tests.support.format_series((
+ ("davem/net-next", (
+ name,
+ )),
+ )))
+
+ self.check_failure(
+"Error: There is a problem with patch \"%s\". Commit \"%s\" not found in git-sort index. The remote fetching from \"davem/net-next\" needs to be fetched or the Git-commit tag is incorrect or the patch is in the wrong section of series.conf. Manual intervention is required.\n" % (name, str(commit.id),))
+
+
+ def test_notfound_indexed_NA_bad(self):
+ """
+ patch sorted in net-next, git-repo tag is bad
+ error, possible causes:
+ section or Git-repo are wrong
+ """
+ name = tests.support.format_patch(
+ self.repo.get(self.commits["other 0"]), repo=self.rdma_repo,
+ directory=self.patch_dir)
+
+ with open("series.conf", mode="w") as f:
+ f.write(tests.support.format_series((
+ ("davem/net-next", (
+ name,
+ )),
+ )))
+
+ self.check_failure(
+"Error: There is a problem with patch \"%s\". The Git-repo tag is incorrect or the patch is in the wrong section of series.conf. Manual intervention is required.\n" % (name,))
+
+
+ def test_notfound_notindexed_NA_good(self):
+ """
+ patch sorted in net
+ stays there
+ """
+ name = tests.support.format_patch(
+ self.repo.get(self.commits["net 1"]), repo=self.net_repo,
+ directory=self.patch_dir)
+
+ with open("series.conf", mode="w") as f:
+ f.write(tests.support.format_series((
+ ("davem/net", (
+ name,
+ )),
+ )))
+
+ self.check_constant(name)
+
+
+ def test_notfound_notindexed_NA_bad(self):
+ """
+ patch sorted in net, bad git-repo tag
+ error, possible causes:
+ Git-repo is wrong
+ series.conf section is wrong and (git-commit is wrong or the
+ remote is outdated or not available locally
+ Git-repo is not indexed because it's missing in git_sort.py's
+ remote list
+ """
+ name = tests.support.format_patch(
+ self.repo.get(self.commits["net 1"]), repo=self.rdma_repo,
+ directory=self.patch_dir)
+
+ with open("series.conf", mode="w") as f:
+ f.write(tests.support.format_series((
+ ("davem/net", (
+ name,
+ )),
+ )))
+
+ self.check_failure(
+"Error: There is a problem with patch \"%s\". The Git-repo tag is incorrect or the patch is in the wrong section of series.conf and (the Git-commit tag is incorrect or the relevant remote is outdated or not available locally) or an entry for this repository is missing from \"remotes\". In the last case, please edit \"remotes\" in \"scripts/git_sort/git_sort.py\" and commit the result. Manual intervention is required.\n" % (name,))
+
+
+if __name__ == '__main__':
+ # Run a single testcase
+ suite = unittest.TestLoader().loadTestsFromTestCase(TestFromPatch)
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/git_sort/update_clone.py b/scripts/git_sort/update_clone.py
new file mode 100755
index 0000000000..0223235dfa
--- /dev/null
+++ b/scripts/git_sort/update_clone.py
@@ -0,0 +1,116 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2018 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+# USA.
+
+import argparse
+import collections
+import pygit2
+import re
+import shlex
+
+import git_sort
+import lib
+
+
+proto_match = re.compile("^(git|https?)://")
+invalid_match = re.compile("~")
+ext = ".git"
+
+
+def transform(name):
+ """
+ Transform a remote url into a string that can be used as a git remote name.
+ Useful for remotes that do not start with the kernel.org canonical prefix.
+ """
+ name = proto_match.sub("", name, 1)
+ name = invalid_match.sub("_", name)
+ if name.endswith(ext):
+ name = name[:-1 * len(ext)]
+
+ return name
+
+
+def sync_remote_list(directory):
+ quoted_directory = shlex.quote(directory)
+ commands = []
+
+ try:
+ repo = pygit2.Repository(directory)
+ except pygit2.GitError:
+ commands.append("git init %s\n" % (quoted_directory,))
+ current_remotes = {}
+ else:
+ current_remotes = {remote.name : git_sort.RepoURL(remote.url)
+ for remote in repo.remotes}
+
+ commands.append("cd %s\n" % (quoted_directory,))
+
+ new_remotes = collections.OrderedDict(
+ ((transform(str(head.repo_url)), head.repo_url,)
+ for head in git_sort.remotes))
+
+ # modify existing remotes whose url has changed
+ commands.extend(["git remote set-url %s %s\n" % (
+ shlex.quote(name), shlex.quote(repr(repo_url)),)
+ for name, repo_url in new_remotes.items()
+ if name in current_remotes and repo_url != current_remotes[name]
+ ])
+
+ # add missing remotes
+ current = set(current_remotes)
+ new = set(new_remotes)
+
+ mainline = str(git_sort.remotes[0].repo_url)
+ def option(name):
+ if name == mainline:
+ return ""
+ else:
+ return " --no-tags"
+
+ commands.extend(["git remote add%s %s %s\n" % (
+ option(name), shlex.quote(name), shlex.quote(repr(new_remotes[name])),)
+ for name in new_remotes
+ if name in new - current
+ ])
+
+ # remove superfluous remotes
+ commands.extend(["git remote remove %s\n" % (
+ shlex.quote(name),)
+ for name in sorted(current - new)
+ ])
+
+ return commands
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Print commands to clone the mainline Linux repository "
+ "and add all remotes configured for git-sort. "
+ "If the target directory is already a git repository, print commands "
+ "to add or remove remotes to match with the remotes from the list "
+ "configured for git-sort. "
+ "That repository can then be used as an ultimate reference for patch "
+ "ordering in series.conf.")
+ parser.add_argument("directory", nargs="?", default="linux",
+ help="Directory name to clone into. Default: linux")
+ args = parser.parse_args()
+
+ print("".join(sync_remote_list(args.directory)), end="")
+
+ print("git fetch --all")
diff --git a/scripts/gitlog2changes b/scripts/gitlog2changes
index 718c2d98f2..7f59a5d2c1 100755
--- a/scripts/gitlog2changes
+++ b/scripts/gitlog2changes
@@ -116,6 +116,12 @@ sub parse_gitlog {
return @res;
}
+my $excludes_file;
+if ($ARGV[0] eq "--excludes") {
+ shift(@ARGV);
+ $excludes_file = shift(@ARGV);
+}
+
my @fixups;
if ($ARGV[0] eq "--fixups") {
shift(@ARGV);
@@ -144,6 +150,17 @@ for my $fix (@fixups) {
}
}
+if ($excludes_file) {
+ open(my $fh, '<', $excludes_file) or die "$excludes_file: $!\n";
+ while (my $id = <$fh>) {
+ next if $id =~ /^#/;
+ chomp($id);
+ # delete the original commit
+ $commits_h{$id}->{commit} = undef;
+ }
+ close($fh);
+}
+
for my $c (sort { $b->{ts} - $a->{ts} } @commits) {
print_commit($c->{commit}, $c->{email}, $c->{ts}, @{$c->{message}});
}
diff --git a/scripts/install-git-hooks b/scripts/install-git-hooks
index d6d32793c3..0c129b09ee 100755
--- a/scripts/install-git-hooks
+++ b/scripts/install-git-hooks
@@ -35,7 +35,8 @@ check_snippet()
install_snippet()
{
- local hook="$GIT_DIR"/hooks/pre-commit
+ local hookdir="$GIT_DIR"/hooks
+ local hook="$hookdir"/pre-commit
local snippet='
# runs kernel-source-patch-check for each added or modified patch
. "$(dirname "$0")"/kernel-source-pre-commit'
@@ -47,6 +48,7 @@ install_snippet()
if check_snippet; then
return
fi
+ mkdir -p "$hookdir"
if ! test -x "$hook"; then
# if the hook was not enabled before, don't run the example code that
# is usually there
@@ -59,7 +61,7 @@ install_snippet()
$inserted = 1;
}' "$hook"
else
- (echo '#!/bin/sh'
+ (echo '#!/bin/bash'
echo
echo "$snippet") >"$hook"
fi
diff --git a/scripts/ld-version.sh b/scripts/ld-version.sh
new file mode 100755
index 0000000000..f2be0ff9a7
--- /dev/null
+++ b/scripts/ld-version.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/awk -f
+# SPDX-License-Identifier: GPL-2.0
+# extract linker version number from stdin and turn into single number
+ {
+ gsub(".*\\)", "");
+ gsub(".*version ", "");
+ gsub("-.*", "");
+ split($1,a, ".");
+ print a[1]*100000000 + a[2]*1000000 + a[3]*10000;
+ exit
+ }
diff --git a/scripts/lib/SUSE/MyBS.pm b/scripts/lib/SUSE/MyBS.pm
index 06c72c3ca9..da42f3d9c7 100644
--- a/scripts/lib/SUSE/MyBS.pm
+++ b/scripts/lib/SUSE/MyBS.pm
@@ -26,7 +26,16 @@ sub new {
}
$self->{url} = URI->new($api_url);
- my $cfgfile = "$ENV{HOME}/.oscrc";
+ my $cfgfile;
+ foreach ("$ENV{HOME}/.oscrc", "$ENV{HOME}/.config/osc/oscrc") {
+ if (-f) {
+ $cfgfile = $_;
+ last;
+ }
+ }
+
+ defined $cfgfile or die "oscrc not found";
+
# replace name: value with name= value that Config::IniFiles can parse
open(my $fh, '<', $cfgfile) or die "$cfgfile: $!\n";
my $data = "";
@@ -326,6 +335,10 @@ sub create_project {
$seen_archs{$arch} = 1;
push(@archs, $arch);
}
+ if (!@archs) {
+ # this repository is not needed
+ next;
+ }
$writer->startTag("repository", @attrs);
$writer->emptyTag("path", repository => $r,
project => $base);
@@ -564,6 +577,14 @@ sub get_logfile {
return $self->get("/build/$project/$repository/$arch/$package/_log?nostream=1");
}
+sub get_make_stderr {
+ my ($self, $project, $package, $repository, $arch) = @_;
+
+ $repository ||= "standard";
+ return $self->get("/build/$project/$repository/$arch/$package/make-stderr.log");
+}
+
+
sub get_kernel_commit {
my ($self, $project, $package, $revision) = @_;
diff --git a/scripts/linux_git.sh b/scripts/linux_git.sh
new file mode 100755
index 0000000000..71698f3745
--- /dev/null
+++ b/scripts/linux_git.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+# Determine the location of the mainline linux git repository to use as a
+# reference by other scripts.
+
+gitdir=${LINUX_GIT:-$HOME/linux-2.6}
+if ! cd "$gitdir" 2>/dev/null; then
+ echo "Error: could not change to LINUX_GIT directory" >&2
+ exit 1
+fi
+unset GIT_DIR
+if ! result=$(git rev-parse --git-dir 2>/dev/null); then
+ echo "No linux git tree found (please set the \"LINUX_GIT\" environment variable)" >&2
+ exit 1
+fi
+readlink -f "$result"
diff --git a/scripts/log2 b/scripts/log2
index c2d537a9ad..8e159d72ea 100755
--- a/scripts/log2
+++ b/scripts/log2
@@ -22,17 +22,7 @@
# git commit wrapper, generates unified commit messages from patch headers
-. ${0%/*}/wd-functions.sh
-
-if ! $using_git; then
- echo "ERROR: not in a git working directory."
- exit 1
-fi
-
-scripts/check-cvs-add || exit 1
-
-trap 'rm -rf "$tmpdir"' EXIT
-tmpdir=$(mktemp -d /tmp/${0##*/}.XXXXXX)
+_libdir=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
log_entry() {
local entry=$1
@@ -204,34 +194,113 @@ only_patches()
return 0
}
-# Delete patches in $@ from series.conf.
-filter_series()
+
+trim() {
+ local var="$*"
+
+ # remove leading whitespace characters
+ var="${var#"${var%%[![:space:]]*}"}"
+ # remove trailing whitespace characters
+ var="${var%"${var##*[![:space:]]}"}"
+
+ echo -n "$var"
+}
+
+
+# Insert part of the content added to a new series file back into the old
+# series file
+# splice_series <patch name> 3<old_series 4<new_series
+splice_series()
{
- local re
+ local patch=$1
- if test $# -eq 0; then
- cat series.conf
- return
+ local old new old_eof new_eof
+ local state="matching"
+
+ IFS=
+ while true; do
+ if [ $state = "matching" ]; then
+ read -r -u 3 old || old_eof=1
+ read -r -u 4 new || new_eof=1
+ if [ -z "$old_eof" -a "$new_eof" ]; then
+ echo "Error: new series does not contain all" \
+ "lines from old series." > /dev/stderr
+ return 1
+ fi
+ echo "$new"
+ if [ "$old_eof" -a "$new_eof" ]; then
+ break
+ elif [ "$old_eof" -o "$new" != "$old" ]; then
+ if [ "$(trim "$new")" = "$patch" ]; then
+ state="just after patch"
+ else
+ state="diff"
+ fi
+ fi
+ elif [ $state = "diff" ]; then
+ read -r -u 4 new || new_eof=1
+ if [ "$new_eof" ]; then
+ echo "Error: new series does not contain all" \
+ "lines from old series." > /dev/stderr
+ return 1
+ fi
+ echo "$new"
+ if [ ! "$old_eof" -a "$new" = "$old" ]; then
+ state="matching"
+ elif [ "$(trim "$new")" = "$patch" ]; then
+ state="just after patch"
+ fi
+ elif [ $state = "just after patch" ]; then
+ read -r -u 4 new || new_eof=1
+ if [ "$new_eof" ]; then
+ break
+ elif [ ! "$old_eof" -a "$new" = "$old" ]; then
+ echo "$old"
+ state="after patch"
+ elif [ -z "$(trim "$new")" ]; then
+ echo "$new"
+ state="whitespace seen"
+ fi
+ elif [ $state = "whitespace seen" ]; then
+ read -r -u 4 new || new_eof=1
+ if [ "$new_eof" ]; then
+ break
+ elif [ ! "$old_eof" -a "$new" = "$old" ]; then
+ echo "$old"
+ state="after patch"
+ elif [ -z "$(trim "$new")" ]; then
+ echo "$new"
+ else
+ state="after whitespace"
+ fi
+ elif [ $state = "after whitespace" ]; then
+ read -r -u 4 new || new_eof=1
+ if [ "$new_eof" ]; then
+ break
+ elif [ ! "$old_eof" -a "$new" = "$old" ]; then
+ echo "$old"
+ state="after patch"
+ fi
+ elif [ $state = "after patch" ]; then
+ #echo "@@ state $state old <$old> eof <$old_eof> new <$new> eof <$new_eof>" > /dev/stderr
+ cat <&3
+ break
+ fi
+ done
+
+ if [ $state = "matching" ]; then
+ echo "Error: patch \"$patch\" not found in series." > /dev/stderr
+ return 1
fi
- # escape BRE special characters and colon (used as delimiter)
- set -- "${@//\\/\\\\}"
- set -- "${@//./\\.}"
- set -- "${@//\*/\\*}"
- set -- "${@//\[/\\[}"
- set -- "${@//\]/\\]}"
- set -- "${@//^/\\^}"
- set -- "${@//\$/\\\$}"
- set -- "${@//:/\\:}"
- re=$(printf '\\|%s' "$@")
- re="${re:2}"
- sed '\:^[[:space:]]*\('"$re"'\)[[:space:]]*\(#.*\)\?$: d' series.conf
}
# Commit patches one by one for better bisectability
commit_single_patches()
{
local saved_index=$(git write-tree) patch series
- local file added=() modified_aux=() deleted=() no_edit
+ local file added=() modified_aux=() deleted=()
+ git cat-file blob HEAD:series.conf > "$tmpdir"/old_series
+ cp series.conf "$tmpdir"/new_series
for file in "${modified[@]}"; do
case "$file" in
@@ -259,7 +328,9 @@ commit_single_patches()
patch=$1
shift
# add a series.conf with a single new patch to the index
- series=$(filter_series "$@" | git hash-object -w --stdin)
+ series=$(splice_series "$patch" \
+ 3<"$tmpdir"/old_series 4<"$tmpdir"/new_series | \
+ git hash-object -w --stdin)
git read-tree $(git ls-tree HEAD | \
sed -r "s/(.*)\\<[0-9a-f]{40}\\>(.*\\<series\.conf)$/\1$series\2/" \
| git mktree)
@@ -270,7 +341,7 @@ commit_single_patches()
# Patches are being committed one by one for better bisectability.
# There are $# more patches to commit.
EOF
- if ! do_commit $no_edit; then
+ if ! do_commit $no_edit --no-verify; then
# restore the index so that the user does not need to git add
# the patches again
git read-tree "$saved_index"
@@ -286,12 +357,37 @@ EOF
if test -n "${modified[*]}"; then
no_edit=
fi
- if ! do_commit $no_edit -a; then
+ if ! do_commit $no_edit --no-verify -a; then
git read-tree "$saved_index"
return 1
fi
}
+# do not run "main" code if script is being sourced rather than executed
+[[ $0 != "$BASH_SOURCE" ]] && return
+
+. "$_libdir"/wd-functions.sh
+
+if ! $using_git; then
+ echo "ERROR: not in a git working directory."
+ exit 1
+fi
+
+"$_libdir"/check-cvs-add || exit 1
+
+while [ -n "$1" ] ; do
+ case "$1" in
+ --no-edit) no_edit=--no-edit
+ ;;
+ *) echo Unknown argument "$1" >&2
+ ;;
+ esac
+ shift
+done
+
+trap 'rm -rf "$tmpdir"' EXIT
+tmpdir=$(mktemp -d /tmp/${0##*/}.XXXXXX)
+
if test -e "$(git rev-parse --git-dir)/MERGE_HEAD"; then
# Do not try to fabricate a commit message for merge commits, git itself
# does it better
@@ -303,7 +399,10 @@ added=($(git diff --name-only --diff-filter=A HEAD))
modified=($(git diff --name-only --diff-filter=MT HEAD))
deleted=($(git diff --name-only --diff-filter=D HEAD))
+scripts/check-patch-dirs "${added[@]}" "${modified[@]}" || exit 1
+
if only_patches; then
+ "$_libdir"/git-pre-commit || exit
commit_single_patches || exit
else
# FIXME: -a should not be the default
diff --git a/scripts/osc_wrapper b/scripts/osc_wrapper
index ad71edfe62..5a8b93c23a 100755
--- a/scripts/osc_wrapper
+++ b/scripts/osc_wrapper
@@ -11,8 +11,8 @@ die()
usage()
{
die \
-"Usage: $0 [build] [--ibs | --obs] [--enable-debug | --debug | --disable-debug] [--do-checks | --no-checks] [--ignore-kabi] [<osc_args>] [<specfile>]\n
- $0 upload [ --ibs | --obs ] [--enable-debug | --debug | --disable-debug] [--do-checks | --no-checks] [--ignore-kabi] project [<specfile> | directory]"
+"Usage: $0 [build] [--ibs | --obs] [--enable-debug | --debug | --disable-debug] [--do-checks | --no-checks] [--ignore-kabi] [--klp-symbols] [<osc_args>] [<specfile>]\n
+ $0 upload [ --ibs | --obs ] [--enable-debug | --debug | --disable-debug] [--do-checks | --no-checks] [--ignore-kabi] [--klp-symbols] project [<specfile> | directory]"
}
osc()
@@ -211,7 +211,7 @@ project_exists()
do_build()
{
- local osc_args=() debuginfo=false checks=false kabi=true api
+ local osc_args=() debuginfo=false checks=false kabi=true klps=false api
while test $# -gt 0; do
case "$1" in
@@ -239,6 +239,10 @@ do_build()
kabi=false
shift
;;
+ --klp-symbols)
+ klps=true
+ shift
+ ;;
*.spec)
if test $# -eq 1; then
spec=$1
@@ -263,12 +267,15 @@ do_build()
osc_args=("${osc_args[@]}" --disable-debuginfo)
fi
if ! $checks; then
- osc_args=("${osc_args[@]}" --no-checks
+ osc_args=("${osc_args[@]}" --no-checks
--extra-pkgs=-brp-check-suse --extra-pkgs=-post-build-checks)
fi
if $kabi; then
osc_args=("${osc_args[@]}" --define 'ignore_kabi_badness 0')
fi
+ if ! $klps; then
+ osc_args=("${osc_args[@]}" --define 'klp_symbols 1')
+ fi
arch=$(uname -m | sed 's/^i.86$/i586/')
setup_api $api
@@ -341,6 +348,17 @@ command=
spec=
topdir=$(dirname "$0")/..
+# change 'osc_wrapper --ibs upload' to 'osc_wrapper upload --ibs'
+case "$1" in
+--ibs| --obs)
+ api=$1
+ cmd=$2
+ if [ -n "$cmd" ] ; then
+ shift 2
+ set -- "$cmd" "$api" "$@"
+ fi
+esac
+
case "$1" in
build | upload)
command=$1
diff --git a/scripts/patch-tag-template b/scripts/patch-tag-template
index 4d6650ca9e..353552327e 100644
--- a/scripts/patch-tag-template
+++ b/scripts/patch-tag-template
@@ -38,7 +38,7 @@ Acked-by:
# examples:
# Patch-mainline: never. fixes bugs in SUSE specific code
# Patch-mainline: submitted 2004-11-01
-# Patch-mainline: 2.6.10-rc1 any free form text is valid after the version
+# Patch-mainline: v2.6.10-rc1 any free form text is valid after the version
#
Patch-mainline:
diff --git a/scripts/patch-tags-from-git b/scripts/patch-tags-from-git
index bb60b82684..032c20bde5 100755
--- a/scripts/patch-tags-from-git
+++ b/scripts/patch-tags-from-git
@@ -46,10 +46,8 @@ else
fi
if [ $# -eq 0 ]; then
- if [ "z$LINUX_GIT" = "z" ]; then
- exit 0
- fi
- DIR="$LINUX_GIT"
+ libdir=$(dirname "$(readlink -f "$0")")
+ DIR=$("$libdir"/linux_git.sh) || exit 1
else
DIR=$1
shift
@@ -62,7 +60,7 @@ if [ "z$commit" = "z" ]; then
exit 3
fi
-export GIT_DIR=$DIR/.git
+export GIT_DIR=$DIR
if [ ! -d $GIT_DIR ]; then
echo "No such directory $GIT_DIR"
diff --git a/scripts/python/check-patchhdr b/scripts/python/check-patchhdr
index e0e8b353e8..bfa28fca08 100755
--- a/scripts/python/check-patchhdr
+++ b/scripts/python/check-patchhdr
@@ -1,5 +1,9 @@
-#!/usr/bin/env python
-# vim: sw=4 ts=4 et si:
+#!/usr/bin/python
+# -*- coding: utf-8 -*-,
+
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
import sys
from optparse import OptionParser
@@ -24,25 +28,27 @@ if __name__ == "__main__":
if options.stdin:
try:
checker = header.Checker(sys.stdin, options.update)
- except header.HeaderException, e:
+ except header.HeaderException as e:
if args:
fn = args[0]
else:
fn = "<stdin>"
- print >>sys.stderr, e.error_message(fn)
- errors += 1
+ print(e.error_message(fn), file=sys.stderr)
+ if not fn.startswith("patches.xen/"):
+ errors += 1
else:
for fn in args:
try:
f = open(fn)
checker = header.Checker(f, options.update)
f.close()
- except header.HeaderException, e:
- print >>sys.stderr, e.error_message(fn)
- errors += 1
+ except header.HeaderException as e:
+ print(e.error_message(fn), file=sys.stderr)
+ if not fn.startswith("patches.xen/"):
+ errors += 1
if errors > 1:
- print >>sys.stderr, ""
+ print("", file=sys.stderr)
if errors:
- print >>sys.stderr, "Please check README file for patch tag rules."
+ print("Please check README file for patch tag rules.", file=sys.stderr)
sys.exit(1)
diff --git a/scripts/python/suse_git/header.py b/scripts/python/suse_git/header.py
index c157e186ec..e820bb56fd 100755
--- a/scripts/python/suse_git/header.py
+++ b/scripts/python/suse_git/header.py
@@ -1,11 +1,11 @@
-#!/usr/bin/env python
-# vim: sw=4 ts=4 et si:
+#!/usr/bin/python
+# -*- coding: utf-8 -*-,
import sys
import re
from optparse import OptionParser
from . import patch
-from StringIO import StringIO
+from io import StringIO
diffstart = re.compile("^(---|\*\*\*|Index:|\+\+\+)[ \t][^ \t]\S+/|^diff -")
tag_regex = re.compile("(\S+):[ \t]*(.*)")
@@ -243,7 +243,7 @@ class HeaderException(patch.PatchException):
for tag in err.target:
if tag['name'].lower() == name.lower():
return True
- except KeyError, e:
+ except KeyError as e:
pass
return False
@@ -322,7 +322,7 @@ class HeaderChecker(patch.PatchChecker):
target[tag].append(new_req)
def do_patch(self):
- for line in self.stream.readlines():
+ for line in self.stream:
if diffstart.match(line):
break
@@ -341,7 +341,7 @@ class HeaderChecker(patch.PatchChecker):
try:
multi = mapping['multi']
- except KeyError, e:
+ except KeyError as e:
multi = False
for t in self.tags:
diff --git a/scripts/python/suse_git/patch.py b/scripts/python/suse_git/patch.py
index 65f308b22c..13ff659a4d 100644
--- a/scripts/python/suse_git/patch.py
+++ b/scripts/python/suse_git/patch.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# vim: sw=4 ts=4 et si:
+#!/usr/bin/python
+# -*- coding: utf-8 -*-,
import sys
diff --git a/scripts/python/test-all.sh b/scripts/python/test-all.sh
index 23296e6d4d..77cb2eb328 100644
--- a/scripts/python/test-all.sh
+++ b/scripts/python/test-all.sh
@@ -1 +1 @@
-python -m unittest discover
+python3 -m unittest discover
diff --git a/scripts/python/tests/test_header.py b/scripts/python/tests/test_header.py
index b965a4fc69..003c0c84d9 100755
--- a/scripts/python/tests/test_header.py
+++ b/scripts/python/tests/test_header.py
@@ -1,40 +1,24 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
# -*- coding: utf-8 -*-,
-# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
import sys
import os.path
import unittest
-from StringIO import StringIO
+from io import StringIO
from suse_git import header
-# You'll see a slightly strange pattern here:
-# try:
-# self.sometest()
-# self.assertTrue(False)
-# except Exception, e:
-# rest of test
-#
-# This is to test the exception contents. Python's unittest module
-# allows us to assert that a particular exception is raised but
-# it won't let us inspect the contents of it. The assertTrue(False)
-# will cause a test failure if an exception isn't raised; The
-# except HeaderException clause will cause a test failure if the
-# exception isn't HeaderException. When adding new test cases,
-# please follow this pattern when the test case is expecting to fail.
-
class TestHeaderChecker(unittest.TestCase):
def test_empty(self):
try:
self.header = header.Checker("")
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 4)
+ except header.HeaderException as e:
+ self.assertEqual(4, e.errors(header.MissingTagError))
self.assertTrue(e.tag_is_missing('patch-mainline'))
self.assertTrue(e.tag_is_missing('from'))
self.assertTrue(e.tag_is_missing('subject'))
self.assertTrue(e.tag_is_missing('references'))
- self.assertTrue(e.errors() == 4)
+ self.assertEqual(4, e.errors())
def test_subject_dupe(self):
text = """
@@ -46,12 +30,11 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.DuplicateTagError) == 1)
- self.assertTrue(e.errors() == 1)
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.DuplicateTagError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_dupe(self):
text = """
@@ -63,12 +46,12 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.DuplicateTagError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.DuplicateTagError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_empty(self):
text = """
@@ -78,14 +61,14 @@ Patch-mainline:
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.EmptyTagError) == 1)
- self.assertTrue(e.errors(header.MissingTagError) == 1)
- self.assertTrue(e.tag_is_missing('patch-mainline'))
- self.assertTrue(e.errors() == 2)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.EmptyTagError))
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('patch-mainline'))
+ self.assertEqual(2, e.errors())
def test_patch_mainline_version_no_ack_or_sob(self):
text = """
@@ -98,11 +81,11 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
try:
self.header = header.Checker(text)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 1)
+ except header.HeaderException as e:
+ self.assertEqual(1, e.errors(header.MissingTagError))
self.assertTrue(e.tag_is_missing('acked-by'))
self.assertTrue(e.tag_is_missing('signed-off-by'))
- self.assertTrue(e.errors() == 1)
+ self.assertEqual(1, e.errors())
def test_patch_mainline_version_correct_multi_ack(self):
text = """
@@ -216,12 +199,12 @@ Patch-mainline: n/a
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_submitted_correct_ml(self):
text = """
@@ -251,12 +234,12 @@ Patch-mainline: Submitted
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_submitted_detail_git_commit(self):
text = """
@@ -267,12 +250,12 @@ Git-repo: git://host/valid/path/to/repo
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.ExcludedTagError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.ExcludedTagError))
+ self.assertEqual(1, e.errors())
# Required/Excluded conflict between Patch-mainline (Submitted)
# and Git-commit
@@ -285,13 +268,13 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 1)
- self.assertTrue(e.errors(header.ExcludedTagError) == 1)
- self.assertTrue(e.errors() == 2)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertEqual(1, e.errors(header.ExcludedTagError))
+ self.assertEqual(2, e.errors())
def test_patch_mainline_submitted_no_detail(self):
text = """
@@ -301,12 +284,12 @@ Patch-mainline: Submitted
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_never_no_detail(self):
text = """
@@ -318,9 +301,9 @@ Acked-by: developer@suse.com
"""
try:
self.header = header.Checker(text)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+ except header.HeaderException as e:
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_yes_with_detail(self):
text = """
@@ -330,12 +313,12 @@ Patch-mainline: Yes, v4.1-rc1
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_yes_no_detail(self):
text = """
@@ -345,12 +328,12 @@ Patch-mainline: Yes
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_not_yet_no_detail(self):
text = """
@@ -360,12 +343,12 @@ Patch-mainline: Not yet
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_never_detail(self):
text = """
@@ -407,11 +390,11 @@ Acked-by: developer@suse.com
"""
try:
self.header = header.Checker(text)
- except header.HeaderException, e:
+ except header.HeaderException as e:
# Both policy and Git-commit require Patch-mainline
- self.assertTrue(e.errors(header.MissingTagError) == 2)
+ self.assertEqual(2, e.errors(header.MissingTagError))
self.assertTrue(e.tag_is_missing('patch-mainline'))
- self.assertTrue(e.errors() == 2)
+ self.assertEqual(2, e.errors())
def test_patch_mainline_queued_correct(self):
text = """
@@ -433,14 +416,14 @@ Patch-mainline: Queued
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 2)
- self.assertTrue(e.tag_is_missing('git-commit'))
- self.assertTrue(e.tag_is_missing('git-repo'))
- self.assertTrue(e.errors() == 2)
+
+ e = cm.exception
+ self.assertEqual(2, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('git-commit'))
+ self.assertTrue(e.tag_is_missing('git-repo'))
+ self.assertEqual(2, e.errors())
def test_patch_mainline_queued_with_git_repo(self):
text = """
@@ -451,15 +434,15 @@ Git-repo: git://path/to/git/repo
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- # Required by both Patch-mainline (Queued) and
- # Git-repo
- self.assertTrue(e.errors(header.MissingTagError) == 2)
- self.assertTrue(e.tag_is_missing('git-commit'))
- self.assertTrue(e.errors() == 2)
+
+ e = cm.exception
+ # Required by both Patch-mainline (Queued) and
+ # Git-repo
+ self.assertEqual(2, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('git-commit'))
+ self.assertEqual(2, e.errors())
def test_patch_mainline_queued_with_git_commit(self):
text = """
@@ -470,13 +453,13 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 1)
- self.assertTrue(e.tag_is_missing('git-repo'))
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('git-repo'))
+ self.assertEqual(1, e.errors())
def test_patch_mainline_invalid(self):
text = """
@@ -486,12 +469,12 @@ Patch-mainline: n/a
References: bsc#12345
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.FormatError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.FormatError))
+ self.assertEqual(1, e.errors())
def test_diff_like_description(self):
text = """
@@ -532,14 +515,14 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References:
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.EmptyTagError) == 1)
- self.assertTrue(e.errors(header.MissingTagError) == 1)
- self.assertTrue(e.tag_is_missing('references'))
- self.assertTrue(e.errors() == 2)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.EmptyTagError))
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('references'))
+ self.assertEqual(2, e.errors())
def test_patch_references_missing(self):
text = """
@@ -549,13 +532,13 @@ Patch-mainline: v4.2-rc1
Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.MissingTagError) == 1)
- self.assertTrue(e.tag_is_missing('references'))
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('references'))
+ self.assertEqual(1, e.errors())
def test_patch_references_multi(self):
text = """
@@ -605,24 +588,26 @@ Acked-by: developer@suse.com
self.header = header.Checker(text)
-# Enable this check when we want to require a real References tag
-# def test_patch_references_only_freeform(self):
-# text = """
-#From: developer@site.com
-#Subject: some patch
-#Patch-mainline: v4.2-rc1
-#Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-#References: fix for blahblah
-#Acked-by: developer@suse.com
-#"""
-# try:
-# self.header = header.Checker(text)
-# self.assertTrue(False)
-# except header.HeaderException, e:
-# self.assertTrue(e.errors(header.MissingTagError) == 1)
-# self.assertTrue(e.tag_is_missing('references'))
-# self.assertTrue(e.errors() == 1)
-#
+
+ @unittest.skip("Enable this check when we want to require a real "
+ "References tag")
+ def test_patch_references_only_freeform(self):
+ text = """
+From: developer@site.com
+Subject: some patch
+Patch-mainline: v4.2-rc1
+Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+References: fix for blahblah
+Acked-by: developer@suse.com
+"""
+ with self.assertRaises(header.HeaderException) as cm:
+ self.header = header.Checker(text)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('references'))
+ self.assertEqual(1, e.errors())
+
def test_patch_references_empty_update(self):
text = """
@@ -633,12 +618,12 @@ Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
References:
Acked-by: developer@suse.com
"""
- try:
+ with self.assertRaises(header.HeaderException) as cm:
self.header = header.Checker(text, True)
- self.assertTrue(False)
- except header.HeaderException, e:
- self.assertTrue(e.errors(header.EmptyTagError) == 1)
- self.assertTrue(e.errors() == 1)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.EmptyTagError))
+ self.assertEqual(1, e.errors())
def test_patch_references_missing_update(self):
text = """
@@ -698,21 +683,21 @@ Acked-by: developer@suse.com
self.header = header.Checker(text, True)
-# Enable this check when we want to require a real References tag
-# def test_patch_references_only_freeform_update(self):
-# text = """
-#From: developer@site.com
-#Subject: some patch
-#Patch-mainline: v4.2-rc1
-#Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-#References: fix for blahblah
-#Acked-by: developer@suse.com
-#"""
-# try:
-# self.header = header.Checker(text, True)
-# self.assertTrue(False)
-# except header.HeaderException, e:
-# self.assertTrue(e.errors(header.MissingTagError) == 1)
-# self.assertTrue(e.tag_is_missing('references'))
-# self.assertTrue(e.errors() == 1)
-#
+ @unittest.skip("Enable this check when we want to require a real "
+ "References tag")
+ def test_patch_references_only_freeform_update(self):
+ text = """
+From: developer@site.com
+Subject: some patch
+Patch-mainline: v4.2-rc1
+Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+References: fix for blahblah
+Acked-by: developer@suse.com
+"""
+ with self.assertRaises(header.HeaderException) as cm:
+ self.header = header.Checker(text, True)
+
+ e = cm.exception
+ self.assertEqual(1, e.errors(header.MissingTagError))
+ self.assertTrue(e.tag_is_missing('references'))
+ self.assertEqual(1, e.errors())
diff --git a/scripts/run_oldconfig.sh b/scripts/run_oldconfig.sh
index a7a9fba122..de637afa3a 100755
--- a/scripts/run_oldconfig.sh
+++ b/scripts/run_oldconfig.sh
@@ -24,6 +24,12 @@
# dirty scroll region tricks ...
use_region=false
+if test -f scripts/kconfig/Makefile && \
+ grep -q syncconfig scripts/kconfig/Makefile; then
+ syncconfig="syncconfig"
+else
+ syncconfig="silentoldconfig"
+fi
function _region_init_ () {
echo -ne '\x1b[H\033[J' # clear screen
@@ -356,7 +362,7 @@ ask_reuse_config()
filter_config()
{
- sed -e '/^# .* is not set$/p' -e '/^$\|^#/d' "$@" | sort
+ sed -e '/CONFIG_GCC_VERSION/ d' -e '/^# .* is not set$/p' -e '/^$\|^#/d' "$@" | sort
}
# Keep these in the -vanilla fragment even if -default has the same values.
@@ -401,6 +407,40 @@ for config in $config_files; do
MAKE_ARGS="ARCH=$cpu_arch"
;;
esac
+ unset cross_arch
+ unset cross_extra
+ case $config in
+ arm64/*)
+ cross_arch="aarch64"
+ ;;
+ arm*/*)
+ cross_arch="arm"
+ cross_extra="gnueabi-"
+ ;;
+ ppc64le/*)
+ cross_arch="powerpc64le"
+ ;;
+ ppc64/*)
+ cross_arch="powerpc64"
+ ;;
+ ppc/*)
+ cross_arch="powerpc"
+ ;;
+ i386/*)
+ # hack: whatever i386-suse-linux-gcc is, it does not support asm-goto
+ cross_arch="x86_64"
+ ;;
+ *)
+ cross_arch="${config%%/*}"
+ ;;
+ esac
+ cross_compile="${CROSS_COMPILE-${cross_arch}-suse-linux-${cross_extra}}"
+ if [ -n "$cross_compile" -a -x /usr/bin/${cross_compile}gcc ]; then
+ MAKE_ARGS="$MAKE_ARGS CROSS_COMPILE=$cross_compile"
+ fi
+ if [ -n "$CC" ]; then
+ MAKE_ARGS="$MAKE_ARGS CC=$CC"
+ fi
if $silent; then
MAKE_ARGS="$MAKE_ARGS -s"
fi
@@ -453,7 +493,7 @@ for config in $config_files; do
*)
_region_msg_ "working on $config"
if $check; then
- if ! make $MAKE_ARGS silentoldconfig </dev/null; then
+ if ! make $MAKE_ARGS $syncconfig </dev/null; then
echo "${config#$prefix} is out of date"
err=1
rm $config_orig
diff --git a/scripts/sequence-patch.sh b/scripts/sequence-patch.sh
index b71307d049..8ecbf4a61b 100755
--- a/scripts/sequence-patch.sh
+++ b/scripts/sequence-patch.sh
@@ -20,6 +20,8 @@
# you may find current contact information at www.novell.com
#############################################################################
+[ -f $(dirname $0)/../rpm/config.sh ] || exit 0
+
source $(dirname $0)/../rpm/config.sh
source $(dirname $0)/wd-functions.sh
@@ -36,7 +38,7 @@ esac
usage() {
cat <<END
SYNOPSIS: $0 [-qv] [--symbol=...] [--dir=...]
- [--fast] [last-patch-name] [--vanilla] [--fuzz=NUM]
+ [--fast] [--rapid] [last-patch-name] [--vanilla] [--fuzz=NUM]
[--patch-dir=PATH] [--build-dir=PATH] [--config=ARCH-FLAVOR [--kabi]]
[--ctags] [--cscope] [--etags] [--skip-reverse]
@@ -63,12 +65,22 @@ SYNOPSIS: $0 [-qv] [--symbol=...] [--dir=...]
of the component patches fail to apply the tree will not be rolled
back.
+ The --rapid option will use rapidquilt to apply patches.
+
When used with last-patch-name, both --fast and --no-quilt
will set up a quilt environment for the remaining patches.
END
exit 1
}
+apply_rapid_patches() {
+ printf "%s\n" ${PATCHES_BEFORE[@]} >> $PATCH_DIR/series
+ rapidquilt push -a -d $PATCH_DIR -p $PWD $fuzz
+ status=$?
+
+ PATCHES=( ${PATCHES_AFTER[@]} )
+}
+
apply_fast_patches() {
echo "[ Fast-applying ${#PATCHES_BEFORE[@]} patches. ${#PATCHES_AFTER[@]} remain. ]"
LAST_LOG=$(echo "${PATCHES_BEFORE[@]}" | xargs cat | \
@@ -198,7 +210,7 @@ if $have_arch_patches; then
else
arch_opt=""
fi
-options=`getopt -o qvd:F: --long quilt,no-quilt,$arch_opt,symbol:,dir:,combine,fast,vanilla,fuzz,patch-dir:,build-dir:,config:,kabi,ctags,cscope,etags,skip-reverse -- "$@"`
+options=`getopt -o qvd:F: --long quilt,no-quilt,$arch_opt,symbol:,dir:,combine,fast,rapid,vanilla,fuzz,patch-dir:,build-dir:,config:,kabi,ctags,cscope,etags,skip-reverse -- "$@"`
if [ $? -ne 0 ]
then
@@ -211,6 +223,7 @@ QUIET=1
EXTRA_SYMBOLS=
QUILT=true
FAST=
+RAPID=
VANILLA=false
SP_BUILD_DIR=
CONFIG=
@@ -242,6 +255,9 @@ while true; do
--fast)
FAST=1
;;
+ --rapid)
+ RAPID=1
+ ;;
--arch)
export PATCH_ARCH=$2
shift
@@ -322,6 +338,8 @@ if test -z "$CONFIG"; then
CONFIG=$machine-smp
elif test -e "config/$machine/pae"; then
CONFIG=$machine-pae
+ elif test -e "config/$machine/azure"; then
+ CONFIG=$machine-azure
elif test -e "config/$machine/default"; then
CONFIG=$machine-default
elif test -n "$VARIANT" -a -e "config/$machine/${VARIANT#-}"; then
@@ -358,15 +376,17 @@ if [ $# -ne 0 ]; then
usage
fi
-# Some patches require patch 2.5.4. Abort with older versions.
-PATCH_VERSION=$(patch -v | sed -e '/^patch/!d' -e 's/patch //')
-case $PATCH_VERSION in
- ([01].*|2.[1-4].*|2.5.[1-3]) # (check if < 2.5.4)
- echo "patch version $PATCH_VERSION found; " \
- "a version >= 2.5.4 required." >&2
+# We need at least 2.7 now due to kselftests-kmp-default requiring
+# selftests with the need to ensure scripts are execuable.
+PATCH_VERSION_REQ="2.7"
+PATCH_VERSION_REQ_LD_VERSION=$(echo $PATCH_VERSION_REQ | scripts/ld-version.sh)
+PATCH_VERSION=$(patch --version | head -1 | awk '{print $3}')
+PATCH_VERSION_LD=$(echo $PATCH_VERSION | scripts/ld-version.sh)
+
+if [ $PATCH_VERSION_LD -lt $PATCH_VERSION_REQ_LD_VERSION ]; then
+ echo "$0 requires at least patch version $PATCH_VERSION_REQ"
exit 1
- ;;
-esac
+fi
# Check SCRATCH_AREA.
if [ -z "$SCRATCH_AREA" ]; then
@@ -388,6 +408,7 @@ export TMPDIR
ORIG_DIR=$SCRATCH_AREA/linux-$SRCVERSION.orig
TAG=$(get_branch_name)
TAG=${TAG//\//_}
+TAG=${TAG//\#/_}
if $VANILLA; then
TAG=${TAG}-vanilla
fi
@@ -559,10 +580,12 @@ fi
mkdir $PATCH_DIR/.pc
echo 2 > $PATCH_DIR/.pc/.version
-if [ -z "$FAST" ]; then
- apply_patches
-else
+if [ -n "$FAST" ]; then
apply_fast_patches
+elif [ -n "$RAPID" ]; then
+ apply_rapid_patches
+else
+ apply_patches
fi
if [ -n "$EXTRA_SYMBOLS" ]; then
@@ -614,7 +637,20 @@ fi
if test -e supported.conf; then
echo "[ Generating Module.supported ]"
- scripts/guards base external < supported.conf > "$SP_BUILD_DIR/Module.supported"
+ scripts/guards --list --with-guards < supported.conf | \
+ awk '
+ /\+external / {
+ print $(NF) " external";
+ next;
+ }
+ /^-/ {
+ print $(NF) " no";
+ next;
+ }
+ {
+ print $(NF);
+ }
+ ' > "$SP_BUILD_DIR/Module.supported"
fi
if test -n "$CONFIG"; then
@@ -647,8 +683,14 @@ if test -n "$CONFIG"; then
echo "[ No kABI references for $CONFIG ]"
fi
fi
+ if test -f ${PATCH_DIR}/scripts/kconfig/Makefile && \
+ grep -q syncconfig ${PATCH_DIR}/scripts/kconfig/Makefile; then
+ syncconfig="syncconfig"
+ else
+ syncconfig="silentoldconfig"
+ fi
test "$SP_BUILD_DIR" != "$PATCH_DIR" && \
- make -C $PATCH_DIR O=$SP_BUILD_DIR -s silentoldconfig
+ make -C $PATCH_DIR O=$SP_BUILD_DIR -s $syncconfig
fi
# Some archs we use for the config do not exist or have a different name in the
diff --git a/scripts/series2git b/scripts/series2git
index 59bd698449..e62ccc7e02 100755
--- a/scripts/series2git
+++ b/scripts/series2git
@@ -200,7 +200,7 @@ apply_patch()
GIT_AUTHOR_EMAIL=
;;
esac
- local subject=$(ex_hdr Subject "$patch")
+ local subject=$(ex_hdr Subject "$patch" | sed -es'/\[[^]]*\] //')
GIT_AUTHOR_DATE=$(ex_hdr Date "$patch")
GIT_AUTHOR_DATE=$(ex_hdr Date "$patch" \
| perl -p -e 's/(\d\d:\d\d:\d\d) (\d{4})/$2 $1/;s/:$//')
@@ -221,7 +221,7 @@ scripts/guards $EXTRA_SYMBOLS < series.conf | while read patch; do
case "$patch" in
patches.kernel.org/patch-2.6.*.*-rc*)
;;
- patches.kernel.org/patch-[2-9].*.*)
+ patches.kernel.org/*)
if $skip; then
echo "skipping $patch"
continue
diff --git a/scripts/series_sort.py b/scripts/series_sort.py
new file mode 120000
index 0000000000..05f626de7d
--- /dev/null
+++ b/scripts/series_sort.py
@@ -0,0 +1 @@
+git_sort/series_sort.py \ No newline at end of file
diff --git a/scripts/stableids b/scripts/stableids
index 0cef6a5e86..c44704959e 100755
--- a/scripts/stableids
+++ b/scripts/stableids
@@ -1,115 +1,143 @@
#!/usr/bin/perl -w
use strict;
+use File::Basename qq(basename);
+use Git;
+use Storable qw(store);
+use Term::ANSIColor qw(colored);
if (@ARGV < 2) {
- print "Usage: $0 suse_machine stable_version [user_id]\n";
+ print "Usage: $0 suse_machine stable_version\n";
exit 1;
}
my $machine = shift;
my $stable_ver = shift;
-my $idsfile = "/dev/shm/ids";
-my $patchfile = "/dev/shm/patch-";
-my $user = 'Jiri Slaby <jslaby@suse.cz>';
my $old_version;
my $new_version;
-
-if (@ARGV > 0) {
- $user = join(' ', @ARGV);
-}
+my $bnc;
my %bnc_map = (
'3.12' => '1012620',
'4.1' => '1023711',
'4.4' => '1012382',
+ '4.12' => '1060662',
'other' => '1012628',
);
-my $bnc;
-if ($stable_ver =~ /^v?((2\.6\.[0-9]+)\.([0-9]+))$/ ||
- $stable_ver =~ /^v?(([3-9]\.[0-9]+)\.([0-9]+))$/) {
+if ($stable_ver =~ /^v?(([3-9]\.[0-9]+)\.([0-9]+))$/) {
$new_version = $1;
$old_version = $2;
$bnc = $bnc_map{$old_version};
- if ($3 eq 1) {
- $patchfile .= $new_version;
- } else {
+ if ($3 ne 1) {
$old_version .= '.' . ($3 - 1);
- $patchfile .= "$old_version-$3";
}
} else {
die "cannot understand stable version $stable_ver";
}
if (!defined $bnc) {
- print "Kernel version not found in the map, assuming Tumbleweed\n";
+ print colored("Kernel version not found in the map, assuming Tumbleweed\n", 'red');
$bnc = $bnc_map{'other'};
}
-my $range = "v$old_version..v$new_version";
-
-open GIT, "git log --format=%B $range|" ||
- die "git log cannot be run";
-open OUT, "|ssh -C $machine -o StrictHostKeyChecking=no 'cat >$idsfile'" ||
- die "ssh didn't start";
-
-my @SHAs;
-my $cont = 0;
+my $patchpar = '/dev/shm';
+my $patchdir = "patches-$new_version";
+my $patchpath = "$patchpar/$patchdir";
+my $idsfile = "$patchpath/ids";
+if (!mkdir $patchpath) {
+ die "$patchpath already exists";
+}
-while (<GIT>) {
- if ($cont) {
- if (/^\s+([0-9a-f]{40})\s*[\])]$/) {
- print OUT "$1\n";
- push @SHAs, $1;
- $cont = 0;
- next;
+my $range = "v$old_version..v$new_version";
+my $repo = Git->repository();
+my @revs = $repo->command('rev-list', '--reverse', $range);
+my %ids;
+my $counter = 1;
+my @to_delete;
+my $sha_re = qr/[0-9a-f]{40}/;
+
+foreach my $rev (@revs) {
+ my ($filename, @commit_log) = $repo->command('show', '--no-patch',
+ '--format=%f%n%B', $rev);
+
+ my $cont = 0;
+ my @shas;
+ my @unmatched_shas;
+
+ foreach (@commit_log) {
+ if ($cont) {
+ if (/^\s+($sha_re)\s*[\])]$/) {
+ push @shas, $1;
+ $cont = 0;
+ next;
+ }
+ }
+ if (/^commit ($sha_re) upstream\.?$/ ||
+ /^[\[(]\s*[Uu]pstream commit ($sha_re)\s*[\])]$/ ||
+ /^[uU]pstream commit ($sha_re)\.$/ ||
+ /^This is a backport of ($sha_re)$/ ||
+ /^\(cherry picked from commit ($sha_re)\)$/) {
+ push @shas, $1;
+ } elsif (/^[\[(]\s*[Uu]pstream commits ($sha_re)\s+and\s*$/) {
+ push @shas, $1;
+ $cont = 1;
+ } elsif (/^(Fixes:|This reverts commit) $sha_re(?: \(".*)?\.?$/ ||
+ /^Link: .*lkml/) {
+ # ignore
+ } elsif (/\b$sha_re\b/) {
+ push @unmatched_shas, $_;
}
}
- if (/^commit ([0-9a-f]{40}) upstream\.?$/ ||
- /^[\[(]\s*[Uu]pstream commit ([0-9a-f]{40})\s*[\])]$/ ||
- /^[uU]pstream commit ([0-9a-f]{40})\.$/ ||
- /^This is a backport of ([0-9a-f]{40})$/ ||
- /^\(cherry picked from commit ([0-9a-f]{40})\)$/) {
- print OUT "$1\n";
- push @SHAs, $1;
- } elsif (/^[\[(]\s*[Uu]pstream commits ([0-9a-f]{40})\s+and\s*$/) {
- print OUT "$1\n";
- push @SHAs, $1;
- $cont = 1;
- } elsif (/\b[0-9a-f]{40}\b/) {
- print "\tUnmatched SHA: $_";
- }
-}
-
-close OUT;
-close GIT;
-print "Written $idsfile on $machine\n";
+ # better than nothing
+ if (!scalar @shas) {
+ push @shas, $rev;
+ }
-open GIT, "git diff $range|" ||
- die "git diff cannot be run";
-open OUT, "|ssh -C $machine 'cat >$patchfile'" ||
- die "ssh didn't start";
+ my @patch = $repo->command('format-patch', '--stdout',
+ '--signoff', '--no-renames',
+ '-1', $rev,
+ '--add-header', "References: bnc#$bnc",
+ '--add-header', "Patch-mainline: $new_version",
+ (map { ('--add-header', "Git-commit: $_") } @shas));
+
+ # drop From
+ shift(@patch) =~ /^From/ or die "From line is not the first one?";
+
+ my $newname = sprintf("$new_version-%03d-%s", $counter, $filename);
+ # 57 is what git-format-patch uses
+ $newname =~ s/^(.{1,57}).*$/$1.patch/;
+ my $newpath = "$patchpath/$newname";
+
+ open(PATCH, ">$newpath") or die "cannot output to $newpath";
+ print PATCH join "\n", @patch;
+ print PATCH "\n";
+ close PATCH;
+
+ $ids{$newname} = [ @shas ];
+
+ $rev =~ /.{12}/;
+ print colored($&, "yellow"), " -> $newname\n";
+ foreach (@shas) {
+ /.{12}/;
+ print "\tUpstream SHA: ", colored("$&\n", "yellow");
+ }
+ foreach (@unmatched_shas) {
+ print colored("\tUNMATCHED SHA:", 'bold red'), " $_\n";
+ }
-print OUT "From: $user\n";
-print OUT "Subject: Linux $new_version\n";
-print OUT "References: bnc#$bnc\n";
-print OUT "Patch-mainline: $new_version\n";
-foreach (@SHAs) {
- print OUT "Git-commit: $_\n";
+ push @to_delete, $newpath;
+ $counter++;
}
-print OUT "\n";
-print OUT "Signed-off-by: $user\n";
-print OUT "---\n";
+store(\%ids, $idsfile) or die "cannot write $idsfile";
+push @to_delete, $idsfile;
-while (<GIT>) {
- print OUT;
-}
-
-close OUT;
-close GIT;
+system("tar -cC $patchpar $patchdir|ssh -C $machine -o StrictHostKeyChecking=no 'tar -xC $patchpar'") == 0 ||
+ die "ssh didn't start";
+print "Written patches and ids to $machine:$patchpath\n";
-print "Written $patchfile on $machine\n";
+unlink(@to_delete) or print STDERR "cannot delete some temp files\n";
+rmdir("$patchpath") or print STDERR "cannot remove $patchpath\n";
0;
diff --git a/scripts/stableup b/scripts/stableup
index ece1cc4050..e188eab603 100755
--- a/scripts/stableup
+++ b/scripts/stableup
@@ -1,14 +1,17 @@
#!/usr/bin/perl -w
use strict;
-use File::Basename qq(basename);
+use Error qw(:try);
use File::Copy qq(move);
+use Git;
+use Storable qq(retrieve);
+use Term::ANSIColor qw(colored);
my $tmpdir = $ENV{TMPDIR} || "/tmp";
-die "Has to be run from the kernel-source GIT dir" if (! -d ".git");
+my $repo = Git->repository;
-if (scalar @ARGV < 2) {
- print "Usage: $0 patch ids_file [nonzero_to_force_removal]\n";
+if (scalar @ARGV < 1) {
+ print "Usage: $0 patches_dir [nonzero_to_force_removal]\n";
exit 1;
}
@@ -20,86 +23,26 @@ my $series_changed = 0;
$series = <SERIES>;
}
-my $patch = $ARGV[0];
-my $idsfile = $ARGV[1];
-my $force_removal = $ARGV[2] || 0;
-my $destdir = "patches.kernel.org/";
-my $dest = $destdir . basename $patch;
+my $patchdir = $ARGV[0];
+my $idsfile = "$patchdir/ids";
+my $force_removal = $ARGV[1] || 0;
+my $destdir = "patches.kernel.org";
mkdir $destdir if (! -d $destdir);
-if (-f $patch && ! -f $dest) {
- move($patch, $dest) || die "cannot move $patch to $dest";
- print "Moved $patch to $dest\n";
- system("git add $dest");
- print "Added $dest to GIT\n";
- unless ($series =~ s/(latest standard kernel patches(?:\n[^\n]+)+\n)\n/$1\t$dest\n\n/) {
- die "cannot find a place in series.conf to add the patch";
- }
- $series_changed = 1;
-}
-
-open(IDS, "<$idsfile") || die "cannot open $idsfile";
-my $regexp = join "|", map { chomp; $_ } <IDS>;
-close IDS;
-
-my @references = ();
-my %files;
+my $ids = retrieve($idsfile) or die "cannot read $idsfile";
+my $regexp = join "|", map @$_, values %$ids;
+my @candidates = ();
if ($regexp eq "") {
- print STDERR "empty regexp computed? Skipping patches removal...\n";
+ print STDERR colored("empty regexp computed? Skipping patches removal...\n", 'yellow');
} else {
-
- open(GIT, "git grep -E '$regexp' patches.*|") ||
- die "cannot execute git";
-
- my $tags = qr/(?:Git-[Cc]ommit: |Patch-[Mm]ainline: |From )([0-9a-f]{40})/;
-
- while (<GIT>) {
- chomp;
- next if (/patches\.kernel\.org\/patch-[0-9]/);
- /^([^:]+):($tags)?/;
- my $file = $1;
- # file may be deleted already
- if (defined $2 && -f $file) {
- open(PATCH, "<$file") || die "cannot open $file";
- my %shas = ();
- my @refs = ();
- while (my $line = <PATCH>) {
- chomp $line;
- $shas{$1} = 1 if ($line =~ /^$tags/);
- if ($line =~ /^References: (.*)$/) {
- push @refs, (split /[\s,]+/, $1);
- }
- }
- close PATCH;
- if ($force_removal || scalar(keys %shas) == 1) {
- push @references, @refs;
- system("git rm $file");
- $series =~ s/
- (?:
- # empty or non-comment line
- (^(?:$|[ \t]*[^ \t#].*)\n)
- # comment to be deleted
- [ \t]*\#.*\n
- )?
- # file to be deleted
- [ \t]+\Q$file\E[ \t]*\n
- /$1 || ""/mex;
- $series_changed = 1;
- }
- }
- $files{$file} = 1;
- }
-
- close GIT;
-}
-
-if ($series_changed) {
- seek(SERIES, 0, 0) || die "cannot seek series.conf";
- truncate(SERIES, 0) || die "cannot truncate series.conf";
- print SERIES $series;
+ try {
+ @candidates = $repo->command('grep', '-El', $regexp, '--',
+ 'patches.*') or die "cannot execute git grep";
+ } catch Git::Error::Command with {
+ # not found is OK
+ };
}
-close SERIES;
sub output_refs($@) {
my ($fh, @refs) = @_;
@@ -111,36 +54,140 @@ sub output_refs($@) {
print $fh "References: ", join(' ', sort keys %uniq), "\n";
}
-if (scalar @references) { REFS: {
- unless (-f $dest) {
- print STDERR "missed references:\n";
- output_refs(\*STDERR, @references);
- last REFS;
- }
+sub push_refs($@) {
+ my ($dest, @refs) = @_;
+
open(DEST, "<$dest") || die "cannot open $dest for reading";
my @dest = <DEST>;
close DEST;
+
open(DEST, ">$dest") || die "cannot open $dest for writing";
my $had_git_commit = 0;
foreach my $line (@dest) {
if (!$had_git_commit && $line =~ /^Git-commit: /) {
- output_refs(\*DEST, @references);
+ output_refs(\*DEST, @refs);
$had_git_commit = 1;
} elsif ($line =~ /^References: (.*)$/) {
chomp $1;
- push @references, (split /[\s,]+/, $1);
+ push @refs, (split /[\s,]+/, $1);
next;
}
print DEST $line;
}
- undef @dest;
close DEST;
-}}
+}
-foreach my $file (keys %files) {
- if (-e $file) {
- system("git grep -E '$regexp' $file");
+my %files;
+my $tags = qr/(?:Git-[Cc]ommit: |Patch-[Mm]ainline: |From )([0-9a-f]{40})/;
+
+sub handle_removal($$) {
+ my ($line, $dest) = @_;
+
+ $line =~ /^([^:]+):($tags)?/;
+ my $file = $1;
+ my $match = $2;
+
+ $files{$file} = 1;
+
+ # weird git-commit tag or file may be deleted already
+ return unless (defined $match && -f $file);
+
+ print colored("\tRemoving $file\n", "yellow");
+
+ open(PATCH, "<$file") or die "cannot open $file";
+ my %shas = ();
+ my @refs = ();
+ while (my $line = <PATCH>) {
+ chomp $line;
+ $shas{$1} = 1 if ($line =~ /^$tags/);
+ if ($line =~ /^References: (.*)$/) {
+ push @refs, (split /[\s,]+/, $1);
+ }
+ }
+ close PATCH;
+
+ return unless ($force_removal || scalar(keys %shas) == 1);
+
+ try {
+ $repo->command('rm', '--', $file);
+ } catch Git::Error::Command with {
+ # sometimes, they are dirty
+ };
+
+ $series =~ s/
+ (?:
+ # empty or non-comment line
+ (^(?:$|[ \t]*[^ \t#].*)\n)
+ # comment to be deleted
+ [ \t]*\#.*\n
+ )?
+ # file to be deleted
+ [ \t]+\Q$file\E[ \t]*\n
+ /$1 || ""/mex;
+ $series_changed = 1;
+
+ if (scalar @refs) {
+ if (-f $dest) {
+ push_refs($dest, @refs);
+ } else {
+ print STDERR colored("\tmissed references:\n\t", 'red');
+ output_refs(\*STDERR, @refs);
+ }
+ }
+}
+
+foreach my $patch (sort keys %$ids) {
+ my $src = "$patchdir/$patch";
+ my $dest = "$destdir/$patch";
+
+ print "Handling $patch\n";
+ if (-f $src && ! -f $dest) {
+ move($src, $dest) || die "cannot move $src to $dest";
+ print "\tMoved to $destdir\n";
+ $repo->command('add', $dest);
+ print "\tAdded to GIT\n";
+ unless ($series =~ s/(latest standard kernel patches(?:\n[^\n]+)+\n)\n/$1\t$dest\n\n/) {
+ die "cannot find a place in series.conf to add a patch";
+ }
+ $series_changed = 1;
+ }
+
+ my $re = join("|", @{$$ids{$patch}});
+
+ if (scalar @candidates > 0 && $re ne "") {
+ my @found;
+ try {
+ @found = $repo->command('grep', '-E', $re, '--',
+ @candidates) or
+ die "cannot execute git grep";
+ } catch Git::Error::Command with {
+ # not found is OK
+ };
+
+ foreach (@found) {
+ next if (/patches\.kernel\.org\//);
+ handle_removal($_, $dest);
+ }
}
+
+}
+
+if ($series_changed) {
+ seek(SERIES, 0, 0) || die "cannot seek series.conf";
+ truncate(SERIES, 0) || die "cannot truncate series.conf";
+ print SERIES $series;
+}
+close SERIES;
+
+foreach my $file (keys %files) {
+ next unless (-e $file);
+
+ try {
+ $repo->command_noisy('grep', '-E', $regexp, '--',
+ $file);
+ } catch Git::Error::Command with {
+ # not found is OK
+ };
}
0;
diff --git a/scripts/tar-up.sh b/scripts/tar-up.sh
index e2fc1c59ee..9e5ece204b 100755
--- a/scripts/tar-up.sh
+++ b/scripts/tar-up.sh
@@ -195,10 +195,11 @@ CLEANFILES=()
trap 'if test -n "$CLEANFILES"; then rm -rf "${CLEANFILES[@]}"; fi' EXIT
tmpdir=$(mktemp -dt ${0##*/}.XXXXXX)
CLEANFILES=("${CLEANFILES[@]}" "$tmpdir")
+rpmfiles=$(ls rpm/* | grep -v "~$")
-cp -p rpm/* config.conf supported.conf doc/* $build_dir
+cp -p $rpmfiles config.conf supported.conf doc/* $build_dir
match="${flavor:+\\/$flavor$}"
-match="${arch:+^+${arch}${match:+.*}}${match}"
+match="${arch:+^+\\($(echo -n "${arch}" | sed 's/[, ]\+/\\\|/g')\\)\\>${match:+.*}}${match}"
[ -n "$match" ] && sed -i "/^$\|\s*#\|${match}/b; s/\(.*\)/#### \1/" $build_dir/config.conf
if test -e misc/extract-modaliases; then
cp misc/extract-modaliases $build_dir
@@ -207,7 +208,7 @@ fi
if grep -q '^Source.*:[[:space:]]*log\.sh[[:space:]]*$' rpm/kernel-source.spec.in; then
cp -p scripts/rpm-log.sh "$build_dir"/log.sh
fi
-rm -f "$build_dir/kernel-source.changes.old" "$build_dir/gitlog-fixups"
+rm -f "$build_dir/kernel-source.changes.old" "$build_dir/gitlog-fixups" "$build_dir/gitlog-excludes"
if test -e "$build_dir"/config-options.changes; then
# Rename to avoid triggering a build service rule error
mv "$build_dir"/config-options.changes \
@@ -243,6 +244,9 @@ elif $using_git; then
echo "expected \"last commit: <commit>\" in rpm/kernel-source.changes.old" >&2
exit 1
esac
+ if test -e rpm/gitlog-excludes; then
+ exclude=(--excludes "$_" "${exclude[@]}")
+ fi
if test -e rpm/gitlog-fixups; then
exclude=(--fixups "$_" "${exclude[@]}")
fi
diff --git a/scripts/tests/lib.py b/scripts/tests/lib.py
new file mode 100644
index 0000000000..58738a8423
--- /dev/null
+++ b/scripts/tests/lib.py
@@ -0,0 +1,9 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os.path
+
+
+def libdir():
+ return os.path.dirname(os.path.realpath(__file__))
+
diff --git a/scripts/tests/test_linux_git.py b/scripts/tests/test_linux_git.py
new file mode 100755
index 0000000000..ea678ff927
--- /dev/null
+++ b/scripts/tests/test_linux_git.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os.path
+import shutil
+import subprocess
+import tempfile
+import unittest
+
+import lib
+
+
+class TestLinuxGit(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp(prefix="ks_linux_git")
+ self.lg_path = os.path.join(lib.libdir(), "../linux_git.sh")
+
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+
+
+ def run_one(self, *, bare, var, output):
+ args = ["git", "init", "--quiet"]
+ if bare:
+ args.append("--bare")
+ args.append(self.tmpdir)
+
+ subprocess.check_call(args, env={})
+
+ retval = subprocess.check_output((self.lg_path,),
+ env={"LINUX_GIT" : var})
+ self.assertEqual(output, retval.decode())
+
+
+ def test_bare(self):
+ self.run_one(bare=True, var=self.tmpdir, output=self.tmpdir + "\n")
+
+
+ def test_nonbare(self):
+ self.run_one(bare=False, var=self.tmpdir,
+ output=os.path.join(self.tmpdir, ".git") + "\n")
+
+
+ def test_nonbare_git(self):
+ self.run_one(bare=False, var=os.path.join(self.tmpdir, ".git"),
+ output=os.path.join(self.tmpdir, ".git") + "\n")
+
+
+if __name__ == '__main__':
+ # Run a single testcase
+ suite = unittest.TestLoader().loadTestsFromTestCase(TestLinuxGit)
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/tests/test_log2.py b/scripts/tests/test_log2.py
new file mode 100755
index 0000000000..8c592efa19
--- /dev/null
+++ b/scripts/tests/test_log2.py
@@ -0,0 +1,571 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os.path
+import shutil
+import subprocess
+import tempfile
+import unittest
+import stat
+import sys
+
+import lib
+
+
+class TestSpliceSeries(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.covdir = tempfile.mkdtemp(prefix="gs_log2_cov")
+
+
+ @classmethod
+ def tearDownClass(cls):
+ print("Coverage report in %s. Press enter when done with it." %
+ (cls.covdir,))
+ sys.stdin.readline()
+ shutil.rmtree(cls.covdir)
+
+
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp(prefix="gs_log2")
+ os.chdir(self.tmpdir)
+ self.log2_path = os.path.join( lib.libdir(), "../log2")
+
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+
+
+ def test_errors(self):
+ vectors = (
+ # missing in matching
+ (
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+""",
+""" patches.fixes/0
+ patches.fixes/1
+""",
+ "patches.fixes/3",
+ "Error: new series does not contain all lines from old "
+ "series.\n",
+ ),
+ # missing in diff
+ (
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+""",
+""" patches.fixes/0
+ patches.fixes/2
+ patches.fixes/3
+""",
+ "patches.fixes/4",
+ "Error: new series does not contain all lines from old "
+ "series.\n",
+ ),
+ # patch not found
+ (
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+""",
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+""",
+ "patches.fixes/3",
+ "Error: patch \"patches.fixes/3\" not found in series.\n",
+ ),
+ )
+
+ for i in range(len(vectors)):
+ old, new, patch, msg = vectors[i]
+ with self.subTest(vector=i):
+ with open("old", mode="w") as f:
+ f.write(old)
+
+ with open("new", mode="w") as f:
+ f.write(new)
+
+ with open("test.sh", mode="w") as f:
+ f.write(
+ """#!/bin/bash
+
+ . %s
+ splice_series %s 3<old 4<new\n""" % (
+ self.log2_path, patch,))
+ os.chmod("test.sh", stat.S_IRWXU)
+
+ sp = subprocess.Popen(
+ ["kcov", "--include-path=%s" % (self.log2_path,),
+ self.__class__.covdir, "test.sh"],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = sp.communicate()
+ retval = sp.wait()
+
+ self.assertEqual(retval, 1)
+ self.assertEqual(msg, err.decode())
+
+
+ def test_simple(self):
+ vectors = (
+ # append
+ (
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+""",
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+""",
+ "patches.fixes/3",),
+ # append
+ (
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+
+""",
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+
+""",
+ "patches.fixes/3",),
+ # prepend
+ (
+""" patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+""",
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+""",
+ "patches.fixes/0",),
+ # insert
+ (
+""" patches.fixes/0
+ patches.fixes/2
+ patches.fixes/3
+""",
+""" patches.fixes/0
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+""",
+ "patches.fixes/1",),
+ # with sections
+ (
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/3
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+""",
+ "patches.fixes/2",),
+ # add section
+ (
+""" patches.fixes/0
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+""",
+ "patches.fixes/1",),
+ )
+
+ for i in range(len(vectors)):
+ old, new, patch = vectors[i]
+ with self.subTest(vector=i):
+ with open("old", mode="w") as f:
+ f.write(old)
+
+ with open("new", mode="w") as f:
+ f.write(new)
+
+ with open("test.sh", mode="w") as f:
+ f.write(
+ """#!/bin/bash
+
+ . %s
+ splice_series %s 3<old 4<new\n""" % (
+ self.log2_path, patch,))
+ os.chmod("test.sh", stat.S_IRWXU)
+
+ retval = subprocess.check_output(
+ ["kcov", "--include-path=%s" % (self.log2_path,),
+ self.__class__.covdir, "test.sh"])
+ self.assertEqual(new, retval.decode())
+
+
+ def test_intermediate(self):
+ vectors = (
+ # start of new section
+ (
+""" patches.fixes/0
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/1",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ # end of new section
+ (
+""" patches.fixes/0
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/2",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ # middle of new section
+ (
+""" patches.fixes/0
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/2",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ # end of existing section
+ (
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/2",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ (
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/3",
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/1
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ # spread in different places
+ (
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+ patches.fixes/1
+
+ # jejb/scsi for-next
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/1",
+""" patches.fixes/0
+ patches.fixes/1
+
+ # jejb/scsi for-next
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ (
+""" patches.fixes/0
+
+ # jejb/scsi for-next
+ patches.fixes/2
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+""" patches.fixes/0
+ patches.fixes/1
+
+ # jejb/scsi for-next
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",
+ "patches.fixes/3",
+""" patches.fixes/0
+ patches.fixes/1
+
+ # jejb/scsi for-next
+ patches.fixes/2
+ patches.fixes/3
+
+ # out-of-tree patches
+ patches.drivers/0
+ patches.drivers/1
+""",),
+ # two new sections
+ (
+""" patches.fixes/0
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+ patches.drivers/2
+
+ # davem/net-next
+ patches.drivers/3
+ patches.drivers/4
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",
+ "patches.drivers/1",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",),
+ # eof in whitespace
+ (
+""" patches.fixes/0
+""",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+ patches.drivers/2
+
+""",
+ "patches.drivers/1",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+
+""",),
+ # two new sections, multi-line whitespace
+ (
+""" patches.fixes/0
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+ patches.drivers/2
+
+
+ # davem/net-next
+ patches.drivers/3
+ patches.drivers/4
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",
+ "patches.drivers/1",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+
+
+ # out-of-tree patches
+ patches.suse/0
+ patches.suse/1
+""",),
+ # two new sections, eof in new
+ (
+""" patches.fixes/0
+""",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+ patches.drivers/2
+
+ # davem/net-next
+ patches.drivers/3
+ patches.drivers/4
+""",
+ "patches.drivers/1",
+""" patches.fixes/0
+
+ # davem/net
+ patches.drivers/1
+
+""",),
+ )
+
+ for i in range(len(vectors)):
+ old, new, patch, intermediate = vectors[i]
+ with self.subTest(vector=i):
+ with open("old", mode="w") as f:
+ f.write(old)
+
+ with open("new", mode="w") as f:
+ f.write(new)
+
+ with open("test.sh", mode="w") as f:
+ f.write(
+ """#!/bin/bash
+
+ . %s
+ splice_series %s 3<old 4<new\n""" % (
+ self.log2_path, patch,))
+ os.chmod("test.sh", stat.S_IRWXU)
+
+ retval = subprocess.check_output(
+ ["kcov", "--include-path=%s" % (self.log2_path,),
+ self.__class__.covdir, "test.sh"])
+ self.assertEqual(intermediate, retval.decode())
+
+
+if __name__ == '__main__':
+ # Run a single testcase
+ suite = unittest.TestLoader().loadTestsFromTestCase(TestSpliceSeries)
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/scripts/wd-functions.sh b/scripts/wd-functions.sh
index 3dfb0a657b..b5109e171b 100644
--- a/scripts/wd-functions.sh
+++ b/scripts/wd-functions.sh
@@ -71,12 +71,10 @@ _find_tarball()
_get_tarball_from_git()
{
local version=$1 tag url=$2 default_url
+ local libdir=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+ local git
- git=${LINUX_GIT:-$HOME/linux-2.6}
- if test ! -d "$git/.git"; then
- echo "No linux-2.6 git tree found (try setting the LINUX_GIT variable)" >&2
- exit 1
- fi
+ git=$("$libdir"/linux_git.sh) || exit 1
case "$version" in
*next-*)
tag=refs/tags/next-${version##*next-}
@@ -91,18 +89,18 @@ _get_tarball_from_git()
default_url=git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux-2.6.git
esac
[ -z "$url" ] && url=$default_url
- if ! git --git-dir="$git/.git" cat-file -e "$tag" 2>/dev/null; then
+ if ! git --git-dir="$git" cat-file -e "$tag" 2>/dev/null; then
case "$tag" in
refs/tags/*)
- git --git-dir="$git/.git" fetch "$url" "$tag:$tag"
+ git --git-dir="$git" fetch "$url" "$tag:$tag"
;;
*)
# v2.6.X.Y-rcZ-gabcdef1, not a real tag
- git --git-dir="$git/.git" fetch --tags "$url" \
+ git --git-dir="$git" fetch --tags "$url" \
refs/heads/master:refs/tags/latest
esac
fi
- git --git-dir="$git/.git" archive --prefix="linux-$version/" "$tag"
+ git --git-dir="$git" archive --prefix="linux-$version/" "$tag"
}
get_tarball()