create rootfs & SDK from 2018.02.9 buildroot (#10)

---------

Co-authored-by: tiopex <tiopxyz@gmail.com>
Co-authored-by: tiopex <67048640+tiopex@users.noreply.github.com>
Authored by: Apaczer
Date: 2023-03-11 21:06:02 +01:00
Committed by: GitHub
Parent: 534f7aea40
Commit: dcf31c6a1e
12528 changed files with 149033 additions and 303064 deletions

View File: utils/check-package

@@ -1,19 +1,16 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# See utils/checkpackagelib/readme.txt before editing this file.
from __future__ import print_function
import argparse
import inspect
import os
import re
import six
import sys
import checkpackagelib.base
import checkpackagelib.lib_config
import checkpackagelib.lib_hash
import checkpackagelib.lib_mk
import checkpackagelib.lib_patch
import checkpackagelib.lib_sysv
VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES = 3
flags = None # Command line arguments.
@@ -27,14 +24,10 @@ def parse_args():
parser.add_argument("files", metavar="F", type=str, nargs="*",
help="list of files")
parser.add_argument("--br2-external", "-b", dest='intree_only', action="store_false",
help="do not apply the pathname filters used for intree files")
parser.add_argument("--manual-url", action="store",
default="http://nightly.buildroot.org/",
help="default: %(default)s")
parser.add_argument("--verbose", "-v", action="count", default=0)
parser.add_argument("--quiet", "-q", action="count", default=0)
# Now the debug options in the order they are processed.
parser.add_argument("--include-only", dest="include_list", action="append",
@@ -47,39 +40,13 @@ def parse_args():
return parser.parse_args()
CONFIG_IN_FILENAME = re.compile(r"Config\.\S*$")
DO_CHECK_INTREE = re.compile(r"|".join([
r"Config.in",
r"arch/",
r"boot/",
r"fs/",
r"linux/",
r"package/",
r"system/",
r"toolchain/",
]))
DO_NOT_CHECK_INTREE = re.compile(r"|".join([
r"boot/barebox/barebox\.mk$",
r"fs/common\.mk$",
r"package/doc-asciidoc\.mk$",
r"package/pkg-\S*\.mk$",
r"toolchain/helpers\.mk$",
r"toolchain/toolchain-external/pkg-toolchain-external\.mk$",
]))
SYSV_INIT_SCRIPT_FILENAME = re.compile(r"/S\d\d[^/]+$")
CONFIG_IN_FILENAME = re.compile("/Config\.\S*$")
FILE_IS_FROM_A_PACKAGE = re.compile("package/[^/]*/")
def get_lib_from_filename(fname):
if flags.intree_only:
if DO_CHECK_INTREE.match(fname) is None:
return None
if DO_NOT_CHECK_INTREE.match(fname):
return None
else:
if os.path.basename(fname) == "external.mk" and \
os.path.exists(fname[:-2] + "desc"):
return None
if FILE_IS_FROM_A_PACKAGE.search(fname) is None:
return None
if CONFIG_IN_FILENAME.search(fname):
return checkpackagelib.lib_config
if fname.endswith(".hash"):
@@ -88,12 +55,12 @@ def get_lib_from_filename(fname):
return checkpackagelib.lib_mk
if fname.endswith(".patch"):
return checkpackagelib.lib_patch
if SYSV_INIT_SCRIPT_FILENAME.search(fname):
return checkpackagelib.lib_sysv
return None
def common_inspect_rules(m):
def is_a_check_function(m):
if not inspect.isclass(m):
return False
# do not call the base class
if m.__name__.startswith("_"):
return False
@@ -104,22 +71,6 @@ def common_inspect_rules(m):
return True
def is_a_check_function(m):
if not inspect.isclass(m):
return False
if not issubclass(m, checkpackagelib.base._CheckFunction):
return False
return common_inspect_rules(m)
def is_external_tool(m):
if not inspect.isclass(m):
return False
if not issubclass(m, checkpackagelib.base._Tool):
return False
return common_inspect_rules(m)
def print_warnings(warnings):
# Avoid the need to use 'return []' at the end of every check function.
if warnings is None:
@@ -141,40 +92,24 @@ def check_file_using_lib(fname):
if flags.verbose >= VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES:
print("{}: ignored".format(fname))
return nwarnings, nlines
internal_functions = inspect.getmembers(lib, is_a_check_function)
external_tools = inspect.getmembers(lib, is_external_tool)
all_checks = internal_functions + external_tools
classes = inspect.getmembers(lib, is_a_check_function)
if flags.dry_run:
functions_to_run = [c[0] for c in all_checks]
functions_to_run = [c[0] for c in classes]
print("{}: would run: {}".format(fname, functions_to_run))
return nwarnings, nlines
objects = [c[1](fname, flags.manual_url) for c in internal_functions]
objects = [c[1](fname, flags.manual_url) for c in classes]
for cf in objects:
nwarnings += print_warnings(cf.before())
if six.PY3:
f = open(fname, "r", errors="surrogateescape")
else:
f = open(fname, "r")
lastline = ""
for lineno, text in enumerate(f.readlines()):
for lineno, text in enumerate(open(fname, "r").readlines()):
nlines += 1
for cf in objects:
if cf.disable.search(lastline):
continue
nwarnings += print_warnings(cf.check_line(lineno + 1, text))
lastline = text
f.close()
for cf in objects:
nwarnings += print_warnings(cf.after())
tools = [c[1](fname) for c in external_tools]
for tool in tools:
nwarnings += print_warnings(tool.run())
return nwarnings, nlines
@@ -182,16 +117,7 @@ def __main__():
global flags
flags = parse_args()
if flags.intree_only:
# change all paths received to be relative to the base dir
base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
files_to_check = [os.path.relpath(os.path.abspath(f), base_dir) for f in flags.files]
# move current dir so the script find the files
os.chdir(base_dir)
else:
files_to_check = flags.files
if len(files_to_check) == 0:
if len(flags.files) == 0:
print("No files to check style")
sys.exit(1)
@@ -199,7 +125,7 @@ def __main__():
total_warnings = 0
total_lines = 0
for fname in files_to_check:
for fname in flags.files:
nwarnings, nlines = check_file_using_lib(fname)
total_warnings += nwarnings
total_lines += nlines
@@ -208,10 +134,8 @@ def __main__():
# (e.g. counted by 'wc'), so for stats use stderr. Wait all warnings are
# printed, for the case there are many of them, before printing stats.
sys.stdout.flush()
if not flags.quiet:
print("{} lines processed".format(total_lines), file=sys.stderr)
print("{} warnings generated".format(total_warnings), file=sys.stderr)
print("{} lines processed".format(total_lines), file=sys.stderr)
print("{} warnings generated".format(total_warnings), file=sys.stderr)
if total_warnings > 0:
sys.exit(1)
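For orientation, below is a minimal, self-contained sketch of the filename dispatch that the hunks above revert. The regexes are copied from the newer version visible in the diff; the pick_lib() helper, its string labels and the sample paths are illustrative additions, not code from either version of the script.

import re

# Intree path filters as they appear in the newer version of utils/check-package.
DO_CHECK_INTREE = re.compile(r"|".join([
    r"Config.in", r"arch/", r"boot/", r"fs/", r"linux/",
    r"package/", r"system/", r"toolchain/",
]))
DO_NOT_CHECK_INTREE = re.compile(r"|".join([
    r"boot/barebox/barebox\.mk$", r"fs/common\.mk$",
    r"package/doc-asciidoc\.mk$", r"package/pkg-\S*\.mk$",
    r"toolchain/helpers\.mk$",
    r"toolchain/toolchain-external/pkg-toolchain-external\.mk$",
]))
CONFIG_IN_FILENAME = re.compile(r"Config\.\S*$")
SYSV_INIT_SCRIPT_FILENAME = re.compile(r"/S\d\d[^/]+$")

def pick_lib(fname):
    # Return a label naming the checkpackagelib module that would handle fname.
    if DO_CHECK_INTREE.match(fname) is None or DO_NOT_CHECK_INTREE.match(fname):
        return None                                # not checked, or explicitly excluded
    if CONFIG_IN_FILENAME.search(fname):
        return "lib_config"
    if fname.endswith(".hash"):
        return "lib_hash"
    if fname.endswith(".mk"):
        return "lib_mk"
    if fname.endswith(".patch"):
        return "lib_patch"
    if SYSV_INIT_SCRIPT_FILENAME.search(fname):
        return "lib_sysv"                          # only exists in the newer version
    return None

print(pick_lib("package/busybox/Config.in"))   # lib_config
print(pick_lib("package/busybox/busybox.mk"))  # lib_mk
print(pick_lib("package/busybox/S01logging"))  # lib_sysv
print(pick_lib("package/pkg-generic.mk"))      # None: infrastructure file, excluded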

View File: utils/checkpackagelib/base.py

@@ -1,12 +1,10 @@
# See utils/checkpackagelib/readme.txt before editing this file.
import re
class _CheckFunction(object):
def __init__(self, filename, url_to_manual):
self.filename = filename
self.url_to_manual = url_to_manual
self.disable = re.compile(r"^\s*# check-package .*\b{}\b".format(self.__class__.__name__))
def before(self):
pass
@@ -16,14 +14,3 @@ class _CheckFunction(object):
def after(self):
pass
class _Tool(object):
def __init__(self, filename):
self.filename = filename
def run(self):
pass
def hint(self):
return ""

View File: utils/checkpackagelib/lib.py

@@ -1,6 +1,6 @@
# See utils/checkpackagelib/readme.txt before editing this file.
from checkpackagelib.base import _CheckFunction
from base import _CheckFunction
class ConsecutiveEmptyLines(_CheckFunction):
@@ -52,17 +52,3 @@ class TrailingSpace(_CheckFunction):
return ["{}:{}: line contains trailing whitespace"
.format(self.filename, lineno),
text]
class Utf8Characters(_CheckFunction):
def is_ascii(self, s):
try:
return all(ord(c) < 128 for c in s)
except TypeError:
return False
def check_line(self, lineno, text):
if not self.is_ascii(text):
return ["{}:{}: line contains UTF-8 characters"
.format(self.filename, lineno),
text]

View File: utils/checkpackagelib/lib_config.py

@@ -5,12 +5,11 @@
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
def _empty_or_comment(text):
@@ -61,100 +60,9 @@ class AttributesOrder(_CheckFunction):
text]
class CommentsMenusPackagesOrder(_CheckFunction):
def before(self):
self.level = 0
self.menu_of_packages = ["The top level menu"]
self.new_package = ""
self.package = [""]
self.print_package_warning = [True]
self.state = ""
def get_level(self):
return len(self.state.split('-')) - 1
def initialize_package_level_elements(self, text):
try:
self.menu_of_packages[self.level] = text[:-1]
self.package[self.level] = ""
self.print_package_warning[self.level] = True
except IndexError:
self.menu_of_packages.append(text[:-1])
self.package.append("")
self.print_package_warning.append(True)
def initialize_level_elements(self, text):
self.level = self.get_level()
self.initialize_package_level_elements(text)
def check_line(self, lineno, text):
# We only want to force sorting for the top-level menus
if self.filename not in ["fs/Config.in",
"package/Config.in",
"package/Config.in.host",
"package/kodi/Config.in"]:
return
source_line = re.match(r'^\s*source ".*/([^/]*)/Config.in(.host)?"', text)
if text.startswith("comment "):
if not self.state.endswith("-comment"):
self.state += "-comment"
self.initialize_level_elements(text)
elif text.startswith("if "):
self.state += "-if"
self.initialize_level_elements(text)
elif text.startswith("menu "):
if self.state.endswith("-comment"):
self.state = self.state[:-8]
self.state += "-menu"
self.initialize_level_elements(text)
elif text.startswith("endif") or text.startswith("endmenu"):
if self.state.endswith("-comment"):
self.state = self.state[:-8]
if text.startswith("endif"):
self.state = self.state[:-3]
elif text.startswith("endmenu"):
self.state = self.state[:-5]
self.level = self.get_level()
elif source_line:
self.new_package = source_line.group(1)
# We order _ before A, so replace it with .
new_package_ord = self.new_package.replace('_', '.')
if self.package[self.level] != "" and \
self.print_package_warning[self.level] and \
new_package_ord < self.package[self.level]:
self.print_package_warning[self.level] = False
prefix = "{}:{}: ".format(self.filename, lineno)
spaces = " " * len(prefix)
return ["{prefix}Packages in: {menu},\n"
"{spaces}are not alphabetically ordered;\n"
"{spaces}correct order: '-', '_', digits, capitals, lowercase;\n"
"{spaces}first incorrect package: {package}"
.format(prefix=prefix, spaces=spaces,
menu=self.menu_of_packages[self.level],
package=self.new_package),
text]
self.package[self.level] = new_package_ord
class HelpText(_CheckFunction):
HELP_TEXT_FORMAT = re.compile(r"^\t .{,62}$")
URL_ONLY = re.compile(r"^(http|https|git)://\S*$")
HELP_TEXT_FORMAT = re.compile("^\t .{,62}$")
URL_ONLY = re.compile("^(http|https|git)://\S*$")
def before(self):
self.help_text = False
@@ -224,12 +132,6 @@ class Indent(_CheckFunction):
text]
elif entry in entries_that_should_not_be_indented:
if not text.startswith(entry):
# four Config.in files have a special but legitimate indentation rule
if self.filename in ["package/Config.in",
"package/Config.in.host",
"package/kodi/Config.in",
"package/x11r7/Config.in"]:
return
return ["{}:{}: should not be indented"
.format(self.filename, lineno),
text]
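The removed CommentsMenusPackagesOrder check enforces the ordering named in its warning text ('-', '_', digits, capitals, lowercase) by comparing package names with '_' replaced by '.'. A small standalone illustration of that comparison trick, using made-up package names:

# ASCII already sorts '-' (0x2D) before '.' (0x2E), digits, capitals and
# lowercase; mapping '_' (0x5F) to '.' is what moves it ahead of the digits.
names = ["aaa", "a1a", "aAa", "a_a", "a-a"]

ordered = sorted(names, key=lambda name: name.replace("_", "."))
print(ordered)         # ['a-a', 'a_a', 'a1a', 'aAa', 'aaa'] -- the order the check expects

print(sorted(names))   # ['a-a', 'a1a', 'aAa', 'a_a', 'aaa'] -- plain ASCII order puts '_' too late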

View File: utils/checkpackagelib/lib_hash.py

@@ -5,12 +5,11 @@
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
def _empty_line_or_comment(text):
@@ -42,6 +41,8 @@ class HashType(_CheckFunction):
return
htype, hexa = fields[:2]
if htype == "none":
return
if htype not in self.len_of_hash.keys():
return ["{}:{}: unexpected type of hash ({}#adding-packages-hash)"
.format(self.filename, lineno, self.url_to_manual),
@@ -52,19 +53,3 @@ class HashType(_CheckFunction):
.format(self.filename, lineno, self.url_to_manual),
text,
"expected {} hex digits".format(self.len_of_hash[htype])]
class HashSpaces(_CheckFunction):
def check_line(self, lineno, text):
if _empty_line_or_comment(text):
return
fields = text.split()
if len(fields) != 3:
# Handled by HashNumberOfFields
return
if not re.match(re.escape("{} {} {}".format(*fields)), text):
return ["{}:{}: separation does not match expectation "
"({}#adding-packages-hash)"
.format(self.filename, lineno, self.url_to_manual), text]
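HashType validates the digest length against a len_of_hash table that is not visible in this diff. The lengths below are the standard hex-digit counts per algorithm (an assumption consistent with the removed tests later in this commit), and validate() is only a sketch of the same idea, not the real method:

import re

# Assumed contents of len_of_hash: standard hex-digit lengths per hash type.
LEN_OF_HASH = {"md5": 32, "sha1": 40, "sha224": 56, "sha256": 64,
               "sha384": 96, "sha512": 128}

def validate(line):
    fields = line.split()
    if len(fields) < 2:
        return "ignored"                 # field count is HashNumberOfFields' job
    htype, hexa = fields[:2]
    if htype == "none":
        return "ok"                      # the newer code skips 'none' entries (see hunk above)
    if htype not in LEN_OF_HASH:
        return "unexpected type of hash"
    if not re.match("^[0-9a-fA-F]{%d}$" % LEN_OF_HASH[htype], hexa):
        return "hash size does not match type"
    return "ok"

print(validate("md5 12345678901234567890123456789012 foo.tar.gz"))  # ok
print(validate("md5 123456 foo.tar.gz"))                            # hash size does not match type
print(validate("md5sum 123456 foo.tar.gz"))                         # unexpected type of hash
print(validate("none xxx foo.tar.gz"))                              # ok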

View File: utils/checkpackagelib/lib_mk.py

@@ -4,30 +4,22 @@
# menu options using "make menuconfig" and by running "make" with appropriate
# packages enabled.
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.lib import Utf8Characters # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
# used in more than one check
start_conditional = ["ifdef", "ifeq", "ifndef", "ifneq"]
continue_conditional = ["elif", "else"]
end_conditional = ["endif"]
from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
class Indent(_CheckFunction):
COMMENT = re.compile(r"^\s*#")
CONDITIONAL = re.compile(r"^\s*({})\s".format("|".join(start_conditional + end_conditional + continue_conditional)))
COMMENT = re.compile("^\s*#")
CONDITIONAL = re.compile("^\s*(ifeq|ifneq|endif)\s")
ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
END_DEFINE = re.compile(r"^\s*endef\s")
MAKEFILE_TARGET = re.compile(r"^[^# \t]+:\s")
START_DEFINE = re.compile(r"^\s*define\s")
END_DEFINE = re.compile("^\s*endef\s")
MAKEFILE_TARGET = re.compile("^[^# \t]+:\s")
START_DEFINE = re.compile("^\s*define\s")
def before(self):
self.define = False
@@ -45,7 +37,7 @@ class Indent(_CheckFunction):
expect_tabs = False
if self.define or self.backslash or self.makefile_target:
expect_tabs = True
if not self.backslash and self.CONDITIONAL.search(text):
if self.CONDITIONAL.search(text):
expect_tabs = False
# calculate for next line
@@ -77,75 +69,6 @@ class Indent(_CheckFunction):
text]
class OverriddenVariable(_CheckFunction):
CONCATENATING = re.compile(r"^([A-Z0-9_]+)\s*(\+|:|)=\s*\$\(\1\)")
END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
OVERRIDING_ASSIGNMENTS = [':=', "="]
START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))
VARIABLE = re.compile(r"^([A-Z0-9_]+)\s*((\+|:|)=)")
USUALLY_OVERRIDDEN = re.compile(r"^[A-Z0-9_]+({})".format("|".join([
r"_ARCH\s*=\s*",
r"_CPU\s*=\s*",
r"_SITE\s*=\s*",
r"_SOURCE\s*=\s*",
r"_VERSION\s*=\s*"])))
FORBIDDEN_OVERRIDDEN = re.compile(r"^[A-Z0-9_]+({})".format("|".join([
r"_CONF_OPTS\s*=\s*",
r"_DEPENDENCIES\s*=\s*"])))
def before(self):
self.conditional = 0
self.unconditionally_set = []
self.conditionally_set = []
def check_line(self, lineno, text):
if self.START_CONDITIONAL.search(text):
self.conditional += 1
return
if self.END_CONDITIONAL.search(text):
self.conditional -= 1
return
m = self.VARIABLE.search(text)
if m is None:
return
variable, assignment = m.group(1, 2)
if self.conditional == 0:
if variable in self.conditionally_set:
self.unconditionally_set.append(variable)
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {} previously conditionally set"
.format(self.filename, lineno, variable),
text]
if variable not in self.unconditionally_set:
self.unconditionally_set.append(variable)
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {}"
.format(self.filename, lineno, variable),
text]
else:
if self.FORBIDDEN_OVERRIDDEN.search(text):
return ["{}:{}: conditional override of variable {}"
.format(self.filename, lineno, variable),
text]
if variable not in self.unconditionally_set:
self.conditionally_set.append(variable)
return
if self.CONCATENATING.search(text):
return ["{}:{}: immediate assignment to append to variable {}"
.format(self.filename, lineno, variable),
text]
if self.USUALLY_OVERRIDDEN.search(text):
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: conditional override of variable {}"
.format(self.filename, lineno, variable),
text]
class PackageHeader(_CheckFunction):
def before(self):
self.skip = False
@@ -177,13 +100,14 @@ class PackageHeader(_CheckFunction):
class RemoveDefaultPackageSourceVariable(_CheckFunction):
packages_that_may_contain_default_source = ["binutils", "gcc", "gdb"]
PACKAGE_NAME = re.compile("/([^/]+)\.mk")
def before(self):
package, _ = os.path.splitext(os.path.basename(self.filename))
package = self.PACKAGE_NAME.search(self.filename).group(1)
package_upper = package.replace("-", "_").upper()
self.package = package
self.FIND_SOURCE = re.compile(
r"^{}_SOURCE\s*=\s*{}-\$\({}_VERSION\)\.tar\.gz"
"^{}_SOURCE\s*=\s*{}-\$\({}_VERSION\)\.tar\.gz"
.format(package_upper, package, package_upper))
def check_line(self, lineno, text):
@@ -199,7 +123,7 @@ class RemoveDefaultPackageSourceVariable(_CheckFunction):
class SpaceBeforeBackslash(_CheckFunction):
TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*( |\t ?)\\$")
TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*( |\t)\\$")
def check_line(self, lineno, text):
if self.TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH.match(text.rstrip()):
@@ -231,39 +155,31 @@ class TrailingBackslash(_CheckFunction):
class TypoInPackageVariable(_CheckFunction):
ALLOWED = re.compile(r"|".join([
ALLOWED = re.compile("|".join([
"ACLOCAL_DIR",
"ACLOCAL_HOST_DIR",
"ACLOCAL_PATH",
"BR_CCACHE_INITIAL_SETUP",
"BR_LIBC",
"BR_NO_CHECK_HASH_FOR",
"GCC_TARGET",
"LINUX_EXTENSIONS",
"LINUX_POST_PATCH_HOOKS",
"LINUX_TOOLS",
"LUA_RUN",
"MKFS_JFFS2",
"MKIMAGE_ARCH",
"PACKAGES_PERMISSIONS_TABLE",
"PKG_CONFIG_HOST_BINARY",
"SUMTOOL",
"TARGET_FINALIZE_HOOKS",
"TARGETS_ROOTFS",
"XTENSA_CORE_NAME"]))
VARIABLE = re.compile(r"^(define\s+)?([A-Z0-9_]+_[A-Z0-9_]+)")
PACKAGE_NAME = re.compile("/([^/]+)\.mk")
VARIABLE = re.compile("^([A-Z0-9_]+_[A-Z0-9_]+)\s*(\+|)=")
def before(self):
package, _ = os.path.splitext(os.path.basename(self.filename))
package = self.PACKAGE_NAME.search(self.filename).group(1)
package = package.replace("-", "_").upper()
# linux tools do not use LINUX_TOOL_ prefix for variables
package = package.replace("LINUX_TOOL_", "")
# linux extensions do not use LINUX_EXT_ prefix for variables
package = package.replace("LINUX_EXT_", "")
self.package = package
self.REGEX = re.compile(r"(HOST_|ROOTFS_)?({}_[A-Z0-9_]+)".format(package))
self.REGEX = re.compile("^(HOST_)?({}_[A-Z0-9_]+)".format(package))
self.FIND_VIRTUAL = re.compile(
r"^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
"^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
self.virtual = []
def check_line(self, lineno, text):
@@ -271,7 +187,7 @@ class TypoInPackageVariable(_CheckFunction):
if m is None:
return
variable = m.group(2)
variable = m.group(1)
# allow to set variables for virtual package this package provides
v = self.FIND_VIRTUAL.search(text)
@@ -291,16 +207,16 @@ class TypoInPackageVariable(_CheckFunction):
class UselessFlag(_CheckFunction):
DEFAULT_AUTOTOOLS_FLAG = re.compile(r"^.*{}".format("|".join([
r"_AUTORECONF\s*=\s*NO",
r"_LIBTOOL_PATCH\s*=\s*YES"])))
DEFAULT_GENERIC_FLAG = re.compile(r"^.*{}".format("|".join([
r"_INSTALL_IMAGES\s*=\s*NO",
r"_INSTALL_REDISTRIBUTE\s*=\s*YES",
r"_INSTALL_STAGING\s*=\s*NO",
r"_INSTALL_TARGET\s*=\s*YES"])))
END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))
DEFAULT_AUTOTOOLS_FLAG = re.compile("^.*{}".format("|".join([
"_AUTORECONF\s*=\s*NO",
"_LIBTOOL_PATCH\s*=\s*YES"])))
DEFAULT_GENERIC_FLAG = re.compile("^.*{}".format("|".join([
"_INSTALL_IMAGES\s*=\s*NO",
"_INSTALL_REDISTRIBUTE\s*=\s*YES",
"_INSTALL_STAGING\s*=\s*NO",
"_INSTALL_TARGET\s*=\s*YES"])))
END_CONDITIONAL = re.compile("^\s*(endif)")
START_CONDITIONAL = re.compile("^\s*(ifeq|ifneq)")
def before(self):
self.conditional = 0
@@ -328,13 +244,3 @@ class UselessFlag(_CheckFunction):
"({}#_infrastructure_for_autotools_based_packages)"
.format(self.filename, lineno, self.url_to_manual),
text]
class VariableWithBraces(_CheckFunction):
VARIABLE_WITH_BRACES = re.compile(r"^[^#].*[^$]\${\w+}")
def check_line(self, lineno, text):
if self.VARIABLE_WITH_BRACES.match(text.rstrip()):
return ["{}:{}: use $() to delimit variables, not ${{}}"
.format(self.filename, lineno),
text]
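Both RemoveDefaultPackageSourceVariable and TypoInPackageVariable derive the expected variable prefix from the .mk filename, and the hunks above swap an os.path based derivation (newer code) for a /([^/]+)\.mk regex (2018 code). Below is a standalone sketch of the two derivations and of how the prefix then feeds the typo check; the package name and the sample variable are illustrative:

import os
import re

PACKAGE_NAME = re.compile(r"/([^/]+)\.mk")   # 2018-style extraction (needs a '/')

def prefix_new(fname):
    # Newer check-package: basename + splitext, works with or without a path.
    package, _ = os.path.splitext(os.path.basename(fname))
    return package.replace("-", "_").upper()

def prefix_old(fname):
    # 2018 code: regex on the full path; only matches when a '/' is present.
    m = PACKAGE_NAME.search(fname)
    return m.group(1).replace("-", "_").upper() if m else None

for fname in ["package/python-subprocess32/python-subprocess32.mk",
              "python-subprocess32.mk"]:
    print(fname, prefix_new(fname), prefix_old(fname))
# Both yield PYTHON_SUBPROCESS32 for the full path; only the newer variant
# also handles a bare filename (the regex finds no '/' and returns None here).

# The derived prefix is what flags likely typos in variable names:
typo_check = re.compile(r"^(HOST_)?({}_[A-Z0-9_]+)".format("PYTHON_SUBPROCESS32"))
print(bool(typo_check.search("PYTHON_SUBPROCES32_VERSION = 3.5.4\n")))  # False -> typo warning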

View File: utils/checkpackagelib/lib_patch.py

@@ -3,26 +3,24 @@
# functions don't need to check for things already checked by running
# "make package-dirclean package-patch".
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
from base import _CheckFunction
from lib import NewlineAtEof # noqa: F401
class ApplyOrder(_CheckFunction):
APPLY_ORDER = re.compile(r"\d{1,4}-[^/]*$")
APPLY_ORDER = re.compile("/\d{1,4}-[^/]*$")
def before(self):
if not self.APPLY_ORDER.match(os.path.basename(self.filename)):
if not self.APPLY_ORDER.search(self.filename):
return ["{}:0: use name <number>-<description>.patch "
"({}#_providing_patches)"
.format(self.filename, self.url_to_manual)]
class NumberedSubject(_CheckFunction):
NUMBERED_PATCH = re.compile(r"Subject:\s*\[PATCH\s*\d+/\d+\]")
NUMBERED_PATCH = re.compile("Subject:\s*\[PATCH\s*\d+/\d+\]")
def before(self):
self.git_patch = False
@@ -45,7 +43,7 @@ class NumberedSubject(_CheckFunction):
class Sob(_CheckFunction):
SOB_ENTRY = re.compile(r"^Signed-off-by: .*$")
SOB_ENTRY = re.compile("^Signed-off-by: .*$")
def before(self):
self.found = False
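The ApplyOrder hunk above swaps both the regular expression and the match()/search() call. A tiny standalone comparison of the two variants on made-up patch names:

import os
import re

new_style = re.compile(r"\d{1,4}-[^/]*$")    # newer code: .match() on the basename
old_style = re.compile(r"/\d{1,4}-[^/]*$")   # 2018 code being restored: .search() on the path

for fname in ["package/foo/0001-fix-build.patch",       # accepted by both variants
              "0001-fix-build.patch",                    # no '/': only the newer variant accepts it
              "package/foo/foo-0001-fix-build.patch"]:   # old-style name: both variants warn
    ok_new = bool(new_style.match(os.path.basename(fname)))
    ok_old = bool(old_style.search(fname))
    print("{:45} new:{} old:{}".format(fname, ok_new, ok_old))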

View File: utils/checkpackagelib/lib_sysv.py (deleted by this commit)

@@ -1,69 +0,0 @@
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
import checkpackagelib.tool
from checkpackagelib.tool import Shellcheck # noqa: F401
class Indent(_CheckFunction):
INDENTED_WITH_SPACES = re.compile(r"^[\t]* ")
def check_line(self, lineno, text):
if self.INDENTED_WITH_SPACES.search(text.rstrip()):
return ["{}:{}: should be indented with tabs ({}#adding-packages-start-script)"
.format(self.filename, lineno, self.url_to_manual),
text]
class NotExecutable(checkpackagelib.tool.NotExecutable):
def hint(self):
return ", just make sure you use '$(INSTALL) -D -m 0755' in the .mk file"
class Variables(_CheckFunction):
DAEMON_VAR = re.compile(r"^DAEMON=[\"']{0,1}([^\"']*)[\"']{0,1}")
PIDFILE_PATTERN = re.compile(r"/var/run/(\$DAEMON|\$\{DAEMON\}).pid")
PIDFILE_VAR = re.compile(r"^PIDFILE=[\"']{0,1}([^\"']*)[\"']{0,1}")
def before(self):
self.name = None
def check_line(self, lineno, text):
name_found = self.DAEMON_VAR.search(text.rstrip())
if name_found:
if self.name:
return ["{}:{}: DAEMON variable redefined ({}#adding-packages-start-script)"
.format(self.filename, lineno, self.url_to_manual),
text]
self.name = name_found.group(1)
if '/' in self.name:
self.name = os.path.basename(self.name) # to be used in after() to check the expected filename
return ["{}:{}: Do not include path in DAEMON ({}#adding-packages-start-script)"
.format(self.filename, lineno, self.url_to_manual),
text,
'DAEMON="{}"'.format(self.name)]
return
pidfile_found = self.PIDFILE_VAR.search(text.rstrip())
if pidfile_found:
pidfile = pidfile_found.group(1)
if not self.PIDFILE_PATTERN.match(pidfile):
return ["{}:{}: Incorrect PIDFILE value ({}#adding-packages-start-script)"
.format(self.filename, lineno, self.url_to_manual),
text,
'PIDFILE="/var/run/$DAEMON.pid"']
def after(self):
if self.name is None:
return ["{}:0: DAEMON variable not defined ({}#adding-packages-start-script)"
.format(self.filename, self.url_to_manual)]
expected_filename = re.compile(r"S\d\d{}$".format(self.name))
if not expected_filename.match(os.path.basename(self.filename)):
return ["{}:0: filename should be S<number><number><daemon name> ({}#adding-packages-start-script)"
.format(self.filename, self.url_to_manual),
"expecting S<number><number>{}".format(self.name)]

View File: utils/checkpackagelib/test_lib.py (deleted by this commit)

@@ -1,212 +0,0 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib as m
ConsecutiveEmptyLines = [
('1 line (no newline)',
'any',
'',
[]),
('1 line',
'any',
'\n',
[]),
('2 lines',
'any',
'\n'
'\n',
[['any:2: consecutive empty lines']]),
('more than 2 consecutive',
'any',
'\n'
'\n'
'\n',
[['any:2: consecutive empty lines'],
['any:3: consecutive empty lines']]),
('ignore whitespace 1',
'any',
'\n'
' ',
[['any:2: consecutive empty lines']]),
('ignore whitespace 2',
'any',
' \n'
'\t\n',
[['any:2: consecutive empty lines']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', ConsecutiveEmptyLines)
def test_ConsecutiveEmptyLines(testname, filename, string, expected):
warnings = util.check_file(m.ConsecutiveEmptyLines, filename, string)
assert warnings == expected
EmptyLastLine = [
('ignore empty file',
'any',
'',
[]),
('empty line (newline)',
'any',
'\n',
[['any:1: empty line at end of file']]),
('empty line (space, newline)',
'any',
' \n',
[['any:1: empty line at end of file']]),
('empty line (space, no newline)',
'any',
' ',
[['any:1: empty line at end of file']]),
('warn for the last of 2',
'any',
'\n'
'\n',
[['any:2: empty line at end of file']]),
('warn for the last of 3',
'any',
'\n'
'\n'
'\n',
[['any:3: empty line at end of file']]),
('ignore whitespace',
'any',
' \n'
'\t\n',
[['any:2: empty line at end of file']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', EmptyLastLine)
def test_EmptyLastLine(testname, filename, string, expected):
warnings = util.check_file(m.EmptyLastLine, filename, string)
assert warnings == expected
NewlineAtEof = [
('good',
'any',
'text\n',
[]),
('text (bad)',
'any',
'\n'
'text',
[['any:2: missing newline at end of file',
'text']]),
('space (bad)',
'any',
'\n'
' ',
[['any:2: missing newline at end of file',
' ']]),
('tab (bad)',
'any',
'\n'
'\t',
[['any:2: missing newline at end of file',
'\t']]),
('even for file with one line',
'any',
' ',
[['any:1: missing newline at end of file',
' ']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', NewlineAtEof)
def test_NewlineAtEof(testname, filename, string, expected):
warnings = util.check_file(m.NewlineAtEof, filename, string)
assert warnings == expected
TrailingSpace = [
('good',
'any',
'text\n',
[]),
('ignore missing newline',
'any',
'\n'
'text',
[]),
('spaces',
'any',
'text \n',
[['any:1: line contains trailing whitespace',
'text \n']]),
('tabs after text',
'any',
'text\t\t\n',
[['any:1: line contains trailing whitespace',
'text\t\t\n']]),
('mix of tabs and spaces',
'any',
' \n'
' ',
[['any:1: line contains trailing whitespace',
' \n'],
['any:2: line contains trailing whitespace',
' ']]),
('blank line with tabs',
'any',
'\n'
'\t',
[['any:2: line contains trailing whitespace',
'\t']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', TrailingSpace)
def test_TrailingSpace(testname, filename, string, expected):
warnings = util.check_file(m.TrailingSpace, filename, string)
assert warnings == expected
Utf8Characters = [
('usual',
'any',
'text\n',
[]),
('acceptable character',
'any',
'\x60',
[]),
('unacceptable character',
'any',
'\x81',
[['any:1: line contains UTF-8 characters',
'\x81']]),
('2 warnings',
'any',
'text\n'
'text \xc8 text\n'
'\xc9\n',
[['any:2: line contains UTF-8 characters',
'text \xc8 text\n'],
['any:3: line contains UTF-8 characters',
'\xc9\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Utf8Characters)
def test_Utf8Characters(testname, filename, string, expected):
warnings = util.check_file(m.Utf8Characters, filename, string)
assert warnings == expected
def test_all_check_functions_are_used():
import inspect
import checkpackagelib.lib_config as lib_config
import checkpackagelib.lib_hash as lib_hash
import checkpackagelib.lib_mk as lib_mk
import checkpackagelib.lib_patch as lib_patch
c_config = [c[0] for c in inspect.getmembers(lib_config, inspect.isclass)]
c_hash = [c[0] for c in inspect.getmembers(lib_hash, inspect.isclass)]
c_mk = [c[0] for c in inspect.getmembers(lib_mk, inspect.isclass)]
c_patch = [c[0] for c in inspect.getmembers(lib_patch, inspect.isclass)]
c_all = c_config + c_hash + c_mk + c_patch
c_common = [c[0] for c in inspect.getmembers(m, inspect.isclass)]
assert set(c_common) <= set(c_all)
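All of the removed tests above (and those in the following files) go through checkpackagelib.test_util.check_file(), which is not part of this diff. Presumably it just drives one check class over an in-memory string the way check-package drives it over a file; the sketch below is inferred from how the tests call it (including the literal 'url' manual prefix seen in the expected warnings), not taken from the removed helper itself.

def check_file(check_class, filename, string, url_to_manual="url"):
    # Instantiate the check, feed the string line by line, keep non-None results.
    check = check_class(filename, url_to_manual)
    results = [check.before()]
    for lineno, text in enumerate(string.splitlines(True)):
        results.append(check.check_line(lineno + 1, text))
    results.append(check.after())
    return [r for r in results if r is not None]

# e.g. check_file(m.TrailingSpace, 'any', 'text \n') would be expected to return
# [['any:1: line contains trailing whitespace', 'text \n']].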

View File: utils/checkpackagelib/test_lib_config.py (deleted by this commit)

@@ -1,387 +0,0 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_config as m
AttributesOrder = [
('good example',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'default y\n'
'depends on BR2_USE_BAR # runtime\n'
'select BR2_PACKAGE_BAZ\n'
'help\n'
'\t help text\n',
[]),
('depends before default',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'depends on BR2_USE_BAR\n'
'default y\n',
[['any:4: attributes order: type, default, depends on, select, help (url#_config_files)',
'default y\n']]),
('select after help',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'help\n'
'\t help text\n'
'select BR2_PACKAGE_BAZ\n',
[['any:5: attributes order: type, default, depends on, select, help (url#_config_files)',
'select BR2_PACKAGE_BAZ\n']]),
('string',
'any',
'config BR2_PACKAGE_FOO_PLUGINS\n'
'string "foo plugins"\n'
'default "all"\n',
[]),
('ignore tabs',
'any',
'config\tBR2_PACKAGE_FOO_PLUGINS\n'
'default\t"all"\n'
'string\t"foo plugins"\n',
[['any:3: attributes order: type, default, depends on, select, help (url#_config_files)',
'string\t"foo plugins"\n']]),
('choice',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'if BR2_PACKAGE_FOO\n'
'\n'
'choice\n'
'prompt "type of foo"\n'
'default BR2_PACKAGE_FOO_STRING\n'
'\n'
'config BR2_PACKAGE_FOO_NONE\n'
'bool "none"\n'
'\n'
'config BR2_PACKAGE_FOO_STRING\n'
'bool "string"\n'
'\n'
'endchoice\n'
'\n'
'endif\n'
'\n',
[]),
('type after default',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'if BR2_PACKAGE_FOO\n'
'\n'
'choice\n'
'default BR2_PACKAGE_FOO_STRING\n'
'prompt "type of foo"\n',
[['any:7: attributes order: type, default, depends on, select, help (url#_config_files)',
'prompt "type of foo"\n']]),
('menu',
'any',
'menuconfig BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'help\n'
'\t help text\n'
'\t help text\n'
'\n'
'if BR2_PACKAGE_FOO\n'
'\n'
'menu "foo plugins"\n'
'config BR2_PACKAGE_FOO_COUNTER\n'
'bool "counter"\n'
'\n'
'endmenu\n'
'\n'
'endif\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', AttributesOrder)
def test_AttributesOrder(testname, filename, string, expected):
warnings = util.check_file(m.AttributesOrder, filename, string)
assert warnings == expected
CommentsMenusPackagesOrder = [
('top menu (good)',
'package/Config.in',
'menu "Target packages"\n'
'source "package/busybox/Config.in"\n'
'source "package/skeleton/Config.in"\n',
[]),
('top menu (bad)',
'package/Config.in',
'source "package/skeleton/Config.in"\n'
'source "package/busybox/Config.in"\n',
[['package/Config.in:2: Packages in: The top level menu,\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: busybox',
'source "package/busybox/Config.in"\n']]),
('menu (bad)',
'package/Config.in',
'menu "Target packages"\n'
'source "package/skeleton/Config.in"\n'
'source "package/busybox/Config.in"\n',
[['package/Config.in:3: Packages in: menu "Target packages",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: busybox',
'source "package/busybox/Config.in"\n']]),
('underscore (good)',
'package/Config.in.host',
'menu "Hardware handling"\n'
'menu "Firmware"\n'
'endmenu\n'
'source "package/usb_modeswitch/Config.in"\n'
'source "package/usbmount/Config.in"\n',
[]),
('underscore (bad)',
'package/Config.in.host',
'menu "Hardware handling"\n'
'menu "Firmware"\n'
'endmenu\n'
'source "package/usbmount/Config.in"\n'
'source "package/usb_modeswitch/Config.in"\n',
[['package/Config.in.host:5: Packages in: menu "Hardware handling",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: usb_modeswitch',
'source "package/usb_modeswitch/Config.in"\n']]),
('ignore other files',
'any other file',
'menu "Hardware handling"\n'
'source "package/bbb/Config.in"\n'
'source "package/aaa/Config.in"\n',
[]),
('dash (bad)',
'package/Config.in',
'menu "packages"\n'
'source "package/a_a/Config.in"\n'
'source "package/a-a/Config.in"\n'
'source "package/a1a/Config.in"\n'
'source "package/aAa/Config.in"\n'
'source "package/aaa/Config.in"\n',
[['package/Config.in:3: Packages in: menu "packages",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: a-a',
'source "package/a-a/Config.in"\n']]),
('underscore (bad)',
'package/Config.in',
'menu "packages"\n'
'source "package/a-a/Config.in"\n'
'source "package/a1a/Config.in"\n'
'source "package/a_a/Config.in"\n'
'source "package/aAa/Config.in"\n'
'source "package/aaa/Config.in"\n',
[['package/Config.in:4: Packages in: menu "packages",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: a_a',
'source "package/a_a/Config.in"\n']]),
('digit (bad)',
'package/Config.in',
'menu "packages"\n'
'source "package/a-a/Config.in"\n'
'source "package/a_a/Config.in"\n'
'source "package/aAa/Config.in"\n'
'source "package/a1a/Config.in"\n'
'source "package/aaa/Config.in"\n',
[['package/Config.in:5: Packages in: menu "packages",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: a1a',
'source "package/a1a/Config.in"\n']]),
('capitals (bad)',
'package/Config.in',
'menu "packages"\n'
'source "package/a-a/Config.in"\n'
'source "package/a_a/Config.in"\n'
'source "package/a1a/Config.in"\n'
'source "package/aaa/Config.in"\n'
'source "package/aAa/Config.in"\n',
[['package/Config.in:6: Packages in: menu "packages",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: aAa',
'source "package/aAa/Config.in"\n']]),
('digits, capitals, underscore (good)',
'package/Config.in',
'menu "packages"\n'
'source "package/a-a/Config.in"\n'
'source "package/a_a/Config.in"\n'
'source "package/a1a/Config.in"\n'
'source "package/aAa/Config.in"\n'
'source "package/aaa/Config.in"\n',
[]),
('conditional menu (good)',
'package/Config.in',
'menu "Other"\n'
'source "package/linux-pam/Config.in"\n'
'if BR2_PACKAGE_LINUX_PAM\n'
'comment "linux-pam plugins"\n'
'source "package/libpam-radius-auth/Config.in"\n'
'source "package/libpam-tacplus/Config.in"\n'
'endif\n'
'source "package/liquid-dsp/Config.in"\n',
[]),
('conditional menu (bad)',
'package/Config.in',
'menu "Other"\n'
'source "package/linux-pam/Config.in"\n'
'if BR2_PACKAGE_LINUX_PAM\n'
'comment "linux-pam plugins"\n'
'source "package/libpam-tacplus/Config.in"\n'
'source "package/libpam-radius-auth/Config.in"\n'
'endif\n'
'source "package/liquid-dsp/Config.in"\n',
[['package/Config.in:6: Packages in: comment "linux-pam plugins",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: libpam-radius-auth',
'source "package/libpam-radius-auth/Config.in"\n']]),
('no conditional (bad)',
'package/Config.in',
'menu "Other"\n'
'source "package/linux-pam/Config.in"\n'
'source "package/libpam-radius-auth/Config.in"\n'
'source "package/libpam-tacplus/Config.in"\n'
'source "package/liquid-dsp/Config.in"\n',
[['package/Config.in:3: Packages in: menu "Other",\n'
' are not alphabetically ordered;\n'
" correct order: '-', '_', digits, capitals, lowercase;\n"
' first incorrect package: libpam-radius-auth',
'source "package/libpam-radius-auth/Config.in"\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', CommentsMenusPackagesOrder)
def test_CommentsMenusPackagesOrder(testname, filename, string, expected):
warnings = util.check_file(m.CommentsMenusPackagesOrder, filename, string)
assert warnings == expected
HelpText = [
('single line',
'any',
'config BR2_PACKAGE_FOO\n'
'bool "foo"\n'
'default y\n'
'depends on BR2_USE_BAR # runtime\n'
'select BR2_PACKAGE_BAZ\n'
'help\n'
'\t help text\n',
[]),
('larger than 72',
'any',
'help\n'
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
'\t 123456789 123456789 123456789 123456789 123456789 123456789 123\n'
'\t help text\n',
[['any:3: help text: <tab><2 spaces><62 chars> (url#writing-rules-config-in)',
'\t 123456789 123456789 123456789 123456789 123456789 123456789 123\n',
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12']]),
('long url at beginning of line',
'any',
'help\n'
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
'\t http://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
'\t https://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
'\t git://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
[]),
('long url not at beginning of line',
'any',
'help\n'
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
'\t refer to http://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
'\n'
'\t http://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
[['any:3: help text: <tab><2 spaces><62 chars> (url#writing-rules-config-in)',
'\t refer to http://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12']]),
('allow beautified items',
'any',
'help\n'
'\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
'\t summary:\n'
'\t - enable that config\n'
'\t - built it\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HelpText)
def test_HelpText(testname, filename, string, expected):
warnings = util.check_file(m.HelpText, filename, string)
assert warnings == expected
Indent = [
('good example',
'any',
'config BR2_PACKAGE_FOO\n'
'\tbool "foo"\n'
'\tdefault y\n'
'\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n'
'\tdepends on BR2_INSTALL_LIBSTDCPP\n'
'# very useful comment\n'
'\tselect BR2_PACKAGE_BAZ\n'
'\thelp\n'
'\t help text\n'
'\n'
'comment "foo needs toolchain w/ C++, threads"\n'
'\tdepends on !BR2_INSTALL_LIBSTDCPP || \\\n'
'\t\t!BR2_TOOLCHAIN_HAS_THREADS\n'
'\n'
'source "package/foo/bar/Config.in"\n',
[]),
('spaces',
'any',
'config BR2_PACKAGE_FOO\n'
' bool "foo"\n',
[['any:2: should be indented with one tab (url#_config_files)',
' bool "foo"\n']]),
('without indent',
'any',
'config BR2_PACKAGE_FOO\n'
'default y\n',
[['any:2: should be indented with one tab (url#_config_files)',
'default y\n']]),
('too much tabs',
'any',
'config BR2_PACKAGE_FOO\n'
'\t\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n',
[['any:2: should be indented with one tab (url#_config_files)',
'\t\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n']]),
('help',
'any',
'config BR2_PACKAGE_FOO\n'
' help\n',
[['any:2: should be indented with one tab (url#_config_files)',
' help\n']]),
('continuation line',
'any',
'comment "foo needs toolchain w/ C++, threads"\n'
'\tdepends on !BR2_INSTALL_LIBSTDCPP || \\\n'
' !BR2_TOOLCHAIN_HAS_THREADS\n',
[['any:3: continuation line should be indented using tabs',
' !BR2_TOOLCHAIN_HAS_THREADS\n']]),
('comment with tabs',
'any',
'\tcomment "foo needs toolchain w/ C++, threads"\n',
[['any:1: should not be indented',
'\tcomment "foo needs toolchain w/ C++, threads"\n']]),
('comment with spaces',
'any',
' comment "foo needs toolchain w/ C++, threads"\n',
[['any:1: should not be indented',
' comment "foo needs toolchain w/ C++, threads"\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
warnings = util.check_file(m.Indent, filename, string)
assert warnings == expected

View File: utils/checkpackagelib/test_lib_hash.py (deleted by this commit)

@@ -1,183 +0,0 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_hash as m
HashNumberOfFields = [
('empty file',
'any',
'',
[]),
('empty line',
'any',
'\n',
[]),
('ignore whitespace',
'any',
'\t\n',
[]),
('ignore comments',
'any',
'# text\n',
[]),
('1 field',
'any',
'field1\n',
[['any:1: expected three fields (url#adding-packages-hash)',
'field1\n']]),
('2 fields',
'any',
'field1 field2\n',
[['any:1: expected three fields (url#adding-packages-hash)',
'field1 field2\n']]),
('4 fields',
'any',
'field1 field2 field3 field4\n',
[['any:1: expected three fields (url#adding-packages-hash)',
'field1 field2 field3 field4\n']]),
('with 1 space',
'any',
'field1 field2 field3\n',
[]),
('many spaces',
'any',
' field1 field2 field3\n',
[]),
('tabs',
'any',
'field1\tfield2\tfield3\n',
[]),
('mix of tabs and spaces',
'any',
'\tfield1\t field2\t field3 \n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashNumberOfFields)
def test_HashNumberOfFields(testname, filename, string, expected):
warnings = util.check_file(m.HashNumberOfFields, filename, string)
assert warnings == expected
HashType = [
('ignore empty files',
'any',
'',
[]),
('ignore 1 field',
'any',
'text\n',
[]),
('wrong type',
'any',
'text text\n',
[['any:1: unexpected type of hash (url#adding-packages-hash)',
'text text\n']]),
('md5 (good)',
'any',
'md5 12345678901234567890123456789012\n',
[]),
('md5 (short)',
'any',
'md5 123456\n',
[['any:1: hash size does not match type (url#adding-packages-hash)',
'md5 123456\n',
'expected 32 hex digits']]),
('ignore space before',
'any',
' md5 12345678901234567890123456789012\n',
[]),
('2 spaces',
'any',
'md5 12345678901234567890123456789012\n',
[]),
('ignore tabs',
'any',
'md5\t12345678901234567890123456789012\n',
[]),
('common typo',
'any',
'md5sum 12345678901234567890123456789012\n',
[['any:1: unexpected type of hash (url#adding-packages-hash)',
'md5sum 12345678901234567890123456789012\n']]),
('md5 (too long)',
'any',
'md5 123456789012345678901234567890123\n',
[['any:1: hash size does not match type (url#adding-packages-hash)',
'md5 123456789012345678901234567890123\n',
'expected 32 hex digits']]),
('sha1 (good)',
'any',
'sha1 1234567890123456789012345678901234567890\n',
[]),
('sha256',
'any',
'sha256 1234567890123456789012345678901234567890123456789012345678901234\n',
[]),
('sha384',
'any',
'sha384 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456\n',
[]),
('sha512',
'any',
'sha512 1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678'
'9012345678\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashType)
def test_HashType(testname, filename, string, expected):
warnings = util.check_file(m.HashType, filename, string)
assert warnings == expected
HashSpaces = [
('ignore empty files',
'any',
'',
[]),
('ignore 1 field',
'any',
'text\n',
[]),
('ignore comments',
'any',
'# type 1234567890123456789012345678901234567890 file\n',
[]),
('ignore trailing space',
'any',
'type 1234567890123456789012345678901234567890 file\t \n',
[]),
('2 spaces',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[]),
('1 space',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type 1234567890123456789012345678901234567890 file\n']]),
('3 spaces',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type 1234567890123456789012345678901234567890 file\n']]),
('tabs',
'any',
'type\t1234567890123456789012345678901234567890\tfile\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type\t1234567890123456789012345678901234567890\tfile\n']]),
('mixed tabs and spaces',
'any',
'type\t 1234567890123456789012345678901234567890 \tfile\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type\t 1234567890123456789012345678901234567890 \tfile\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashSpaces)
def test_HashSpaces(testname, filename, string, expected):
warnings = util.check_file(m.HashSpaces, filename, string)
assert warnings == expected

View File: utils/checkpackagelib/test_lib_mk.py (deleted by this commit)

@@ -1,590 +0,0 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_mk as m
Indent = [
('ignore comment at beginning of line',
'any',
'# very useful comment\n',
[]),
('ignore comment at end of line',
'any',
' # very useful comment\n',
[]),
('do not indent on conditional (good)',
'any',
'ifeq ($(BR2_TOOLCHAIN_HAS_THREADS),y)\n'
'FOO_CONF_OPTS += something\n'
'endef\n',
[]),
('do not indent on conditional (bad)',
'any',
'ifeq ($(BR2_TOOLCHAIN_HAS_THREADS),y)\n'
'\tFOO_CONF_OPTS += something\n'
'endef\n',
[['any:2: unexpected indent with tabs',
'\tFOO_CONF_OPTS += something\n']]),
('indent after line that ends in backslash (good)',
'any',
'FOO_CONF_OPTS += \\\n'
'\tsomething\n',
[]),
('indent after line that ends in backslash (bad)',
'any',
'FOO_CONF_OPTS += \\\n'
'something\n',
[['any:2: expected indent with tabs',
'something\n']]),
('indent after 2 lines that ends in backslash (good)',
'any',
'FOO_CONF_OPTS += \\\n'
'\tsomething \\\n'
'\tsomething_else\n',
[]),
('indent after 2 lines that ends in backslash (bad)',
'any',
'FOO_CONF_OPTS += \\\n'
'\tsomething \\\n'
'\tsomething_else \\\n'
'FOO_CONF_OPTS += another_thing\n',
[['any:4: expected indent with tabs',
'FOO_CONF_OPTS += another_thing\n']]),
('indent inside define (good)',
'any',
'define FOO_SOMETHING\n'
'\tcommand\n'
'\tcommand \\\n'
'\t\targuments\n'
'endef\n'
'FOO_POST_PATCH_HOOKS += FOO_SOMETHING\n',
[]),
('indent inside define (bad, no indent)',
'any',
'define FOO_SOMETHING\n'
'command\n'
'endef\n',
[['any:2: expected indent with tabs',
'command\n']]),
('indent inside define (bad, spaces)',
'any',
'define FOO_SOMETHING\n'
' command\n'
'endef\n',
[['any:2: expected indent with tabs',
' command\n']]),
('indent make target (good)',
'any',
'make_target:\n'
'\tcommand\n'
'\n',
[]),
('indent make target (bad)',
'any',
'make_target:\n'
' command\n'
'\n',
[['any:2: expected indent with tabs',
' command\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
warnings = util.check_file(m.Indent, filename, string)
assert warnings == expected
OverriddenVariable = [
('simple assignment',
'any.mk',
'VAR_1 = VALUE1\n',
[]),
('unconditional override (variable without underscore)',
'any.mk',
'VAR1 = VALUE1\n'
'VAR1 = VALUE1\n',
[['any.mk:2: unconditional override of variable VAR1',
'VAR1 = VALUE1\n']]),
('unconditional override (variable with underscore, same value)',
'any.mk',
'VAR_1 = VALUE1\n'
'VAR_1 = VALUE1\n',
[['any.mk:2: unconditional override of variable VAR_1',
'VAR_1 = VALUE1\n']]),
('unconditional override (variable with underscore, different value)',
'any.mk',
'VAR_1 = VALUE1\n'
'VAR_1 = VALUE2\n',
[['any.mk:2: unconditional override of variable VAR_1',
'VAR_1 = VALUE2\n']]),
('warn for unconditional override even with wrong number of spaces',
'any.mk',
'VAR_1= VALUE1\n'
'VAR_1 =VALUE2\n',
[['any.mk:2: unconditional override of variable VAR_1',
'VAR_1 =VALUE2\n']]),
('warn for := override',
'any.mk',
'VAR_1 = VALUE1\n'
'VAR_1 := VALUE2\n',
[['any.mk:2: unconditional override of variable VAR_1',
'VAR_1 := VALUE2\n']]),
('append values outside conditional (good)',
'any.mk',
'VAR_1 = VALUE1\n'
'VAR_1 += VALUE2\n',
[]),
('append values outside conditional (bad)',
'any.mk',
'VAR_1 = VALUE1\n'
'VAR_1 := $(VAR_1), VALUE2\n',
[['any.mk:2: unconditional override of variable VAR_1',
'VAR_1 := $(VAR_1), VALUE2\n']]),
('immediate assignment inside conditional',
'any.mk',
'VAR_1 = VALUE1\n'
'ifeq (condition)\n'
'VAR_1 := $(VAR_1), VALUE2\n',
[['any.mk:3: immediate assignment to append to variable VAR_1',
'VAR_1 := $(VAR_1), VALUE2\n']]),
('immediate assignment inside conditional and unconditional override outside',
'any.mk',
'VAR_1 = VALUE1\n'
'ifeq (condition)\n'
'VAR_1 := $(VAR_1), VALUE2\n'
'endif\n'
'VAR_1 := $(VAR_1), VALUE2\n',
[['any.mk:3: immediate assignment to append to variable VAR_1',
'VAR_1 := $(VAR_1), VALUE2\n'],
['any.mk:5: unconditional override of variable VAR_1',
'VAR_1 := $(VAR_1), VALUE2\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', OverriddenVariable)
def test_OverriddenVariable(testname, filename, string, expected):
warnings = util.check_file(m.OverriddenVariable, filename, string)
assert warnings == expected
PackageHeader = [
('first line (good)',
'any',
80 * '#' + '\n',
[]),
('first line (bad)',
'any',
'# very useful comment\n',
[['any:1: should be 80 hashes (url#writing-rules-mk)',
'# very useful comment\n',
80 * '#']]),
('second line (bad)',
'any',
80 * '#' + '\n'
'# package\n',
[['any:2: should be 1 hash (url#writing-rules-mk)',
'# package\n']]),
('full header (good)',
'any',
80 * '#' + '\n'
'#\n'
'# package\n'
'#\n' +
80 * '#' + '\n'
'\n',
[]),
('blank line after header (good)',
'any',
80 * '#' + '\n'
'#\n'
'# package\n'
'#\n' +
80 * '#' + '\n'
'\n'
'FOO_VERSION = 1\n',
[]),
('blank line after header (bad)',
'any',
80 * '#' + '\n'
'#\n'
'# package\n'
'#\n' +
80 * '#' + '\n'
'FOO_VERSION = 1\n',
[['any:6: should be a blank line (url#writing-rules-mk)',
'FOO_VERSION = 1\n']]),
('wrong number of hashes',
'any',
79 * '#' + '\n'
'#\n'
'# package\n'
'#\n' +
81 * '#' + '\n'
'\n',
[['any:1: should be 80 hashes (url#writing-rules-mk)',
79 * '#' + '\n',
80 * '#'],
['any:5: should be 80 hashes (url#writing-rules-mk)',
81 * '#' + '\n',
80 * '#']]),
('allow include without header',
'any',
'include $(sort $(wildcard package/foo/*/*.mk))\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', PackageHeader)
def test_PackageHeader(testname, filename, string, expected):
warnings = util.check_file(m.PackageHeader, filename, string)
assert warnings == expected
RemoveDefaultPackageSourceVariable = [
('bad',
'any.mk',
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
[['any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
('bad with path',
'./any.mk',
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
[['./any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
('warn for correct line',
'./any.mk',
'\n'
'\n'
'\n'
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
[['./any.mk:4: remove default value of _SOURCE variable (url#generic-package-reference)',
'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
('warn ignoring missing spaces',
'./any.mk',
'ANY_SOURCE=any-$(ANY_VERSION).tar.gz\n',
[['./any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
'ANY_SOURCE=any-$(ANY_VERSION).tar.gz\n']]),
('good',
'./any.mk',
'ANY_SOURCE = aNy-$(ANY_VERSION).tar.gz\n',
[]),
('gcc exception',
'gcc.mk',
'GCC_SOURCE = gcc-$(GCC_VERSION).tar.gz\n',
[]),
('binutils exception',
'./binutils.mk',
'BINUTILS_SOURCE = binutils-$(BINUTILS_VERSION).tar.gz\n',
[]),
('gdb exception',
'gdb/gdb.mk',
'GDB_SOURCE = gdb-$(GDB_VERSION).tar.gz\n',
[]),
('package name with dash',
'python-subprocess32.mk',
'PYTHON_SUBPROCESS32_SOURCE = python-subprocess32-$(PYTHON_SUBPROCESS32_VERSION).tar.gz\n',
[['python-subprocess32.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
'PYTHON_SUBPROCESS32_SOURCE = python-subprocess32-$(PYTHON_SUBPROCESS32_VERSION).tar.gz\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', RemoveDefaultPackageSourceVariable)
def test_RemoveDefaultPackageSourceVariable(testname, filename, string, expected):
warnings = util.check_file(m.RemoveDefaultPackageSourceVariable, filename, string)
assert warnings == expected
SpaceBeforeBackslash = [
('no backslash',
'any.mk',
'\n',
[]),
('ignore missing indent',
'any.mk',
'define ANY_SOME_FIXUP\n'
'for i in $$(find $(STAGING_DIR)/usr/lib* -name "any*.la"); do \\\n',
[]),
('ignore missing space',
'any.mk',
'ANY_CONF_ENV= \\\n'
'\tap_cv_void_ptr_lt_long=no \\\n',
[]),
('variable',
'any.mk',
'\n'
'ANY = \\\n',
[]),
('2 spaces',
'any.mk',
'ANY = \\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \\\n']]),
('warn about correct line',
'any.mk',
'\n'
'ANY = \\\n',
[['any.mk:2: use only one space before backslash',
'ANY = \\\n']]),
('tab',
'any.mk',
'ANY =\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY =\t\\\n']]),
('tabs',
'any.mk',
'ANY =\t\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY =\t\t\\\n']]),
('spaces and tabs',
'any.mk',
'ANY = \t\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t\t\\\n']]),
('mixed spaces and tabs 1',
'any.mk',
'ANY = \t \t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t \t\\\n']]),
('mixed spaces and tabs 2',
'any.mk',
'ANY = \t \\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t \\\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', SpaceBeforeBackslash)
def test_SpaceBeforeBackslash(testname, filename, string, expected):
warnings = util.check_file(m.SpaceBeforeBackslash, filename, string)
assert warnings == expected
TrailingBackslash = [
('no backslash',
'any.mk',
'ANY = \n',
[]),
('one line',
'any.mk',
'ANY = \\\n',
[]),
('2 lines',
'any.mk',
'ANY = \\\n'
'\\\n',
[]),
('empty line after',
'any.mk',
'ANY = \\\n'
'\n',
[['any.mk:1: remove trailing backslash',
'ANY = \\\n']]),
('line with spaces after',
'any.mk',
'ANY = \\\n'
' \n',
[['any.mk:1: remove trailing backslash',
'ANY = \\\n']]),
('line with tabs after',
'any.mk',
'ANY = \\\n'
'\t\n',
[['any.mk:1: remove trailing backslash',
'ANY = \\\n']]),
('ignore if commented',
'any.mk',
'# ANY = \\\n'
'\n',
[]),
('real example',
'any.mk',
'ANY_CONF_ENV= \t\\\n'
'\tap_cv_void_ptr_lt_long=no \\\n'
'\n',
[['any.mk:2: remove trailing backslash',
'\tap_cv_void_ptr_lt_long=no \\\n']]),
('ignore whitespace 1',
'any.mk',
'ANY = \t\t\\\n',
[]),
('ignore whitespace 2',
'any.mk',
'ANY = \t \t\\\n',
[]),
('ignore whitespace 3',
'any.mk',
'ANY = \t \\\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', TrailingBackslash)
def test_TrailingBackslash(testname, filename, string, expected):
warnings = util.check_file(m.TrailingBackslash, filename, string)
assert warnings == expected
TypoInPackageVariable = [
('good',
'any.mk',
'ANY_VAR = \n',
[]),
('good with path 1',
'./any.mk',
'ANY_VAR += \n',
[]),
('good with path 2',
'any/any.mk',
'ANY_VAR = \n',
[]),
('bad =',
'any.mk',
'OTHER_VAR = \n',
[['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
'OTHER_VAR = \n']]),
('bad +=',
'any.mk',
'OTHER_VAR += \n',
[['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
'OTHER_VAR += \n']]),
('ignore missing space',
'any.mk',
'OTHER_VAR= \n',
[['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
'OTHER_VAR= \n']]),
('use path in the warning',
'./any.mk',
'OTHER_VAR = \n',
[['./any.mk:1: possible typo: OTHER_VAR -> *ANY*',
'OTHER_VAR = \n']]),
('another name',
'other.mk',
'ANY_VAR = \n',
[['other.mk:1: possible typo: ANY_VAR -> *OTHER*',
'ANY_VAR = \n']]),
('libc exception',
'./any.mk',
'BR_LIBC = \n',
[]),
('rootfs exception',
'any.mk',
'ROOTFS_ANY_VAR += \n',
[]),
('host (good)',
'any.mk',
'HOST_ANY_VAR += \n',
[]),
('host (bad)',
'any.mk',
'HOST_OTHER_VAR = \n',
[['any.mk:1: possible typo: HOST_OTHER_VAR -> *ANY*',
'HOST_OTHER_VAR = \n']]),
('provides',
'any.mk',
'ANY_PROVIDES = other thing\n'
'OTHER_VAR = \n',
[]),
('ignore space',
'any.mk',
'ANY_PROVIDES = thing other \n'
'OTHER_VAR = \n',
[]),
('wrong provides',
'any.mk',
'ANY_PROVIDES = other\n'
'OTHERS_VAR = \n',
[['any.mk:2: possible typo: OTHERS_VAR -> *ANY*',
'OTHERS_VAR = \n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', TypoInPackageVariable)
def test_TypoInPackageVariable(testname, filename, string, expected):
warnings = util.check_file(m.TypoInPackageVariable, filename, string)
assert warnings == expected
UselessFlag = [
('autoreconf no',
'any.mk',
'ANY_AUTORECONF=NO\n',
[['any.mk:1: useless default value (url#_infrastructure_for_autotools_based_packages)',
'ANY_AUTORECONF=NO\n']]),
('host autoreconf no',
'any.mk',
'HOST_ANY_AUTORECONF\n',
[]),
('autoreconf yes',
'any.mk',
'ANY_AUTORECONF=YES\n',
[]),
('libtool_patch yes',
'any.mk',
'ANY_LIBTOOL_PATCH\t= YES\n',
[['any.mk:1: useless default value (url#_infrastructure_for_autotools_based_packages)',
'ANY_LIBTOOL_PATCH\t= YES\n']]),
('libtool_patch no',
'any.mk',
'ANY_LIBTOOL_PATCH= \t NO\n',
[]),
('generic',
'any.mk',
'ANY_INSTALL_IMAGES = NO\n'
'ANY_INSTALL_REDISTRIBUTE = YES\n'
'ANY_INSTALL_STAGING = NO\n'
'ANY_INSTALL_TARGET = YES\n',
[['any.mk:1: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
'ANY_INSTALL_IMAGES = NO\n'],
['any.mk:2: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
'ANY_INSTALL_REDISTRIBUTE = YES\n'],
['any.mk:3: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
'ANY_INSTALL_STAGING = NO\n'],
['any.mk:4: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
'ANY_INSTALL_TARGET = YES\n']]),
('conditional',
'any.mk',
'ifneq (condition)\n'
'ANY_INSTALL_IMAGES = NO\n'
'endif\n'
'ANY_INSTALL_REDISTRIBUTE = YES\n',
[['any.mk:4: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
'ANY_INSTALL_REDISTRIBUTE = YES\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', UselessFlag)
def test_UselessFlag(testname, filename, string, expected):
warnings = util.check_file(m.UselessFlag, filename, string)
assert warnings == expected
VariableWithBraces = [
('good',
'xmlstarlet.mk',
'XMLSTARLET_CONF_OPTS += \\\n'
'\t--with-libxml-prefix=$(STAGING_DIR)/usr \\\n',
[]),
('bad',
'xmlstarlet.mk',
'XMLSTARLET_CONF_OPTS += \\\n'
'\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n',
[['xmlstarlet.mk:2: use $() to delimit variables, not ${}',
'\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n']]),
('expanded by the shell',
'sg3_utils.mk',
'\tfor prog in xcopy zone; do \\\n'
'\t\t$(RM) $(TARGET_DIR)/usr/bin/sg_$${prog} ; \\\n'
'\tdone\n',
[]),
('comments',
'any.mk',
'#\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', VariableWithBraces)
def test_VariableWithBraces(testname, filename, string, expected):
warnings = util.check_file(m.VariableWithBraces, filename, string)
assert warnings == expected

View File

@@ -1,96 +0,0 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_patch as m
ApplyOrder = [
('standard', # catches https://bugs.busybox.net/show_bug.cgi?id=11271
'0001-description.patch',
'',
[]),
('standard with path',
'path/0001-description.patch',
'',
[]),
('acceptable format',
'1-description.patch',
'',
[]),
('acceptable format with path',
'path/1-description.patch',
'',
[]),
('old format',
'package-0001-description.patch',
'',
[['package-0001-description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
('old format with path',
'path/package-0001-description.patch',
'',
[['path/package-0001-description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
('missing number',
'description.patch',
'',
[['description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
('missing number with path',
'path/description.patch',
'',
[['path/description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', ApplyOrder)
def test_ApplyOrder(testname, filename, string, expected):
warnings = util.check_file(m.ApplyOrder, filename, string)
assert warnings == expected
NumberedSubject = [
('no subject',
'patch',
'',
[]),
('acceptable because it is not a git patch',
'patch',
'Subject: [PATCH 24/105] text\n',
[]),
('good',
'patch',
'Subject: [PATCH] text\n'
'diff --git a/configure.ac b/configure.ac\n',
[]),
('bad',
'patch',
'Subject: [PATCH 24/105] text\n'
'diff --git a/configure.ac b/configure.ac\n',
[["patch:1: generate your patches with 'git format-patch -N'",
'Subject: [PATCH 24/105] text\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', NumberedSubject)
def test_NumberedSubject(testname, filename, string, expected):
warnings = util.check_file(m.NumberedSubject, filename, string)
assert warnings == expected
Sob = [
('good',
'patch',
'Signed-off-by: John Doe <johndoe@example.com>\n',
[]),
('empty',
'patch',
'',
[['patch:0: missing Signed-off-by in the header (url#_format_and_licensing_of_the_package_patches)']]),
('bad',
'patch',
'Subject: [PATCH 24/105] text\n',
[['patch:0: missing Signed-off-by in the header (url#_format_and_licensing_of_the_package_patches)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Sob)
def test_Sob(testname, filename, string, expected):
warnings = util.check_file(m.Sob, filename, string)
assert warnings == expected

View File

@@ -1,131 +0,0 @@
import os
import pytest
import re
import tempfile
import checkpackagelib.test_util as util
import checkpackagelib.lib_sysv as m
from checkpackagelib.test_tool import check_file as tool_check_file
workdir = os.path.join(tempfile.mkdtemp(suffix='-checkpackagelib-test-sysv'))
workdir_regex = re.compile(r'/tmp/tmp[^/]*-checkpackagelib-test-sysv')
Indent = [
('empty file',
'any',
'',
[]),
('empty line',
'any',
'\n',
[]),
('ignore whitespace',
'any',
' \n',
[]),
('spaces',
'any',
'case "$1" in\n'
' start)',
[['any:2: should be indented with tabs (url#adding-packages-start-script)',
' start)']]),
('tab',
'any',
'case "$1" in\n'
'\tstart)',
[]),
('tabs and spaces',
'any',
'case "$1" in\n'
'\t start)',
[['any:2: should be indented with tabs (url#adding-packages-start-script)',
'\t start)']]),
('spaces and tabs',
'any',
'case "$1" in\n'
' \tstart)',
[['any:2: should be indented with tabs (url#adding-packages-start-script)',
' \tstart)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
warnings = util.check_file(m.Indent, filename, string)
assert warnings == expected
NotExecutable = [
('SysV',
'sh-shebang.sh',
0o775,
'#!/bin/sh',
["dir/sh-shebang.sh:0: This file does not need to be executable,"
" just make sure you use '$(INSTALL) -D -m 0755' in the .mk file"]),
]
@pytest.mark.parametrize('testname,filename,permissions,string,expected', NotExecutable)
def test_NotExecutable(testname, filename, permissions, string, expected):
warnings = tool_check_file(m.NotExecutable, filename, string, permissions)
assert warnings == expected
Variables = [
('empty file',
'any',
'',
[['any:0: DAEMON variable not defined (url#adding-packages-start-script)']]),
('daemon and pidfile ok',
'package/busybox/S01syslogd',
'DAEMON="syslogd"\n'
'PIDFILE="/var/run/$DAEMON.pid"\n',
[]),
('wrong filename',
'package/busybox/S01syslog',
'DAEMON="syslogd"\n'
'PIDFILE="/var/run/${DAEMON}.pid"\n',
[['package/busybox/S01syslog:0: filename should be S<number><number><daemon name> (url#adding-packages-start-script)',
'expecting S<number><number>syslogd']]),
('no pidfile ok',
'S99something',
'DAEMON="something"\n',
[]),
('hardcoded pidfile',
'S99something',
'DAEMON="something"\n'
'PIDFILE="/var/run/something.pid"\n',
[['S99something:2: Incorrect PIDFILE value (url#adding-packages-start-script)',
'PIDFILE="/var/run/something.pid"\n',
'PIDFILE="/var/run/$DAEMON.pid"']]),
('redefined daemon',
'S50any',
'DAEMON="any"\n'
'DAEMON="other"\n',
[['S50any:2: DAEMON variable redefined (url#adding-packages-start-script)',
'DAEMON="other"\n']]),
('daemon name with dash',
'S82cups-browsed',
'DAEMON="cups-browsed"',
[]),
('daemon with path',
'S50avahi-daemon',
'DAEMON=/usr/sbin/avahi-daemon',
[['S50avahi-daemon:1: Do not include path in DAEMON (url#adding-packages-start-script)',
'DAEMON=/usr/sbin/avahi-daemon',
'DAEMON="avahi-daemon"']]),
('daemon with path and wrong filename',
'S50avahi',
'DAEMON=/usr/sbin/avahi-daemon',
[['S50avahi:1: Do not include path in DAEMON (url#adding-packages-start-script)',
'DAEMON=/usr/sbin/avahi-daemon',
'DAEMON="avahi-daemon"'],
['S50avahi:0: filename should be S<number><number><daemon name> (url#adding-packages-start-script)',
'expecting S<number><number>avahi-daemon']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Variables)
def test_Variables(testname, filename, string, expected):
warnings = util.check_file(m.Variables, filename, string)
assert warnings == expected

View File

@@ -1,112 +0,0 @@
import os
import pytest
import re
import tempfile
import checkpackagelib.tool as m
workdir_regex = re.compile(r'/tmp/tmp[^/]*-checkpackagelib-test-tool')
def check_file(tool, filename, string, permissions=None):
with tempfile.TemporaryDirectory(suffix='-checkpackagelib-test-tool') as workdir:
script = os.path.join(workdir, filename)
with open(script, 'wb') as f:
f.write(string.encode())
if permissions:
os.chmod(script, permissions)
obj = tool(script)
result = obj.run()
if result is None:
return []
return [workdir_regex.sub('dir', r) for r in result]
NotExecutable = [
('664',
'package.mk',
0o664,
'',
[]),
('775',
'package.mk',
0o775,
'',
["dir/package.mk:0: This file does not need to be executable"]),
]
@pytest.mark.parametrize('testname,filename,permissions,string,expected', NotExecutable)
def test_NotExecutable(testname, filename, permissions, string, expected):
warnings = check_file(m.NotExecutable, filename, string, permissions)
assert warnings == expected
NotExecutable_hint = [
('no hint',
"",
'sh-shebang.sh',
0o775,
'#!/bin/sh',
["dir/sh-shebang.sh:0: This file does not need to be executable"]),
('hint',
", very special hint",
'sh-shebang.sh',
0o775,
'#!/bin/sh',
["dir/sh-shebang.sh:0: This file does not need to be executable, very special hint"]),
]
@pytest.mark.parametrize('testname,hint,filename,permissions,string,expected', NotExecutable_hint)
def test_NotExecutable_hint(testname, hint, filename, permissions, string, expected):
class NotExecutable(m.NotExecutable):
def hint(self):
return hint
warnings = check_file(NotExecutable, filename, string, permissions)
assert warnings == expected
Shellcheck = [
('missing shebang',
'empty.sh',
'',
["dir/empty.sh:0: run 'shellcheck' and fix the warnings",
"In dir/empty.sh line 1:\n"
"^-- SC2148: Tips depend on target shell and yours is unknown. Add a shebang or a 'shell' directive.\n"
"For more information:\n"
" https://www.shellcheck.net/wiki/SC2148 -- Tips depend on target shell and y..."]),
('sh shebang',
'sh-shebang.sh',
'#!/bin/sh',
[]),
('bash shebang',
'bash-shebang.sh',
'#!/bin/bash',
[]),
('2 warnings',
'unused.sh',
'unused=""',
["dir/unused.sh:0: run 'shellcheck' and fix the warnings",
"In dir/unused.sh line 1:\n"
'unused=""\n'
"^-- SC2148: Tips depend on target shell and yours is unknown. Add a shebang or a 'shell' directive.\n"
"^----^ SC2034: unused appears unused. Verify use (or export if used externally).\n"
"For more information:\n"
" https://www.shellcheck.net/wiki/SC2148 -- Tips depend on target shell and y...\n"
" https://www.shellcheck.net/wiki/SC2034 -- unused appears unused. Verify use..."]),
('tab',
'tab.sh',
'\t#!/bin/sh',
["dir/tab.sh:0: run 'shellcheck' and fix the warnings",
"In dir/tab.sh line 1:\n"
'\t#!/bin/sh\n'
"^-- SC1114: Remove leading spaces before the shebang.\n"
"For more information:\n"
" https://www.shellcheck.net/wiki/SC1114 -- Remove leading spaces before the ..."]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Shellcheck)
def test_Shellcheck(testname, filename, string, expected):
warnings = check_file(m.Shellcheck, filename, string)
assert warnings == expected

View File

@@ -1,8 +0,0 @@
def check_file(check_function, filename, string):
obj = check_function(filename, 'url')
result = []
result.append(obj.before())
for i, line in enumerate(string.splitlines(True)):
result.append(obj.check_line(i + 1, line))
result.append(obj.after())
return [r for r in result if r is not None]
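The helper above drives a check class through its three hooks: before() once, check_line() per line (1-indexed, newline included), then after(), keeping every non-None return value as a warning. A minimal sketch of a compatible check class follows; it only assumes the interface implied by this helper (a filename/url constructor plus the three hooks), not any real class from checkpackagelib, and the class name is made up for illustration.

# Sketch only, not part of the commit: a check class shaped like the ones
# exercised by check_file() above. Interface assumed from the helper:
# __init__(filename, url), before(), check_line(lineno, text), after(),
# each hook returning None or a [message, offending line] list.
class TrailingWhitespace(object):
    def __init__(self, filename, url_to_manual):
        self.filename = filename
        self.url_to_manual = url_to_manual

    def before(self):
        return None  # nothing to report before the first line

    def check_line(self, lineno, text):
        # warn when a line has blanks between the content and the newline
        if text.rstrip("\r\n") != text.rstrip():
            return ["{}:{}: line ends in whitespace".format(self.filename, lineno),
                    text]

    def after(self):
        return None  # nothing to report after the last line

# Used the same way the pytest cases above call util.check_file():
#   warnings = check_file(TrailingWhitespace, 'any.mk', 'ANY = 1 \n')
#   assert warnings == [['any.mk:1: line ends in whitespace', 'ANY = 1 \n']]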

View File

@@ -1,24 +0,0 @@
import os
import subprocess
from checkpackagelib.base import _Tool
class NotExecutable(_Tool):
def run(self):
if os.access(self.filename, os.X_OK):
return ["{}:0: This file does not need to be executable{}".format(self.filename, self.hint())]
class Shellcheck(_Tool):
def run(self):
cmd = ['shellcheck', self.filename]
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = p.communicate()[0]
processed_output = [str(line.decode().rstrip()) for line in stdout.splitlines() if line]
if p.returncode == 0:
return
return ["{}:0: run 'shellcheck' and fix the warnings".format(self.filename),
'\n'.join(processed_output)]
except FileNotFoundError:
return ["{}:0: failed to call 'shellcheck'".format(self.filename)]

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/python
#
# diffconfig - a tool to compare .config files.
#

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env bash
set -o errexit -o pipefail
DIR=$(dirname "${0}")
MAIN_DIR=$(readlink -f "${DIR}/..")
# shellcheck disable=SC2016
IMAGE=$(grep ^image: "${MAIN_DIR}/.gitlab-ci.yml" | \
sed -e 's,^image: ,,g' | sed -e 's,\$CI_REGISTRY,registry.gitlab.com,g')
exec docker run -it --rm \
--user $(id -u):$(id -g) \
--mount "type=bind,src=${MAIN_DIR},dst=${MAIN_DIR}" \
--workdir "${MAIN_DIR}" \
"${IMAGE}" "${@}"

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# Copyright (C) 2014 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
#
@@ -18,13 +18,14 @@
# This script generates a random configuration for testing Buildroot.
from __future__ import print_function
import contextlib
import csv
import os
from random import randint
import subprocess
import sys
import traceback
from distutils.version import StrictVersion
import platform
@@ -38,9 +39,17 @@ def urlopen_closing(uri):
return contextlib.closing(_urllib.urlopen(uri))
if sys.hexversion >= 0x3000000:
def decode_byte_list(bl):
return [b.decode() for b in bl]
else:
def decode_byte_list(e):
return e
class SystemInfo:
DEFAULT_NEEDED_PROGS = ["make", "git", "gcc", "timeout"]
DEFAULT_OPTIONAL_PROGS = ["bzr", "java", "javac", "jar", "diffoscope"]
DEFAULT_OPTIONAL_PROGS = ["bzr", "java", "javac", "jar"]
def __init__(self):
self.needed_progs = list(self.__class__.DEFAULT_NEEDED_PROGS)
@@ -118,8 +127,8 @@ def get_toolchain_configs(toolchains_csv, buildrootdir):
with open(toolchains_csv) as r:
# filter empty lines and comments
lines = [t for t in r.readlines() if len(t.strip()) > 0 and t[0] != '#']
toolchains = lines
lines = [ t for t in r.readlines() if len(t.strip()) > 0 and t[0] != '#' ]
toolchains = decode_byte_list(lines)
configs = []
(_, _, _, _, hostarch) = os.uname()
@@ -178,10 +187,9 @@ def is_toolchain_usable(configfile, config):
if platform.machine() == 'x86_64':
if 'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARM=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64_BE=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARMEB=y\n' in configlines:
ldd_version_output = subprocess.check_output(['ldd', '--version'])
glibc_version = ldd_version_output.decode().splitlines()[0].split()[-1]
glibc_version = ldd_version_output.splitlines()[0].split()[-1]
if StrictVersion('2.14') > StrictVersion(glibc_version):
print("WARN: ignoring the Linaro ARM toolchains because too old host glibc", file=sys.stderr)
return False
@@ -189,7 +197,7 @@ def is_toolchain_usable(configfile, config):
return True
def fixup_config(sysinfo, configfile):
def fixup_config(configfile):
"""Finalize the configuration and reject any problematic combinations
This function returns 'True' when the configuration has been
@@ -198,53 +206,59 @@ def fixup_config(sysinfo, configfile):
generated).
"""
sysinfo = SystemInfo()
with open(configfile) as configf:
configlines = configf.readlines()
BR2_TOOLCHAIN_EXTERNAL_URL = 'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/'
if "BR2_NEEDS_HOST_JAVA=y\n" in configlines and not sysinfo.has("java"):
return False
if "BR2_NEEDS_HOST_JAVAC=y\n" in configlines and not sysinfo.has("javac"):
return False
if "BR2_NEEDS_HOST_JAR=y\n" in configlines and not sysinfo.has("jar"):
return False
# python-nfc needs bzr
if 'BR2_PACKAGE_PYTHON_NFC=y\n' in configlines and not sysinfo.has("bzr"):
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain triggers an assembler error with the guile package when compiled with -Os (same issue as for CS ARM 2014.05-29)
if 'BR2_PACKAGE_GUILE=y\n' in configlines and \
'BR2_OPTIMIZE_S=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_SDL=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_LIBMPEG2=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_STRONGSWAN=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# libffi not available on ARMv7-M, but propagating libffi arch
# dependencies in Buildroot is really too much work, so we handle
# this here.
if 'BR2_ARM_CPU_ARMV7M=y\n' in configlines and \
# libffi not available on sh2a and ARMv7-M, but propagating libffi
# arch dependencies in Buildroot is really too much work, so we
# handle this here.
if 'BR2_sh2a=y\n' in configlines and \
'BR2_PACKAGE_LIBFFI=y\n' in configlines:
return False
if 'BR2_nds32=y\n' in configlines and \
if 'BR2_ARM_CPU_ARMV7M=y\n' in configlines and \
'BR2_PACKAGE_LIBFFI=y\n' in configlines:
return False
if 'BR2_PACKAGE_SUNXI_BOARDS=y\n' in configlines:
@@ -252,37 +266,37 @@ def fixup_config(sysinfo, configfile):
configlines.append('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE="a10/hackberry.fex"\n')
# This MIPS uClibc toolchain fails to build the gdb package
if 'BR2_PACKAGE_GDB=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the rt-tests package
if 'BR2_PACKAGE_RT_TESTS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the civetweb package
if 'BR2_PACKAGE_CIVETWEB=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS ctng toolchain fails to build the python3 package
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the strace package
if 'BR2_PACKAGE_STRACE=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the cdrkit package
if 'BR2_PACKAGE_CDRKIT=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# uClibc vfork static linking issue
if 'BR2_PACKAGE_ALSA_LIB=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the weston package
if 'BR2_PACKAGE_WESTON=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
@@ -293,30 +307,17 @@ def fixup_config(sysinfo, configfile):
'BR2_PACKAGE_QT5BASE_GUI=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_FLANN=y\n' in configlines:
return False
if 'BR2_PACKAGE_AUFS_UTIL=y\n' in configlines and \
'BR2_PACKAGE_AUFS_UTIL_VERSION=""\n' in configlines:
# or1k affected by binutils PR21464
if 'BR2_or1k=y\n' in configlines and \
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
return False
if 'BR2_PACKAGE_A10DISP=y\n' in configlines:
return False
if 'BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE=y\n' in configlines:
bootenv = os.path.join(args.outputdir, "boot_env.txt")
with open(bootenv, "w+") as bootenvf:
bootenvf.write("prop=value")
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SOURCE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SOURCE="%s"\n' % bootenv)
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SIZE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SIZE="0x1000"\n')
if 'BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT=y\n' in configlines:
bootscr = os.path.join(args.outputdir, "boot_script.txt")
with open(bootscr, "w+") as bootscrf:
bootscrf.write("prop=value")
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT_SOURCE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT_SOURCE="%s"\n' % bootscr)
with open(configfile, "w+") as configf:
configf.writelines(configlines)
@@ -332,8 +333,6 @@ def gen_config(args):
packages.
"""
sysinfo = SystemInfo()
# Select a random toolchain configuration
configs = get_toolchain_configs(args.toolchains_csv, args.buildrootdir)
@@ -351,15 +350,9 @@ def gen_config(args):
# Allow hosts with old certificates to download over https
configlines.append("BR2_WGET=\"wget --passive-ftp -nd -t 3 --no-check-certificate\"\n")
# Per-package folder
if randint(0, 15) == 0:
configlines.append("BR2_PER_PACKAGE_DIRECTORIES=y\n")
# Amend the configuration with a few things.
if randint(0, 20) == 0:
configlines.append("BR2_ENABLE_DEBUG=y\n")
if randint(0, 20) == 0:
configlines.append("BR2_ENABLE_RUNTIME_DEBUG=y\n")
if randint(0, 1) == 0:
configlines.append("BR2_INIT_BUSYBOX=y\n")
elif randint(0, 15) == 0:
@@ -369,19 +362,7 @@ def gen_config(args):
if randint(0, 20) == 0:
configlines.append("BR2_STATIC_LIBS=y\n")
if randint(0, 20) == 0:
configlines.append("BR2_PACKAGE_PYTHON3_PY_ONLY=y\n")
if randint(0, 5) == 0:
configlines.append("BR2_OPTIMIZE_2=y\n")
if randint(0, 4) == 0:
configlines.append("BR2_SYSTEM_ENABLE_NLS=y\n")
if randint(0, 4) == 0:
configlines.append("BR2_FORTIFY_SOURCE_2=y\n")
# Randomly enable BR2_REPRODUCIBLE 10% of the time
# also enable tar filesystem images for testing
if sysinfo.has("diffoscope") and randint(0, 10) == 0:
configlines.append("BR2_REPRODUCIBLE=y\n")
configlines.append("BR2_TARGET_ROOTFS_TAR=y\n")
configlines.append("BR2_PACKAGE_PYTHON_PY_ONLY=y\n")
# Write out the configuration file
if not os.path.exists(args.outputdir):
@@ -411,10 +392,10 @@ def gen_config(args):
return 1
bounded_loop -= 1
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
"KCONFIG_PROBABILITY=%d" % randint(1, 20),
"KCONFIG_PROBABILITY=%d" % randint(1, 30),
"randpackageconfig"])
if fixup_config(sysinfo, configfile):
if fixup_config(configfile):
break
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
@@ -424,7 +405,7 @@ def gen_config(args):
"savedefconfig"])
return subprocess.call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
"dependencies"])
"core-dependencies"])
if __name__ == '__main__':
@@ -449,7 +430,7 @@ if __name__ == '__main__':
try:
ret = gen_config(args)
except Exception:
traceback.print_exc()
except Exception as e:
print(str(e), file=sys.stderr)
parser.exit(1)
parser.exit(ret)

View File

@@ -1,8 +1,9 @@
#!/usr/bin/env python3
#!/usr/bin/env python
import argparse
import getdeveloperlib
import sys
import os
def parse_args():
@@ -23,6 +24,11 @@ def parse_args():
def __main__():
# DEVELOPERS is one level up from here
devs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
devs = getdeveloperlib.parse_developers(devs_dir)
if devs is None:
sys.exit(1)
args = parse_args()
# Check that only one action is given
@@ -44,13 +50,9 @@ def __main__():
print("No action specified")
return
devs = getdeveloperlib.parse_developers()
if devs is None:
sys.exit(1)
# Handle the check action
if args.check:
files = getdeveloperlib.check_developers(devs)
files = getdeveloperlib.check_developers(devs, devs_dir)
for f in files:
print(f)
@@ -70,9 +72,11 @@ def __main__():
# Handle the files action
if args.files is not None:
args.files = [os.path.abspath(f) for f in args.files]
for dev in devs:
for f in args.files:
if dev.hasfile(f):
for devfile in dev.files:
commonfiles = [f for f in args.files if f.startswith(devfile)]
if commonfiles:
print(dev.name)
break

View File

@@ -1,18 +1,13 @@
from io import open
import os
import re
import glob
import subprocess
import sys
import unittest
brpath = os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))
#
# Patch parsing functions
#
FIND_INFRA_IN_PATCH = re.compile(r"^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
FIND_INFRA_IN_PATCH = re.compile("^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
def analyze_patch(patch):
@@ -35,7 +30,7 @@ def analyze_patch(patch):
return (files, infras)
FIND_INFRA_IN_MK = re.compile(r"^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
FIND_INFRA_IN_MK = re.compile("^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
def fname_get_package_infra(fname):
@@ -56,6 +51,17 @@ def fname_get_package_infra(fname):
return None
def get_infras(files):
"""Search in the list of files for .mk files, and collect the package
infrastructures used by those .mk files."""
infras = set()
for fname in files:
infra = fname_get_package_infra(fname)
if infra:
infras.add(infra)
return infras
def analyze_patches(patches):
"""Parse a list of patches and returns the list of files modified,
added or removed by the patches, as well as the list of package
@@ -66,39 +72,10 @@ def analyze_patches(patches):
(files, infras) = analyze_patch(patch)
allfiles = allfiles | files
allinfras = allinfras | infras
allinfras = allinfras | get_infras(allfiles)
return (allfiles, allinfras)
#
# Unit-test parsing functions
#
def get_all_test_cases(suite):
"""Generate all test-cases from a given test-suite.
:return: (test.module, test.name)"""
if issubclass(type(suite), unittest.TestSuite):
for test in suite:
for res in get_all_test_cases(test):
yield res
else:
yield (suite.__module__, suite.__class__.__name__)
def list_unittests():
"""Use the unittest module to retreive all test cases from a given
directory"""
loader = unittest.TestLoader()
suite = loader.discover(os.path.join(brpath, "support", "testing"))
tests = {}
for module, test in get_all_test_cases(suite):
module_path = os.path.join("support", "testing", *module.split('.'))
tests.setdefault(module_path, []).append('%s.%s' % (module, test))
return tests
unittests = {}
#
# DEVELOPERS file parsing functions
#
@@ -110,35 +87,14 @@ class Developer:
self.packages = parse_developer_packages(files)
self.architectures = parse_developer_architectures(files)
self.infras = parse_developer_infras(files)
self.runtime_tests = parse_developer_runtime_tests(files)
self.defconfigs = parse_developer_defconfigs(files)
def hasfile(self, f):
f = os.path.abspath(f)
for fs in self.files:
if f.startswith(fs):
return True
return False
def __repr__(self):
name = '\'' + self.name.split(' <')[0][:20] + '\''
things = []
if len(self.files):
things.append('{} files'.format(len(self.files)))
if len(self.packages):
things.append('{} pkgs'.format(len(self.packages)))
if len(self.architectures):
things.append('{} archs'.format(len(self.architectures)))
if len(self.infras):
things.append('{} infras'.format(len(self.infras)))
if len(self.runtime_tests):
things.append('{} tests'.format(len(self.runtime_tests)))
if len(self.defconfigs):
things.append('{} defconfigs'.format(len(self.defconfigs)))
if things:
return 'Developer <{} ({})>'.format(name, ', '.join(things))
else:
return 'Developer <' + name + '>'
def parse_developer_packages(fnames):
"""Given a list of file patterns, travel through the Buildroot source
@@ -146,7 +102,7 @@ def parse_developer_packages(fnames):
patterns, and return a list of those packages."""
packages = set()
for fname in fnames:
for root, dirs, files in os.walk(os.path.join(brpath, fname)):
for root, dirs, files in os.walk(fname):
for f in files:
path = os.path.join(root, f)
if fname_get_package_infra(path):
@@ -167,7 +123,7 @@ def parse_arches_from_config_in(fname):
parsing_arches = True
continue
if parsing_arches:
m = re.match(r"^\s*default \"([^\"]*)\".*", line)
m = re.match("^\s*default \"([^\"]*)\".*", line)
if m:
arches.add(m.group(1))
else:
@@ -181,7 +137,7 @@ def parse_developer_architectures(fnames):
developer is working on."""
arches = set()
for fname in fnames:
if not re.match(r"^.*/arch/Config\.in\..*$", fname):
if not re.match("^.*/arch/Config\.in\..*$", fname):
continue
arches = arches | parse_arches_from_config_in(fname)
return arches
@@ -190,49 +146,21 @@ def parse_developer_architectures(fnames):
def parse_developer_infras(fnames):
infras = set()
for fname in fnames:
m = re.match(r"^package/pkg-([^.]*).mk$", fname)
m = re.match("^package/pkg-([^.]*).mk$", fname)
if m:
infras.add(m.group(1))
return infras
def parse_developer_defconfigs(fnames):
"""Given a list of file names, returns the config names
corresponding to defconfigs."""
return {os.path.basename(fname[:-10])
for fname in fnames
if fname.endswith('_defconfig')}
def parse_developer_runtime_tests(fnames):
"""Given a list of file names, returns the runtime tests
corresponding to the file."""
all_files = []
# List all files recursively
for fname in fnames:
if os.path.isdir(fname):
for root, _dirs, files in os.walk(os.path.join(brpath, fname)):
all_files += [os.path.join(root, f) for f in files]
else:
all_files.append(fname)
# Get all runtime tests
runtimes = set()
for f in all_files:
name = os.path.splitext(f)[0]
if name in unittests:
runtimes |= set(unittests[name])
return runtimes
def parse_developers():
def parse_developers(basepath=None):
"""Parse the DEVELOPERS file and return a list of Developer objects."""
developers = []
linen = 0
global unittests
unittests = list_unittests()
developers_fname = os.path.join(brpath, 'DEVELOPERS')
with open(developers_fname, mode='r', encoding='utf_8') as f:
if basepath is None:
basepath = os.getcwd()
else:
basepath = os.path.abspath(basepath)
with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
files = []
name = None
for line in f:
@@ -241,21 +169,14 @@ def parse_developers():
continue
elif line.startswith("N:"):
if name is not None or len(files) != 0:
print("Syntax error in DEVELOPERS file, line %d" % linen,
file=sys.stderr)
print("Syntax error in DEVELOPERS file, line %d" % linen)
name = line[2:].strip()
elif line.startswith("F:"):
fname = line[2:].strip()
dev_files = glob.glob(os.path.join(brpath, fname))
dev_files = glob.glob(os.path.join(basepath, fname))
if len(dev_files) == 0:
print("WARNING: '%s' doesn't match any file" % fname,
file=sys.stderr)
for f in dev_files:
dev_file = os.path.relpath(f, brpath)
dev_file = dev_file.replace(os.sep, '/') # force unix sep
if f[-1] == '/': # relpath removes the trailing /
dev_file = dev_file + '/'
files.append(dev_file)
print("WARNING: '%s' doesn't match any file" % fname)
files += dev_files
elif line == "":
if not name:
continue
@@ -263,8 +184,7 @@ def parse_developers():
files = []
name = None
else:
print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line),
file=sys.stderr)
print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line))
return None
linen += 1
# handle last developer
@@ -279,12 +199,12 @@ def check_developers(developers, basepath=None):
if basepath is None:
basepath = os.getcwd()
cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
files = subprocess.check_output(cmd).decode(sys.stdout.encoding).strip().split("\n")
files = subprocess.check_output(cmd).strip().split("\n")
unhandled_files = []
for f in files:
handled = False
for d in developers:
if d.hasfile(f):
if d.hasfile(os.path.join(basepath, f)):
handled = True
break
if not handled:

View File

@@ -20,7 +20,7 @@ genrandconfig
(http://autobuild.buildroot.org). It selects a random toolchain from
support/config-fragments/autobuild and randomly selects packages to build.
get-developers
get-developpers
a script to return the list of people interested in a specific part
of Buildroot, so they can be Cc:ed on a mail. Accepts a patch as
input, a package name or an architecture name.

View File

@@ -31,7 +31,7 @@ $fatpacked{"MetaCPAN/API/Tiny.pm"} = <<'METACPAN_API_TINY';
if $params{ua_args} && ref($params{ua_args}) ne 'ARRAY';
my $self = +{
base_url => $params{base_url} || 'https://api.metacpan.org/v0',
base_url => $params{base_url} || 'http://api.metacpan.org/v0',
ua => $params{ua} || HTTP::Tiny->new(
$params{ua_args}
? @{$params{ua_args}}
@@ -479,18 +479,14 @@ use Fatal qw(open close);
use Getopt::Long;
use Pod::Usage;
use File::Basename;
use File::Path qw(make_path);
use Module::CoreList;
use HTTP::Tiny;
use Safe;
use MetaCPAN::API::Tiny;
use Digest::SHA qw(sha256_hex);
use Text::Wrap;
$Text::Wrap::columns = 62;
# Below, 5.034 should be aligned with the version of perl actually
# Below, 5.026 should be aligned with the version of perl actually
# bundled in Buildroot:
die <<"MSG" if $] < 5.034;
die <<"MSG" if $] < 5.026;
This script needs a host perl with the same major version as Buildroot target perl.
Your current host perl is:
@@ -498,7 +494,7 @@ Your current host perl is:
version $]
You may install a local one by running:
perlbrew install perl-5.34.0
perlbrew install perl-5.26.0
MSG
my ($help, $man, $quiet, $force, $recommend, $test, $host);
@@ -520,25 +516,14 @@ my %dist; # name -> metacpan data
my %need_target; # name -> 1 if target package is needed
my %need_host; # name -> 1 if host package is needed
my %need_dlopen; # name -> 1 if requires dynamic library
my %is_xs; # name -> 1 if XS module
my %deps_build; # name -> list of host dependencies
my %deps_runtime; # name -> list of target dependencies
my %license_files; # name -> hash of license files
my %deps_optional; # name -> list of optional target dependencies
my %license_files; # name -> list of license files
my %checksum; # author -> list of checksum
my $mirror = 'https://cpan.metacpan.org'; # a CPAN mirror
my $mcpan = MetaCPAN::API::Tiny->new(base_url => 'https://fastapi.metacpan.org/v1');
my $mirror = 'http://cpan.metacpan.org'; # a CPAN mirror
my $mcpan = MetaCPAN::API::Tiny->new(base_url => 'http://fastapi.metacpan.org/v1');
my $ua = HTTP::Tiny->new();
my $new_pkgs;
my %white_list = (
'ExtUtils-Config' => 1,
'ExtUtils-InstallPaths' => 1,
'ExtUtils-Helpers' => 1,
'File-ShareDir-Install' => 1,
'Module-Build' => 1,
'Module-Build-Tiny' => 1,
);
my @info = ();
sub get_checksum {
my ($url) = @_;
@@ -566,45 +551,12 @@ sub find_license_files {
my @license_files;
foreach (split /\n/, $manifest) {
next if m|/|;
s|\s+.*$||;
push @license_files, $_ if m/(ARTISTIC|COPYING|COPYRIGHT|GPL\S*|LICENSE|LICENCE)/i;
push @license_files, $_ if m/(ARTISTIC|COPYING|COPYRIGHT|LICENSE)/i;
}
if (scalar @license_files == 0 && $manifest =~ m/(README)[\n\s]/i) {
@license_files = ($1);
}
if (scalar @license_files == 0 && $manifest =~ m/(README\.md)[\n\s]/i) {
@license_files = ($1);
}
if (scalar @license_files == 0 && $manifest =~ m/(README\.pod)[\n\s]/i) {
@license_files = ($1);
}
return @license_files;
}
sub want_test {
my ($distname) = @_;
return 1 if $need_dlopen{$distname} && scalar @{$deps_runtime{$distname}} > 0;
}
sub get_dependencies {
my ($distname) = @_;
my %dep = map { $_ => 1 } @{$deps_runtime{$distname}};
for my $direct (@{$deps_runtime{$distname}}) {
for (get_dependencies( $direct )) {
$dep{$_} = 1;
}
}
return keys %dep;
}
sub get_indirect_dependencies {
my ($distname) = @_;
my %indirect;
my %direct = map { $_ => 1 } @{$deps_runtime{$distname}};
for my $dep (get_dependencies( $distname )) {
$indirect{$dep} = 1 unless exists $direct{$dep};
}
return keys %indirect;
return \@license_files;
}
sub fetch {
@@ -614,23 +566,17 @@ sub fetch {
unless ($dist{$name} && !$top) {
say qq{fetch ${name}} unless $quiet;
my $result = $mcpan->release( distribution => $name );
my $main_module = $result->{main_module};
push @info, qq{[$name] $main_module is a core module}
if $top && Module::CoreList::is_core( $main_module, undef, $] );
$dist{$name} = $result;
$license_files{$name} = {};
eval {
my $author = $result->{author};
my $release = $name . q{-} . $result->{version};
my $manifest = $mcpan->source( author => $author, release => $release, path => 'MANIFEST' );
$need_dlopen{$name} = $is_xs{$name} = is_xs( $manifest );
foreach my $fname (find_license_files( $manifest )) {
my $license = $mcpan->source( author => $author, release => $release, path => $fname );
$license_files{$name}->{$fname} = sha256_hex( $license );
}
my $manifest = $mcpan->source( author => $result->{author},
release => $name . q{-} . $result->{version},
path => 'MANIFEST' );
$need_dlopen{$name} = is_xs( $manifest );
$license_files{$name} = find_license_files( $manifest );
};
if ($@) {
warn $@;
$license_files{$name} = [];
}
my %build = ();
my %runtime = ();
@@ -645,7 +591,6 @@ sub fetch {
# we could use the host Module::CoreList data, because host perl and
# target perl have the same major version
next if ${$dep}{phase} eq q{develop};
next if ${$dep}{phase} eq q{x_Dist_Zilla};
next if !($test && $top) && ${$dep}{phase} eq q{test};
my $distname = $mcpan->module( $modname )->{distribution};
if (${$dep}{phase} eq q{runtime}) {
@@ -658,12 +603,11 @@ sub fetch {
}
else { # configure, build
$build{$distname} = 1;
push @info, qq{[$name] suspicious dependency on $distname}
unless exists $white_list{$distname};
}
}
$deps_build{$name} = [keys %build];
$deps_runtime{$name} = [keys %runtime];
$deps_optional{$name} = [keys %optional];
foreach my $distname (@{$deps_build{$name}}) {
fetch( $distname, 0, 1 );
}
@@ -671,7 +615,7 @@ sub fetch {
fetch( $distname, $need_target, $need_host );
$need_dlopen{$name} ||= $need_dlopen{$distname};
}
foreach my $distname (keys %optional) {
foreach my $distname (@{$deps_optional{$name}}) {
fetch( $distname, $need_target, $need_host );
}
}
@@ -698,24 +642,6 @@ sub brname {
return uc $name;
}
# Buildroot requires license name as in https://spdx.org/licenses/
sub brlicense {
my $license = shift;
$license =~ s|apache_1_1|Apache-1.1|;
$license =~ s|apache_2_0|Apache-2.0|;
$license =~ s|artistic_2|Artistic-2.0|;
$license =~ s|artistic|Artistic-1.0|;
$license =~ s|lgpl_2_1|LGPL-2.1|;
$license =~ s|lgpl_3_0|LGPL-3.0|;
$license =~ s|gpl_2|GPL-2.0|;
$license =~ s|gpl_3|GPL-3.0|;
$license =~ s|mit|MIT|;
$license =~ s|mozilla_1_1|Mozilla-1.1|;
$license =~ s|openssl|OpenSSL|;
$license =~ s|perl_5|Artistic or GPL-1.0+|;
return $license;
}
while (my ($distname, $dist) = each %dist) {
my $fsname = fsname( $distname );
my $dirname = q{package/} . $fsname;
@@ -723,16 +649,9 @@ while (my ($distname, $dist) = each %dist) {
my $mkname = $dirname . q{/} . $fsname . q{.mk};
my $hashname = $dirname . q{/} . $fsname . q{.hash};
my $brname = brname( $fsname );
my $testdir = q{support/testing/tests/package};
my $testname = $testdir . q{/test_} . lc $brname . q{.py};
unless (-d $dirname) {
make_path $dirname;
$new_pkgs = 1;
}
mkdir $dirname unless -d $dirname;
if ($need_target{$distname} && ($force || !-f $cfgname)) {
$dist->{abstract} =~ s|\s+$||;
$dist->{abstract} .= q{.} unless $dist->{abstract} =~ m|\.$|;
my $abstract = wrap( q{}, qq{\t }, $dist->{abstract} );
my $abstract = $dist->{abstract};
my $homepage = $dist->{resources}->{homepage} || qq{https://metacpan.org/release/${distname}};
say qq{write ${cfgname}} unless $quiet;
open my $fh, q{>}, $cfgname;
@@ -741,7 +660,7 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{\tdepends on !BR2_STATIC_LIBS} if $need_dlopen{$distname};
foreach my $dep (sort @{$deps_runtime{$distname}}) {
my $brdep = brname( fsname( $dep ) );
say {$fh} qq{\tselect BR2_PACKAGE_${brdep} # runtime};
say {$fh} qq{\tselect BR2_PACKAGE_${brdep}};
}
say {$fh} qq{\thelp};
say {$fh} qq{\t ${abstract}\n} if $abstract;
@@ -760,21 +679,20 @@ while (my ($distname, $dist) = each %dist) {
# the auth part is not used, because we use $(BR2_CPAN_MIRROR)
my ($filename, $directories, $suffix) = fileparse( $path, q{tar.gz}, q{tgz} );
$directories =~ s|/$||;
my @dependencies = map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} );
my $dependencies = join qq{ \\\n\t}, @dependencies;
$dependencies = qq{\\\n\t} . $dependencies if scalar @dependencies > 1;
my @host_dependencies = map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
@{$deps_runtime{$distname}} );
my $host_dependencies = join qq{ \\\n\t}, @host_dependencies;
$host_dependencies = qq{\\\n\t} . $host_dependencies if scalar @host_dependencies > 1;
my $license = brlicense( ref $dist->{license} eq 'ARRAY'
? join q{ or }, @{$dist->{license}}
: $dist->{license} );
my $license_files = join q{ }, sort keys %{$license_files{$distname}};
if ($license_files && (!$license || $license eq q{unknown})) {
push @info, qq{[$distname] undefined LICENSE, see $license_files};
$license = q{???};
}
my $dependencies = join q{ }, map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} ),
map( { fsname( $_ ); } sort @{$deps_runtime{$distname}} );
my $host_dependencies = join q{ }, map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
@{$deps_runtime{$distname}} );
my $license = ref $dist->{license} eq 'ARRAY'
? join q{ or }, @{$dist->{license}}
: $dist->{license};
# BR requires license name as in http://spdx.org/licenses/
$license =~ s|apache_2_0|Apache-2.0|;
$license =~ s|artistic_2|Artistic-2.0|;
$license =~ s|mit|MIT|;
$license =~ s|openssl|OpenSSL|;
$license =~ s|perl_5|Artistic or GPL-1.0+|;
my $license_files = join q{ }, @{$license_files{$distname}};
say qq{write ${mkname}} unless $quiet;
open my $fh, q{>}, $mkname;
say {$fh} qq{################################################################################};
@@ -788,10 +706,18 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{${brname}_SITE = \$(BR2_CPAN_MIRROR)${directories}};
say {$fh} qq{${brname}_DEPENDENCIES = ${dependencies}} if $need_target{$distname} && $dependencies;
say {$fh} qq{HOST_${brname}_DEPENDENCIES = ${host_dependencies}} if $need_host{$distname} && $host_dependencies;
say {$fh} qq{${brname}_LICENSE = ${license}} if $license;
say {$fh} qq{${brname}_LICENSE = ${license}} if $license && $license ne q{unknown};
say {$fh} qq{${brname}_LICENSE_FILES = ${license_files}} if $license_files;
say {$fh} qq{${brname}_DISTNAME = ${distname}};
say {$fh} qq{};
foreach (sort @{$deps_optional{$distname}}) {
next if grep { $_ eq $distname; } @{$deps_runtime{$_}}; # avoid cyclic dependencies
my $opt_brname = brname( $_ );
my $opt_fsname = fsname( $_ );
say {$fh} qq{ifeq (\$(BR2_PACKAGE_PERL_${opt_brname}),y)};
say {$fh} qq{${brname}_DEPENDENCIES += ${opt_fsname}};
say {$fh} qq{endif};
say {$fh} qq{};
}
say {$fh} qq{\$(eval \$(perl-package))} if $need_target{$distname};
say {$fh} qq{\$(eval \$(host-perl-package))} if $need_host{$distname};
close $fh;
@@ -803,93 +729,30 @@ while (my ($distname, $dist) = each %dist) {
say qq{write ${hashname}} unless $quiet;
open my $fh, q{>}, $hashname;
say {$fh} qq{# retrieved by scancpan from ${mirror}/};
say {$fh} qq{md5 ${md5} ${filename}};
say {$fh} qq{sha256 ${sha256} ${filename}};
my %license_files = %{$license_files{$distname}};
if (scalar keys %license_files) {
say {$fh} q{};
say {$fh} qq{# computed by scancpan};
foreach my $license (sort keys %license_files) {
my $digest = $license_files{$license};
say {$fh} qq{sha256 ${digest} ${license}};
}
}
close $fh;
}
if (want_test( $distname ) && ($force || !-f $testname)) {
my $classname = $distname;
$classname =~ s|-||g;
my $modname = $distname;
$modname =~ s|-|::|g;
my $mark = $is_xs{$distname} ? q{ XS} : q{};
my @indirect = (get_indirect_dependencies( $distname ));
say qq{write ${testname}} unless $quiet;
make_path $testdir unless -d $testdir;
open my $fh, q{>}, $testname;
say {$fh} qq{from tests.package.test_perl import TestPerlBase};
say {$fh} qq{};
say {$fh} qq{};
say {$fh} qq{class TestPerl${classname}(TestPerlBase):};
say {$fh} qq{ """};
say {$fh} qq{ package:};
say {$fh} qq{ ${distname}${mark}};
say {$fh} qq{ direct dependencies:};
foreach my $dep (sort @{$deps_runtime{$distname}}) {
$mark = $is_xs{$dep} ? q{ XS} : q{};
say {$fh} qq{ ${dep}${mark}};
}
if (scalar @indirect > 0) {
say {$fh} qq{ indirect dependencies:};
foreach my $dep (sort @indirect) {
$mark = $is_xs{$dep} ? q{ XS} : q{};
say {$fh} qq{ ${dep}${mark}};
}
}
say {$fh} qq{ """};
say {$fh} qq{};
say {$fh} qq{ config = TestPerlBase.config + \\};
say {$fh} qq{ """};
say {$fh} qq{ BR2_PACKAGE_PERL=y};
say {$fh} qq{ BR2_PACKAGE_${brname}=y};
say {$fh} qq{ """};
say {$fh} qq{};
say {$fh} qq{ def test_run(self):};
say {$fh} qq{ self.login()};
foreach my $dep (sort grep { $is_xs{$_} } @indirect) {
$dep =~ s|-|::|g;
say {$fh} qq{ self.module_test("${dep}")};
}
foreach my $dep (sort grep { $is_xs{$_} } @{$deps_runtime{$distname}}) {
$dep =~ s|-|::|g;
say {$fh} qq{ self.module_test("${dep}")};
}
say {$fh} qq{ self.module_test("${modname}")};
say {$fh} qq{md5 ${md5} ${filename}};
say {$fh} qq{sha256 ${sha256} ${filename}};
close $fh;
}
}
if ($new_pkgs) {
my %pkg;
my $cfgname = q{package/Config.in};
if (-f $cfgname) {
open my $fh, q{<}, $cfgname;
while (<$fh>) {
chomp;
$pkg{$_} = 1 if m|package/perl-|;
}
close $fh;
my %pkg;
my $cfgname = q{package/Config.in};
if (-f $cfgname) {
open my $fh, q{<}, $cfgname;
while (<$fh>) {
chomp;
$pkg{$_} = 1 if m|package/perl-|;
}
foreach my $distname (keys %need_target) {
my $fsname = fsname( $distname );
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
}
say qq{${cfgname} must contain the following lines:};
say join qq{\n}, sort keys %pkg;
close $fh;
}
say join qq{\n}, @info;
foreach my $distname (keys %need_target) {
my $fsname = fsname( $distname );
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
}
say qq{${cfgname} must contain the following lines:};
say join qq{\n}, sort keys %pkg;
__END__
@@ -899,7 +762,7 @@ utils/scancpan Try-Tiny Moo
=head1 SYNOPSIS
utils/scancpan [options] [distname ...]
supports/scripts/scancpan [options] [distname ...]
Options:
-help
@@ -956,6 +819,7 @@ Perl/CPAN distributions required by the specified distnames. The
dependencies and metadata are fetched from https://metacpan.org/.
After running this script, it is necessary to check the generated files.
You have to manually add the license files (PERL_FOO_LICENSE_FILES variable).
For distributions that link against a target library, you have to add the
buildroot package name for that library to the DEPENDENCIES variable.
@@ -967,7 +831,7 @@ in order to work with the right CoreList data.
=head1 LICENSE
Copyright (C) 2013-2020 by Francois Perrad <francois.perrad@gadz.org>
Copyright (C) 2013-2017 by Francois Perrad <francois.perrad@gadz.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -988,9 +852,9 @@ This script is a part of Buildroot.
This script requires the module C<MetaCPAN::API::Tiny> (version 1.131730)
which was included at the beginning of this file by the tool C<fatpack>.
See L<https://metacpan.org/release/NPEREZ/MetaCPAN-API-Tiny-1.131730>.
See L<http://search.cpan.org/~nperez/MetaCPAN-API-Tiny-1.131730/>.
See L<https://metacpan.org/release/App-FatPacker>.
See L<http://search.cpan.org/search?query=App-FatPacker&mode=dist>.
Both of these libraries are free software and may be distributed under the same
terms as perl itself.

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python2
"""
Utility for building Buildroot packages for existing PyPI packages
@@ -6,11 +6,14 @@ Utility for building Buildroot packages for existing PyPI packages
Any package built by scanpypi should be manually checked for
errors.
"""
from __future__ import print_function
import argparse
import json
import urllib2
import sys
import os
import shutil
import StringIO
import tarfile
import zipfile
import errno
@@ -20,16 +23,6 @@ import textwrap
import tempfile
import imp
from functools import wraps
import six.moves.urllib.request
import six.moves.urllib.error
import six.moves.urllib.parse
from six.moves import map
from six.moves import zip
from six.moves import input
if six.PY2:
import StringIO
else:
import io
BUF_SIZE = 65536
@@ -94,10 +87,9 @@ def pkg_buildroot_name(pkg_name):
Keyword arguments:
pkg_name -- String to rename
"""
name = re.sub(r'[^\w-]', '', pkg_name.lower())
name = name.replace('_', '-')
name = re.sub('[^\w-]', '', pkg_name.lower())
prefix = 'python-'
pattern = re.compile(r'^(?!' + prefix + ')(.+?)$')
pattern = re.compile('^(?!' + prefix + ')(.+?)$')
name = pattern.sub(r'python-\1', name)
return name
@@ -155,15 +147,15 @@ class BuildrootPackage():
self.metadata_url = 'https://pypi.org/pypi/{pkg}/json'.format(
pkg=self.real_name)
try:
pkg_json = six.moves.urllib.request.urlopen(self.metadata_url).read().decode()
except six.moves.urllib.error.HTTPError as error:
pkg_json = urllib2.urlopen(self.metadata_url).read().decode()
except urllib2.HTTPError as error:
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
except six.moves.urllib.error.URLError:
except urllib2.URLError:
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
@@ -177,7 +169,6 @@ class BuildrootPackage():
"""
Download a package using metadata from pypi
"""
download = None
try:
self.metadata['urls'][0]['filename']
except IndexError:
@@ -190,7 +181,7 @@ class BuildrootPackage():
'digests': None}]
# In this case, we can't get the name of the downloaded file
# from the pypi api, so we need to find it, this should work
urlpath = six.moves.urllib.parse.urlparse(
urlpath = urllib2.urlparse.urlparse(
self.metadata['info']['download_url']).path
# urlparse().path gives something like
# /path/to/file-version.tar.gz
@@ -201,9 +192,9 @@ class BuildrootPackage():
continue
try:
print('Downloading package {pkg} from {url}...'.format(
pkg=self.real_name, url=download_url['url']))
download = six.moves.urllib.request.urlopen(download_url['url'])
except six.moves.urllib.error.HTTPError as http_error:
pkg=self.real_name, url=download_url['url']))
download = urllib2.urlopen(download_url['url'])
except urllib2.HTTPError as http_error:
download = http_error
else:
self.used_url = download_url
@@ -213,33 +204,14 @@ class BuildrootPackage():
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
if self.md5_sum == download_url['digests']['md5']:
break
if download is None:
raise DownloadFailed('Failed to download package {pkg}: '
'No source archive available'
else:
if download.__class__ == urllib2.HTTPError:
raise download
raise DownloadFailed('Failed to download package {pkg}'
.format(pkg=self.real_name))
elif download.__class__ == six.moves.urllib.error.HTTPError:
raise download
self.filename = self.used_url['filename']
self.url = self.used_url['url']
def check_archive(self, members):
"""
Check archive content before extracting
Keyword arguments:
members -- list of archive members
"""
# Protect against https://github.com/snyk/zip-slip-vulnerability
# Older python versions do not validate that the extracted files are
# inside the target directory. Detect and error out on evil paths
evil = [e for e in members if os.path.relpath(e).startswith(('/', '..'))]
if evil:
print('ERROR: Refusing to extract {} with suspicious members {}'.format(
self.filename, evil))
sys.exit(1)
def extract_package(self, tmp_path):
"""
Extract the package contents into a directory
@@ -247,10 +219,7 @@ class BuildrootPackage():
Keyword arguments:
tmp_path -- directory where you want the package to be extracted
"""
if six.PY2:
as_file = StringIO.StringIO(self.as_string)
else:
as_file = io.BytesIO(self.as_string)
as_file = StringIO.StringIO(self.as_string)
if self.filename[-3:] == 'zip':
with zipfile.ZipFile(as_file) as as_zipfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
@@ -264,7 +233,6 @@ class BuildrootPackage():
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_zipfile.namelist())
as_zipfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".zip")[0]
else:
@@ -280,7 +248,6 @@ class BuildrootPackage():
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_tarfile.getnames())
as_tarfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".tar")[0]
@@ -295,26 +262,27 @@ class BuildrootPackage():
"""
current_dir = os.getcwd()
os.chdir(self.tmp_extract)
sys.path.insert(0, self.tmp_extract)
sys.path.append(self.tmp_extract)
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
setup = imp.load_module('setup', s_file, s_path, s_desc)
try:
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
imp.load_module('__main__', s_file, s_path, s_desc)
if self.metadata_name in self.setup_args:
pass
elif self.metadata_name.replace('_', '-') in self.setup_args:
self.metadata_name = self.metadata_name.replace('_', '-')
elif self.metadata_name.replace('-', '_') in self.setup_args:
self.metadata_name = self.metadata_name.replace('-', '_')
try:
self.setup_metadata = self.setup_args[self.metadata_name]
except KeyError:
# This means setup was not called
print('ERROR: Could not determine package metadata for {pkg}.\n'
.format(pkg=self.real_name))
raise
finally:
os.chdir(current_dir)
sys.path.remove(self.tmp_extract)
self.setup_metadata = self.setup_args[self.metadata_name]
except KeyError:
# This means setup was not called which most likely mean that it is
# called through the if __name__ == '__main__' directive.
# In this case, we can only pray that it is called through a
# function called main() in setup.py.
setup.main() # Will raise AttributeError if not found
self.setup_metadata = self.setup_args[self.metadata_name]
# Here we must remove the module the hard way.
# We must do this because of a very specific case: if a package calls
# setup() from __main__ but does not provide a 'main()' function,
# for some reason setup.main() would successfully call the main()
# function of a previously processed package...
sys.modules.pop('setup', None)
del setup
os.chdir(current_dir)
sys.path.remove(self.tmp_extract)
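The surrounding code relies on self.setup_args being filled in while setup.py runs; the capture hook itself is defined outside this hunk. One common way to implement such a hook is to replace setuptools.setup() with a recorder before the module is loaded. A minimal sketch under that assumption, not necessarily the exact mechanism used by this script:

    import setuptools

    setup_args = {}

    def _record_setup(**kwargs):
        # Record the keyword arguments instead of performing a real build,
        # so name, version, install_requires, etc. can be read back later.
        setup_args[kwargs.get('name', 'unknown')] = kwargs

    setuptools.setup = _record_setup
    # Loading setup.py after this point populates setup_args without
    # triggering a real setuptools build.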
def get_requirements(self, pkg_folder):
"""
@@ -328,7 +296,7 @@ class BuildrootPackage():
self.pkg_req = None
return set()
self.pkg_req = self.setup_metadata['install_requires']
self.pkg_req = [re.sub(r'([-.\w]+).*', r'\1', req)
self.pkg_req = [re.sub('([-.\w]+).*', r'\1', req)
for req in self.pkg_req]
# get rid of commented lines and also strip the package strings
@@ -336,8 +304,8 @@ class BuildrootPackage():
if len(item) > 0 and item[0] != '#']
req_not_found = self.pkg_req
self.pkg_req = list(map(pkg_buildroot_name, self.pkg_req))
pkg_tuples = list(zip(req_not_found, self.pkg_req))
self.pkg_req = map(pkg_buildroot_name, self.pkg_req)
pkg_tuples = zip(req_not_found, self.pkg_req)
# pkg_tuples is a list of tuples such as
# ('werkzeug', 'python-werkzeug'); both names are needed when checking
# whether dependencies already exist or are already in the download list
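The substitution above keeps only the leading package name of each requirement string, dropping version specifiers and extras. For example, assuming requirement strings in the usual pip format:

    import re

    reqs = ['werkzeug>=0.14', 'six', 'requests[security]==2.20.0']
    names = [re.sub(r'([-.\w]+).*', r'\1', r) for r in reqs]
    # -> ['werkzeug', 'six', 'requests']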
@@ -370,14 +338,13 @@ class BuildrootPackage():
version=self.version)
lines.append(version_line)
if self.buildroot_name != self.real_name:
targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)
targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)
if self.filename not in self.url:
# Sometimes the filename is in the url, sometimes it's not
@@ -442,17 +409,17 @@ class BuildrootPackage():
"Mozilla Public License 2.0": "MPL-2.0",
"Zope Public License": "ZPL"
}
regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
regexp = re.compile('^License :* *.* *:+ (.*)( \(.*\))?$')
classifiers_licenses = [regexp.sub(r"\1", lic)
for lic in self.metadata['info']['classifiers']
if regexp.match(lic)]
licenses = [license_dict[x] if x in license_dict else x for x in classifiers_licenses]
licenses = map(lambda x: license_dict[x] if x in license_dict else x,
classifiers_licenses)
if not len(licenses):
print('WARNING: License has been set to "{license}". It is most'
' likely wrong, please change it if need be'.format(
license=', '.join(licenses)))
licenses = [self.metadata['info']['license']]
licenses = set(licenses)
license_line = '{name}_LICENSE = {license}\n'.format(
name=self.mk_name,
license=', '.join(licenses))
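The classifier regexp extracts the last field of a PyPI trove classifier, which the dictionary above can then map to a short identifier, assuming a matching entry exists outside the visible hunk. A small illustration:

    import re

    regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
    classifier = 'License :: OSI Approved :: MIT License'
    print(regexp.sub(r'\1', classifier))   # -> 'MIT License'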
@@ -461,11 +428,8 @@ class BuildrootPackage():
for license_file in license_files:
with open(license_file) as lic_file:
match = liclookup.match(lic_file.read())
if match is not None and match.confidence >= 90.0:
if match.confidence >= 90.0:
license_names.append(match.license.id)
else:
license_names.append("FIXME: license id couldn't be detected")
license_names = set(license_names)
if len(license_names) > 0:
license_line = ('{name}_LICENSE ='
@@ -486,8 +450,8 @@ class BuildrootPackage():
"""
lines = []
filenames = ['LICENCE', 'LICENSE', 'LICENSE.MD', 'LICENSE.RST',
'LICENCE.TXT', 'LICENSE.TXT', 'COPYING', 'COPYING.TXT']
filenames = ['LICENCE', 'LICENSE', 'LICENSE.RST', 'LICENSE.TXT',
'COPYING', 'COPYING.TXT']
self.license_files = list(find_file_upper_case(filenames, self.tmp_extract))
lines.append(self.__get_license_names(self.license_files))
@@ -559,12 +523,12 @@ class BuildrootPackage():
hash_header = '# md5, sha256 from {url}\n'.format(
url=self.metadata_url)
lines.append(hash_header)
hash_line = '{method} {digest} {filename}\n'.format(
hash_line = '{method}\t{digest} {filename}\n'.format(
method='md5',
digest=self.used_url['digests']['md5'],
filename=self.filename)
lines.append(hash_line)
hash_line = '{method} {digest} {filename}\n'.format(
hash_line = '{method}\t{digest} {filename}\n'.format(
method='sha256',
digest=self.used_url['digests']['sha256'],
filename=self.filename)
@@ -580,10 +544,10 @@ class BuildrootPackage():
if not data:
break
sha256.update(data)
hash_line = '{method} {digest} {filename}\n'.format(
hash_line = '{method}\t{digest} {filename}\n'.format(
method='sha256',
digest=sha256.hexdigest(),
filename=license_file.replace(self.tmp_extract, '')[1:])
filename=os.path.basename(license_file))
lines.append(hash_line)
with open(path_to_hash, 'w') as hash_file:
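The loop above streams each license file through hashlib in fixed-size chunks before emitting its sha256 line. A self-contained sketch of that pattern (function name hypothetical):

    import hashlib

    def sha256_of(path, chunk_size=2 ** 20):
        # Hash the file in chunks so large license files do not need to
        # be held in memory all at once.
        digest = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()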
@@ -603,7 +567,6 @@ class BuildrootPackage():
bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name)
lines.append(bool_line)
if self.pkg_req:
self.pkg_req.sort()
for dep in self.pkg_req:
dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format(
req=dep.upper().replace('-', '_'))
@@ -611,7 +574,7 @@ class BuildrootPackage():
lines.append('\thelp\n')
help_lines = textwrap.wrap(self.metadata['info']['summary'], 62,
help_lines = textwrap.wrap(self.metadata['info']['summary'],
initial_indent='\t ',
subsequent_indent='\t ')
@@ -622,7 +585,7 @@ class BuildrootPackage():
# \t + two spaces is 3 chars long
help_lines.append('')
help_lines.append('\t ' + self.metadata['info']['home_page'])
help_lines = [x + '\n' for x in help_lines]
help_lines = map(lambda x: x + '\n', help_lines)
lines += help_lines
with open(path_to_config, 'w') as config_file:
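The Config.in writer emits a bool prompt, one 'select' per runtime dependency, and a help text wrapped to fit the Kconfig help-indentation convention. A sketch of the resulting lines, with a hypothetical package name, summary and dependency list:

    import textwrap

    # Hypothetical metadata, for illustration only.
    summary = 'A microframework based on Werkzeug and Jinja2.'
    deps = ['python-werkzeug', 'python-jinja2']

    lines = ['config BR2_PACKAGE_PYTHON_EXAMPLE\n',
             '\tbool "python-example"\n']
    lines += ['\tselect BR2_PACKAGE_{req} # runtime\n'.format(
                  req=dep.upper().replace('-', '_')) for dep in deps]
    lines.append('\thelp\n')
    lines += [line + '\n' for line in textwrap.wrap(
                  summary, 62, initial_indent='\t  ',
                  subsequent_indent='\t  ')]
    print(''.join(lines))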
@@ -663,7 +626,7 @@ def main():
print('Fetching package', package.real_name)
try:
package.fetch_package_info()
except (six.moves.urllib.error.URLError, six.moves.urllib.error.HTTPError):
except (urllib2.URLError, urllib2.HTTPError):
continue
if package.metadata_name.lower() == 'setuptools':
# setuptools imports itself, that does not work very well
@@ -673,7 +636,7 @@ def main():
try:
package.download_package()
except six.moves.urllib.error.HTTPError as error:
except urllib2.HTTPError as error:
print('Error: {code} {reason}'.format(code=error.code,
reason=error.reason))
print('Error downloading package :', package.buildroot_name)
@@ -692,12 +655,12 @@ def main():
try:
package.load_setup()
except ImportError as err:
if 'buildutils' in str(err):
if 'buildutils' in err.message:
print('This package needs buildutils')
else:
raise
continue
except (AttributeError, KeyError) as error:
except AttributeError as error:
print('Error: Could not install package {pkg}: {error}'.format(
pkg=package.real_name, error=error))
continue
@@ -721,7 +684,7 @@ def main():
continue
print('Error: Package {name} already exists'
.format(name=package.pkg_dir))
del_pkg = input(
del_pkg = raw_input(
'Do you want to delete existing package ? [y/N]')
if del_pkg.lower() == 'y':
shutil.rmtree(package.pkg_dir)
@@ -733,8 +696,6 @@ def main():
package.create_hash_file()
package.create_config_in()
print("NOTE: Remember to also make an update to the DEVELOPERS file")
print(" and include an entry for the pkg in packages/Config.in")
print()
# printing an empty line for visual comfort
finally:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# Copyright (C) 2016 Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>
@@ -24,15 +24,14 @@ import csv
import argparse
import sys
def read_file_size_csv(inputf, detail=None):
"""Extract package or file sizes from CSV file into size dictionary"""
sizes = {}
reader = csv.reader(inputf)
header = next(reader)
if header[0] != 'File name' or header[1] != 'Package name' or \
header[2] != 'File size' or header[3] != 'Package size':
if (header[0] != 'File name' or header[1] != 'Package name' or
header[2] != 'File size' or header[3] != 'Package size'):
print(("Input file %s does not contain the expected header. Are you "
"sure this file corresponds to the file-size-stats.csv "
"file created by 'make graph-size'?") % inputf.name)
@@ -40,13 +39,12 @@ def read_file_size_csv(inputf, detail=None):
for row in reader:
if detail:
sizes[(row[0], row[1])] = int(row[2])
sizes[row[0]] = int(row[2])
else:
sizes[(None, row[1])] = int(row[3])
sizes[row[1]] = int(row[3])
return sizes
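Depending on the detail flag, the dictionary built above is keyed per file or per package. A short illustration of the detailed variant, using a hypothetical two-row excerpt of file-size-stats.csv:

    import csv
    import io

    data = io.StringIO(
        'File name,Package name,File size,Package size\n'
        'usr/bin/foo,foo,1234,2048\n'
        'usr/lib/libfoo.so,foo,814,2048\n')
    reader = csv.reader(data)
    next(reader)                      # skip the header checked above
    sizes = {(row[0], row[1]): int(row[2]) for row in reader}
    # -> {('usr/bin/foo', 'foo'): 1234, ('usr/lib/libfoo.so', 'foo'): 814}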
def compare_sizes(old, new):
"""Return delta/added/removed dictionaries based on two input size
dictionaries"""
@@ -66,34 +64,19 @@ def compare_sizes(old, new):
return delta
def print_results(result, threshold):
"""Print the given result dictionary sorted by size, ignoring any entries
below or equal to threshold"""
from six import iteritems
list_result = list(iteritems(result))
# result is a dictionary: (filename, pkgname) -> (flag, size difference)
# list_result is a list of tuples: ((filename, pkgname), (flag, size difference))
# filename may be None if no detail is requested.
maxpkgname = max(len(pkgname) for filename, pkgname in result)
# result is a dictionary: name -> (flag, size difference)
# list_result is a list of tuples: (name, (flag, size difference))
for entry in sorted(list_result, key=lambda entry: entry[1][1]):
data = dict(
filename=entry[0][0],
pkgname=entry[0][1],
action=entry[1][0],
size=entry[1][1],
maxpkgname=maxpkgname,
)
if threshold is not None and abs(data['size']) <= threshold:
if threshold is not None and abs(entry[1][1]) <= threshold:
continue
if data['filename']:
print('{size:12d} {action:7s} {pkgname:{maxpkgname}s} {filename}'.format(**data))
else:
print('{size:12d} {action:7s} {pkgname}'.format(**data))
print('%12s %7s %s' % (entry[1][1], entry[1][0], entry[0]))
# main #########################################################################
@@ -140,5 +123,5 @@ print('Size difference per %s (bytes), threshold = %s' % (keyword, args.threshol
print(80*'-')
print_results(delta, args.threshold)
print(80*'-')
print_results({(None, 'TOTAL'): ('', sum(new_sizes.values()) - sum(old_sizes.values()))},
print_results({'TOTAL': ('', sum(new_sizes.values()) - sum(old_sizes.values()))},
threshold=None)

View File

@@ -2,56 +2,31 @@
set -e
TOOLCHAINS_CSV='support/config-fragments/autobuild/toolchain-configs.csv'
TEMP_CONF=""
do_clean() {
if [ ! -z "${TEMP_CONF}" ]; then
rm -f "${TEMP_CONF}"
fi
}
main() {
local o O opts
local cfg dir pkg random toolchains_csv toolchain all number mode prepare_only
local ret nb nb_skip nb_fail nb_legal nb_show nb_tc build_dir keep
local cfg dir pkg random toolchains_dir toolchain
local ret nb nb_skip nb_fail nb_legal nb_tc build_dir
local -a toolchains
local pkg_br_name
o='hakc:d:n:p:r:t:'
O='help,all,keep,prepare-only,config-snippet:,build-dir:,number:,package:,random:,toolchains-csv:'
o='hc:d:p:r:t:'
O='help,config-snippet:build-dir:package:,random:,toolchains-dir:'
opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
eval set -- "${opts}"
random=0
all=0
keep=0
number=0
mode=0
prepare_only=0
toolchains_csv="${TOOLCHAINS_CSV}"
while [ ${#} -gt 0 ]; do
case "${1}" in
(-h|--help)
help; exit 0
;;
(-a|--all)
all=1; shift 1
;;
(-k|--keep)
keep=1; shift 1
;;
(--prepare-only)
prepare_only=1; shift 1
;;
(-c|--config-snippet)
cfg="${2}"; shift 2
;;
(-d|--build-dir)
dir="${2}"; shift 2
;;
(-n|--number)
number="${2}"; shift 2
;;
(-p|--package)
pkg="${2}"; shift 2
;;
@@ -66,15 +41,8 @@ main() {
;;
esac
done
trap do_clean INT TERM HUP EXIT
if [ -z "${cfg}" ]; then
pkg_br_name="${pkg//-/_}"
pkg_br_name="BR2_PACKAGE_${pkg_br_name^^}"
TEMP_CONF=$(mktemp /tmp/test-${pkg}-config.XXXXXX)
echo "${pkg_br_name}=y" > ${TEMP_CONF}
cfg="${TEMP_CONF}"
printf "error: no config snippet specified\n" >&2; exit 1
fi
if [ ! -e "${cfg}" ]; then
printf "error: %s: no such file\n" "${cfg}" >&2; exit 1
@@ -83,37 +51,15 @@ main() {
dir="${HOME}/br-test-pkg"
fi
if [ ${random} -gt 0 ]; then
mode=$((mode+1))
fi
if [ ${number} -gt 0 ]; then
mode=$((mode+1))
fi
if [ ${all} -eq 1 ]; then
mode=$((mode+1))
fi
# The default mode is to test the first N toolchains, which have been
# chosen to provide a representative selection of toolchains.
if [ ${mode} -eq 0 ] ; then
number=6
elif [ ${mode} -gt 1 ] ; then
printf "error: --all, --number and --random are mutually exclusive\n" >&2; exit 1
fi
# Extract the URLs of the toolchains; drop internal toolchains
# E.g.: http://server/path/to/name.config,arch,libc
# --> http://server/path/to/name.config
toolchains=($(sed -r -e 's/,.*//; /internal/d; /^#/d; /^$/d;' "${toolchains_csv}" \
|if [ ${random} -gt 0 ]; then \
sort -R |head -n ${random}
elif [ ${number} -gt 0 ]; then \
head -n ${number}
else
sort
fi
else
cat
fi |sort
)
)
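The sed pipeline above keeps only the config-fragment path from each CSV row, drops comments, blank lines and internal toolchains, and then either shuffles, truncates or sorts the list. The same filtering expressed in Python, purely as an illustration of what the shell pipeline does:

    import random

    def select_toolchains(csv_lines, number=0, rand=0):
        # Keep the first CSV field (the config fragment path), drop
        # comments, blank lines and internal-toolchain entries.
        configs = [line.split(',')[0] for line in csv_lines
                   if line.strip() and not line.startswith('#')
                   and 'internal' not in line]
        if rand:
            return sorted(random.sample(configs, min(rand, len(configs))))
        if number:
            return sorted(configs[:number])
        return sorted(configs)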
@@ -126,26 +72,22 @@ main() {
nb_skip=0
nb_fail=0
nb_legal=0
nb_show=0
for toolchainconfig in "${toolchains[@]}"; do
: $((nb++))
toolchain="$(basename "${toolchainconfig}" .config)"
build_dir="${dir}/${toolchain}"
printf "%40s [%*d/%d]: " "${toolchain}" ${#nb_tc} ${nb} ${nb_tc}
build_one "${build_dir}" "${toolchainconfig}" "${cfg}" "${pkg}" "${prepare_only}" && ret=0 || ret=${?}
build_one "${build_dir}" "${toolchainconfig}" "${cfg}" "${pkg}" && ret=0 || ret=${?}
case ${ret} in
(0) printf "OK\n";;
(1) : $((nb_skip++)); printf "SKIPPED\n";;
(2) : $((nb_fail++)); printf "FAILED\n";;
(3) : $((nb_legal++)); printf "FAILED\n";;
(4) : $((nb_show++)); printf "FAILED\n";;
esac
done
printf "%d builds, %d skipped, %d build failed, %d legal-info failed, %d show-info failed\n" \
${nb} ${nb_skip} ${nb_fail} ${nb_legal} ${nb_show}
return $((nb_fail + nb_legal))
printf "%d builds, %d skipped, %d build failed, %d legal-info failed\n" \
${nb} ${nb_skip} ${nb_fail} ${nb_legal}
}
build_one() {
@@ -153,11 +95,10 @@ build_one() {
local toolchainconfig="${2}"
local cfg="${3}"
local pkg="${4}"
local prepare_only="${5}"
mkdir -p "${dir}"
CONFIG_= support/kconfig/merge_config.sh -O "${dir}" \
support/kconfig/merge_config.sh -O "${dir}" \
"${toolchainconfig}" "support/config-fragments/minimal.config" "${cfg}" \
>> "${dir}/logfile" 2>&1
# We want all the options from the snippet to be present as-is (set
@@ -168,20 +109,11 @@ build_one() {
# done in the same locale.
comm -23 <(sort "${cfg}") <(sort "${dir}/.config") >"${dir}/missing.config"
if [ -s "${dir}/missing.config" ]; then
if [ ${keep} -ne 1 ]; then
# Invalid configuration, drop it
rm -f "${dir}/.config"
fi
return 1
fi
# Remove file, it's empty anyway.
rm -f "${dir}/missing.config"
# Defer building the job to the caller (e.g. a gitlab pipeline)
if [ ${prepare_only} -eq 1 ]; then
return 0
fi
if [ -n "${pkg}" ]; then
if ! make O="${dir}" "${pkg}-dirclean" >> "${dir}/logfile" 2>&1; then
return 2
@@ -189,7 +121,7 @@ build_one() {
fi
# shellcheck disable=SC2086
if ! BR_FORCE_CHECK_DEPENDENCIES=YES make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
if ! make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
return 2
fi
@@ -198,26 +130,6 @@ build_one() {
if ! make O="${dir}" legal-info >> "${dir}/logfile" 2>&1; then
return 3
fi
# Validate that show-info generates proper JSON
{ tput smso; printf '>>> Running show-info\n'; tput rmso; } >> "${dir}/logfile" 2> /dev/null;
JQ="$(which jq)"
if [ -z "${JQ}" ]; then
make O="${dir}" host-jq >> "${dir}/logfile" 2>&1
JQ="${dir}/host/bin/jq"
fi
if ! make O="${dir}" "${pkg:+${pkg}-}show-info" > "${dir}/info.json" 2>> "${dir}/logfile"; then
return 4
fi
if ! "${JQ}" . < "${dir}/info.json" >> "${dir}/logfile" 2>&1; then
return 4
fi
# If we get here, the build was successful. Clean up the build/host
# directories to save disk space, unless 'keep' was set.
if [ ${keep} -ne 1 ]; then
make O="${dir}" clean >> "${dir}/logfile" 2>&1
fi
}
help() {
@@ -242,10 +154,6 @@ toolchain config fragment and the required host architecture, separated by a
comma. The config fragments should contain only the toolchain and architecture
settings.
By default, a useful subset of toolchains is tested. If needed, all
toolchains can be tested (-a), or an arbitrary number of toolchains:
the first N in CSV order (-n), or N picked at random (-r).
Options:
-h, --help
@@ -257,37 +165,20 @@ Options:
-d DIR, --build-dir DIR
Do the builds in directory DIR, one sub-dir per toolchain.
If not specified, defaults to \${HOME}/br-test-pkg
-p PKG, --package PKG
Test-build the package PKG by running 'make PKG'; if not specified,
just run 'make'.
-a, --all
Test all toolchains, instead of the default subset defined by
Buildroot developers.
-n N, --number N
Test N toolchains, in the order defined in the toolchain CSV
file.
-r N, --random N
Limit the tests to N randomly selected toolchains.
Limit the tests to N randomly selected toolchains, instead of
building with all toolchains.
-t CSVFILE, --toolchains-csv CSVFILE
CSV file containing the paths to config fragments of toolchains to
try. If not specified, the toolchains in ${TOOLCHAINS_CSV} will be
used.
-k, --keep
Keep the build directories even if the build succeeds.
Note: the logfile and configuration are always retained, even without
this option.
--prepare-only
Only prepare the .config files, but do not build them. Output the
list of build directories to stdout, and the status on stderr.
Example:
Testing libcec would require a config snippet that contains: