from pygnulib.config import BaseConfig
from pygnulib.config import CachedConfig
+from pygnulib.config import LGPL_LICENSES
from pygnulib.generator import gnulib_cache
from pygnulib.generator import gnulib_comp
from pygnulib.parser import CommandLine as CommandLineParser
-from pygnulib.tools import Executable
+from pygnulib.misc import Executable
from pygnulib.vfs import BaseVFS
from pygnulib.vfs import GnulibGitVFS
class GnulibExecutable(Executable):
- def __init__(self, name, encoding=None, shell_name=None, shell_path=None):
- path = None
- if shell_name is None:
- shell_name = shell_name.upper()
- if shell_path is None:
- shell_path = "{}PATH".format(shell_name)
- environ = dict(ENVIRON)
- environ.update(os.environ)
- if shell_name in environ:
- path = shell_name
- elif shell_path in environ:
- path = "{}{}".format(shell_path)
- super().__init__(name, path)
-
-
-
-AC_VERSION_PATTERN = re.compile(r"AC_PREREQ\(\[(.*?)\]\)", re.S | re.M)
-IGNORED_LICENSES = {
- "GPLed build tool",
- "public domain",
- "unlimited",
- "unmodifiable license text",
-}
+ def __init__(self, name, var, prefix, environ=os.environ, encoding=None):
+ if not isinstance(var, str):
+ raise TypeError("var: str expected")
+ if not isinstance(prefix, str):
+ raise TypeError("prefix: str expected")
+ var = environ.get(var, None)
+ prefix = environ.get(prefix, None)
+ path = os.path.normpath(name)
+ if var:
+ # An explicit override (e.g. AUTOCONF=/usr/bin/autoconf) takes precedence.
+ path = os.path.normpath(var)
+ elif prefix:
+ # Otherwise honour the prefix variable (e.g. AUTOCONFPATH), which, following
+ # gnulib-tool conventions, is expected to end with a path separator.
+ path = f"{prefix}{path}"
+ super().__init__(path=path, encoding=encoding)
+
+
+
+AUTOCONF = GnulibExecutable("autoconf", "AUTOCONF", "AUTOCONFPATH")
+AUTOHEADER = GnulibExecutable("autoheader", "AUTOHEADER", "AUTOCONFPATH")
+ACLOCAL = GnulibExecutable("aclocal", "ACLOCAL", "AUTOMAKEPATH")
+AUTOMAKE = GnulibExecutable("automake", "AUTOMAKE", "AUTOMAKEPATH")
+AUTORECONF = GnulibExecutable("autoreconf", "AUTORECONF", "AUTOCONFPATH")
+LIBTOOLIZE = GnulibExecutable("libtoolize", "LIBTOOLIZE", "LIBTOOLPATH")
+RSYNC = GnulibExecutable("rsync", "RSYNC", "RSYNCPATH")
+WGET = GnulibExecutable("wget", "WGET", "WGETPATH")
+PATCH = GnulibExecutable("patch", "PATCH", "PATCHPATH")
+
+
+
TRANSFER_MODES = {
None: vfs_copy,
"hardlink": vfs_hardlink,
"po": "po_base",
"top": "",
}
+EXECUTABLES = {
+ _var.lower(): GnulibExecutable(_var.lower(), _var, _prefix)
+ for (_var, _prefix) in (
+ ("AUTOCONF", "AUTOCONFPATH"),
+ ("AUTOHEADER", "AUTOCONFPATH"),
+ ("ACLOCAL", "AUTOMAKEPATH"),
+ ("AUTOMAKE", "AUTOMAKEPATH"),
+ ("AUTORECONF", "AUTOCONFPATH"),
+ ("LIBTOOLIZE", "LIBTOOLPATH"),
+ )
+}
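+# For example, EXECUTABLES["autoconf"] resolves to $AUTOCONF, to ${AUTOCONFPATH}autoconf,
+# or to plain "autoconf", in that order of preference.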
TP_URL = "http://translationproject.org/latest/"
TP_RSYNC_URI = "translationproject.org::tp/latest/"
# Determine license incompatibilities, if any.
incompatibilities = set()
- if set(config.licenses) & {"LGPLv2", "LGPLv2+", "LGPLv3", "LGPLv3+"}:
- acceptable = IGNORED_LICENSES | config.licenses
+ if config.licenses & LGPL_LICENSES:
+ acceptable = {
+ "GPLed build tool",
+ "public domain",
+ "unlimited",
+ "unmodifiable license text",
+ }
+ acceptable |= set(config.licenses)
for (name, licenses) in ((module.name, module.licenses) for module in main):
if not (acceptable & licenses):
incompatibilities.add((name, licenses))
for dst in sorted(files):
match = tuple(override for override in overrides if dst in override)
override = match[0] if match else gnulib
- (vfs, src) = vfs_lookup(dst, gnulib, override, patch="patch")
+ (vfs, src) = vfs_lookup(dst, gnulib, override, patch=PATCH)
action = update_file if vfs_exists(project, dst) else add_file
action(bool(match), vfs, src, project, dst, present)
"path": path,
"config": config,
"explicit": explicit,
- "modules": database.main_modules,
+ "database": database,
"mkedits": mkedits,
"testing": False,
}
- for line in lib_makefile(**arguments):
+ for line in lib_makefile(**arguments, autoconf=AUTOCONF):
print(line, file=tmp)
(src, dst) = (tmp.name, path)
present = vfs_exists(project, dst)
path = os.path.join("build-aux", "po", file)
match = tuple(override for override in overrides if file in override)
override = match[0] if match else gnulib
- (vfs, src) = vfs_lookup(path, gnulib, override, patch="patch")
+ (vfs, src) = vfs_lookup(path, gnulib, override, patch=PATCH)
dst = os.path.join("po", file)
present = vfs_exists(project, dst)
if present:
LGPLv3_LICENSE = frozenset({"LGPLv2+", "LGPLv3", "LGPLv3+", "LGPL"})
GPLv2_LICENSE = frozenset({"GPLv2", "GPLv2+"})
GPLv3_LICENSE = frozenset({"GPLv2+", "GPLv3", "GPLv3+", "GPL"})
-LGPL_LICENSE = frozenset(LGPLv3_LICENSE)
-GPL_LICENSE = frozenset(GPLv3_LICENSE)
+LGPL_LICENSES = frozenset(LGPLv3_LICENSE)
+GPL_LICENSES = frozenset(GPLv3_LICENSE)
OTHER_LICENSES = frozenset({
"GPLed build tool",
"public domain",
licenses = _StringListProperty(
sorted=True,
unique=True,
- fget=lambda self: self.__get_option("licenses"),
+ fget=lambda self: set(self.__get_option("licenses")),
fset=lambda self, name: self.__set_option("licenses", name),
doc="acceptable licenses for modules",
)
self[key] = {
"2": LGPLv2_LICENSE,
"3": LGPLv3_LICENSE,
- "yes": LGPL_LICENSE,
+ "yes": LGPL_LICENSES,
"3orGPLv2": (GPLv2_LICENSE | LGPLv3_LICENSE),
}[match[-1]]
elif typeid is bool:
from .config import BaseConfig as _BaseConfig
-from .module import BaseModule as _BaseModule
-from .module import Database as _Database
from .config import LGPLv2_LICENSE as _LGPLv2_LICENSE
from .config import LGPLv3_LICENSE as _LGPLv3_LICENSE
from .config import GPLv2_LICENSE as _GPLv2_LICENSE
-from .config import LGPL_LICENSE as _LGPL_LICENSE
-
+from .config import LGPL_LICENSES as _LGPL_LICENSES
+from .misc import Executable as _Executable
+from .module import BaseModule as _BaseModule
+from .module import Database as _Database
_LGPL = {
_LGPLv2_LICENSE: "2",
_LGPLv3_LICENSE: "3",
- _LGPL_LICENSE: "yes",
+ _LGPL_LICENSES: "yes",
(_GPLv2_LICENSE | _LGPLv3_LICENSE): "3orGPLv2",
}
__DISCLAIMER = (
for (dependency, condition) in sorted(database.dependencies(demander)):
if database.conditional(dependency):
shellfunc = dependency.shell_function(macro_prefix)
- if condition is not None:
+ if condition:
yield f" if {condition}; then"
yield f" {shellfunc}"
yield f" fi"
if dependency in modules and database.conditional(dependency):
condname = dependency.conditional_name(macro_prefix)
shellfunc = dependency.shell_function(macro_prefix)
- if condition is not None:
+ if condition:
yield f" if {condition}; then"
yield f" {shellfunc}"
yield f" fi"
__MAKEFILE_SUBDIRS = _re.compile(r"lib/.*/.*\.c", _re.S)
__MAKEFILE_LDFLAGS = _re.compile(r"^lib_LDFLAGS\s*\+\=.*?$", _re.S)
__MAKEFILE_LIBNAME = _re.compile(r"lib_([A-Z][A-Z]*)", _re.S)
-__MAKEFILE_GNUMAKE = _re.compile(r"^if (.*?)$", _re.S)
+__MAKEFILE_GNUMAKE = _re.compile(r"^if\s(.*?)$", _re.S | _re.M)
-def _lib_makefile_callback(conditionals, gnumake):
+def _lib_makefile_callback(database, macro_prefix, conditionals, gnumake):
- def _automake_conditional(module, conditional, unconditional, macro_prefix):
+ def _automake_conditional(module, conditional, unconditional):
yield ""
- yield "if {}".format(module.conditional_name(macro_prefix))
- yield conditional
- yield "endif"
+ if database.conditional(module):
+ yield "if {}".format(module.conditional_name(macro_prefix))
+ yield conditional
+ yield "endif"
+ else:
+ yield conditional
yield unconditional
- def _automake_unconditional(module, conditional, unconditional, macro_prefix):
+ def _automake_unconditional(module, conditional, unconditional):
yield ""
yield conditional
yield unconditional
- def _gnumake_conditional(module, conditional, unconditional, macro_prefix):
+ def _gnumake_conditional(module, conditional, unconditional):
yield "ifeq (,$(OMIT_GNULIB_MODULE_{}))".format(module.name)
yield ""
- yield "ifneq (,$({}))".format(module.conditional_name(macro_prefix))
- yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", conditional)
- yield "endif"
+ if database.conditional(module):
+ yield "ifneq (,$({}))".format(module.conditional_name(macro_prefix))
+ yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", conditional)
+ yield "endif"
+ else:
+ yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", conditional)
yield "endif"
yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
- def _gnumake_unconditional(module, conditional, unconditional, macro_prefix):
+ def _gnumake_unconditional(module, conditional, unconditional):
yield ""
yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", conditional)
yield __MAKEFILE_GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
return callbacks[conditionals][gnumake]
-def lib_makefile(path, config, explicit, modules, mkedits, testing, **override):
+def lib_makefile(path, config, explicit, database, mkedits, testing, autoconf, **override):
"""Generate library Makefile.am file."""
+ if not isinstance(autoconf, _Executable):
+ raise TypeError("autoconf: executable expected")
+
date = _datetime.now()
libname = config.libname
po_domain = config.po_domain
yield f"# Reproduce by: {actioncmd}"
yield ""
- callback = _lib_makefile_callback(config.conditionals, config.gnumake)
+ callback = _lib_makefile_callback(database, macro_prefix, config.conditionals, config.gnumake)
def _snippet():
lines = []
subdirs = False
- for module in modules:
+ for module in database.main_modules:
if module.test:
continue
conditional = module.conditional_automake_snippet
if module.name == "alloca":
lines.append(f"{libname}_{libext}_LIBADD += @{perhaps_LT}ALLOCA@")
lines.append(f"{libname}_{libext}_DEPENDENCIES += @{perhaps_LT}ALLOCA@")
- lines += list(callback(module, conditional, unconditional, config.macro_prefix))
+ lines += list(callback(module, conditional, unconditional))
lines.append(f"## end gnulib module {module.name}")
lines.append("")
subdirs |= any(__MAKEFILE_SUBDIRS.match(file) for file in module.files)
if config.gnumake:
yield "# Start of GNU Make output."
- autoconf = "autoconf"
- cmdargs = (autoconf, "-t", "AC_SUBST:$1 = @$1@", config.ac_file)
- with _sp.Popen(cmdargs, stdout=_sp.PIPE, stderr=_sp.PIPE) as process:
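+ # Trace AC_SUBST invocations so that every substituted variable gets listed.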
+ with autoconf("-t", "AC_SUBST:$1 = @$1@", config.ac_file) as process:
(stdout, stderr) = process.communicate()
stdout = stdout.decode("UTF-8")
stderr = stderr.decode("UTF-8")
if process.returncode == 0:
- for line in sorted(stdout.splitlines()):
+ for line in sorted(set(stdout.splitlines())):
yield line
else:
yield "== gnulib-tool GNU Make output failed as follows =="
if index != -1:
directive = directive[:index].strip(" ")
yield directive
- for directive in sorted(set(_directives(modules))):
+ for directive in sorted(set(_directives(database.main_modules))):
yield f"{libname}_{libext}_LDFLAGS += {directive}"
yield ""
import os as _os
+import subprocess as _sp
from collections import OrderedDict as _OrderedDict
item = _os.path.normpath(item)
res.append(item)
return super().__set__(obj, res)
+
+
+
+class _PipeMeta(type):
+ __INSTANCE = None
+ def __call__(cls, *args, **kwargs):
+ if _PipeMeta.__INSTANCE is None:
+ _PipeMeta.__INSTANCE = super(_PipeMeta, cls).__call__(*args, **kwargs)
+ return _PipeMeta.__INSTANCE
+
+
+class Pipe(metaclass=_PipeMeta):
+ """pipe handle singleton"""
+ def __repr__(self):
+ module = self.__class__.__module__
+ name = self.__class__.__name__
+ return f"{module}.{name}"
+
+
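+# NOTE: Pipe() always returns the same instance; Executable.__call__() uses it as the
+# default sentinel for stdin/stdout/stderr and replaces it with subprocess.PIPE.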
+
+class Executable:
+ """command-line program or script"""
+ def __init__(self, path, encoding=None):
+ if not isinstance(path, str):
+ raise TypeError("path: str expected")
+ if not path:
+ raise ValueError("path: empty string")
+ if encoding is not None and not isinstance(encoding, str):
+ raise TypeError("encoding: str or None expected")
+ self.__path = path
+ self.__encoding = encoding
+
+
+ path = Property(
+ fget=lambda self: self.__path,
+ doc="executable path",
+ )
+ encoding = Property(
+ fget=lambda self: self.__encoding,
+ doc="stdin/stdout/stderr encoding",
+ )
+
+
+ def __repr__(self):
+ module = self.__class__.__module__
+ name = self.__class__.__name__
+ path = self.__path
+ encoding = self.__encoding
+ return f"{module}.{name}(path={path}, encoding={encoding})"
+
+
+ def __call__(self, *args, **kwargs):
+ """
+ Invoke the command-line tool with the given arguments.
+ Upon execution, a subprocess.Popen instance is returned.
+ """
+ args = ([self.path] + list(args))
+ for key in ("stdin", "stdout", "stderr"):
+ if isinstance(kwargs.get(key, Pipe()), Pipe):
+ kwargs[key] = _sp.PIPE
+ kwargs.setdefault("encoding", self.__encoding)
+ return _sp.Popen(args, **kwargs)
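+
+
+# Illustrative usage: run `patch --version` and capture its output through PIPE.
+#
+#   with Executable("patch")("--version") as process:
+#       (stdout, _) = process.communicate()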
yield file
yield "Depends-on:"
for (module, condition) in self.dependencies:
- yield "{} {}".format(module, condition)
+ if condition:
+ yield f"{module} {condition}"
+ else:
+ yield f"{module}"
yield "configure.ac-early:"
yield self.early_autoconf_snippet
yield "configure.ac:"
raise TypeError("module: str expected")
if condition is not None and not isinstance(condition, str):
raise TypeError("condition: str or None expected")
+ condition = "" if condition is None else condition
result.append((module, condition))
self.__options["dependencies"] = tuple(result)
result += ("EXTRA_lib_SOURCES += {}".format(" ".join(sorted(extra_files))) + "\n")
# Synthesize an EXTRA_DIST augmentation also for the files in build-aux/.
- prefix = _os.path.join("$(top_srcdir)", "{auxdir}")
+ prefix = "$(top_srcdir)/{auxdir}"
buildaux_files = (file for file in all_files if file.startswith("build-aux/"))
buildaux_files = tuple(_os.path.join(prefix, file[len("build-aux/"):]) for file in buildaux_files)
if buildaux_files:
def shell_variable(self, macro_prefix="gl"):
"""Get the name of the shell variable set to true once m4 macros have been executed."""
- module = self.name
- if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), module)):
- module = (module + "\n").encode("UTF-8")
- module = _hashlib.md5(module).hexdigest()
- return "{}_gnulib_enabled_{}".format(macro_prefix, module)
+ name = self.name
+ if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), name)):
+ name = (name + "\n").encode("UTF-8")
+ name = _hashlib.md5(name).hexdigest()
+ return f"{macro_prefix}_gnulib_enabled_{name}"
def shell_function(self, macro_prefix="gl"):
"""Get the name of the shell function containing the m4 macros."""
- module = self.name
- if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), module)):
- module = (module + "\n").encode("UTF-8")
- module = _hashlib.md5(module).hexdigest()
- return "func_{}_gnulib_m4code_{}".format(macro_prefix, module)
+ name = self.name
+ if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), name)):
+ name = (name + "\n").encode("UTF-8")
+ name = _hashlib.md5(name).hexdigest()
+ return f"func_{macro_prefix}_gnulib_m4code_{name}"
def conditional_name(self, macro_prefix="gl"):
"""Get the automake conditional name."""
- module = self.name
- if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), module)):
- module = (module + "\n").encode("UTF-8")
- module = _hashlib.md5(module).hexdigest()
- return "{}_GNULIB_ENABLED_{}".format(macro_prefix, module)
+ name = self.name
+ if any(filter(lambda rune: not (rune.isalnum() or rune == "_"), name)):
+ name = (name + "\n").encode("UTF-8")
+ name = _hashlib.md5(name).hexdigest()
+ return f"{macro_prefix}_GNULIB_ENABLED_{name}"
def items(self):
__slots__ = ("__path")
- __DEPENDENCY = _re.compile(r"(\S+)(?:\s+(\[.*?\]))?$", _re.M)
+ __DEPENDENCY = _re.compile(r"(\S+)(?:\s+\[(.*?)\])?$", _re.M)
__STRING = lambda text: text.strip()
__MULTILINE = lambda text: tuple(filter(
lambda line: line.strip() and not line.strip().startswith("#"),
super().__init__(name="dummy")
+ def __repr__(self):
+ return "pygnulib.module.DummyModule"
+
+
class _GnulibModuleMeta(type):
def __new__(mcs, name, parents, attributes):
def __repr__(self):
+ return f"{self.name}"
- module = self.__class__.__module__
- name = self.__class__.__name__
- return f"{module}.{name}{{{self.name}}}"
class TransitiveClosure:
"""transitive closure table"""
- def __init__(self, lookup, modules, mask, gnumake, tests=False):
+ __slots__ = ("__lookup", "__dependencies", "__demanders", "__paths", "__conditionals")
+
+
+ __AUTOMAKE_CONDITION = _re.compile(r"^if\s+", _re.S | _re.M)
+
+
+ def __init__(self, lookup, modules, mask, gnumake, conditionals, tests=False, error=True):
if not callable(lookup):
raise TypeError("lookup: callable expected")
- demanders = _collections.defaultdict(dict)
- dependencies = _collections.defaultdict(dict)
-
- def _exclude(module):
- return mask != module.mask
+ table = {None: None}
def _lookup(module):
- if not (module is None or isinstance(module, BaseModule)):
- if isinstance(module, str):
- if lookup is None:
- raise TypeError("cannot instantiate {} module".format(module))
- module = lookup(module)
- if not isinstance(module, BaseModule):
- raise TypeError("module: pygnulib.module.BaseModule expected")
- return module
-
- def _update(demander, dependency, condition):
- demanders[demander][dependency] = condition
- dependencies[dependency][demander] = condition
- current.add(dependency)
+ if module not in table:
+ table[module] = lookup(module)
+ return table[module]
- testdb = {}
- mapping = {}
current = set()
previous = set()
+ demanders = _collections.defaultdict(dict)
+ dependencies = _collections.defaultdict(dict)
+ def _update(demander, dependency, condition):
+ table[dependency.name] = dependency
+ if dependency.mask == mask:
+ # A module whose Makefile.am snippet contains a reference to an
+ # automake conditional. If we were to use it conditionally, we
+ # would get an error
+ # configure: error: conditional "..." was never defined.
+ # because automake 1.11.1 does not handle nested conditionals
+ # correctly. As a workaround, make the module unconditional.
+ snippet = dependency.automake_snippet
+ pattern = TransitiveClosure.__AUTOMAKE_CONDITION
+ if condition and pattern.findall(snippet):
+ condition = None
+ demander = None
+ if not condition:
+ condition = None
+ if demander is not None:
+ demander = demander.name
+ dependency = dependency.name
+ demanders[demander][dependency] = condition
+ dependencies[dependency][demander] = condition
+ current.add(dependency)
+
for module in modules:
dependency = lookup(module)
_update(None, dependency, None)
- while current != previous:
+ while True:
+ modules = current.difference(previous)
+ if not modules:
+ break
previous.update(current)
- for demander in previous:
- if tests and not demander.test and testdb.get(demander.name, None) is None:
- try:
- name = (demander.name + "-tests")
- path = (demander.path + "-tests")
- dependency = GnulibModule(name=name, path=path)
- if not _exclude(dependency):
- _update(demander, dependency, None)
- testdb[demander.name] = True
- else:
- testdb[demander.name] = False
- except _UnknownModuleError:
- testdb[demander.name] = False
+ for demander in modules:
+ demander = _lookup(demander)
+ if tests and not demander.test:
+ dependency = _lookup(demander.name + "-tests")
+ if dependency is not None:
+ _update(None, dependency, bool(dependencies[demander.name]))
for (dependency, condition) in demander.dependencies:
- dependency = lookup(dependency)
- if gnumake and condition and condition.startswith("if "):
- # A module whose Makefile.am snippet contains a reference to an
- # automake conditional. If we were to use it conditionally, we
- # would get an error
- # configure: error: conditional "..." was never defined.
- # because automake 1.11.1 does not handle nested conditionals
- # correctly. As a workaround, make the module unconditional.
- _update(None, dependency, None)
- elif not _exclude(dependency):
- _update(demander, dependency, condition)
+ dependency = _lookup(dependency)
+ _update(demander, dependency, condition)
self.__lookup = _lookup
- self.__demanders = demanders
- self.__dependencies = dependencies
-
- conditional = set()
- unconditional = set()
- for (dependency, demanders) in self.__dependencies.items():
- for demander in demanders:
- if demander is None:
- unconditional.add(dependency)
- break
-
- previous = set()
- current = set(unconditional)
- while previous != current:
- previous.update(current)
- for demander in previous:
- dependencies = self.__demanders.get(demander, {})
- for (dependency, condition) in dependencies.items():
- if condition is not None:
- conditional.add(dependency)
- elif demander in conditional:
- conditional.add(dependency)
- else:
- conditional.discard(dependency)
- unconditional.add(dependency)
- current.add(demander)
- self.__conditional = conditional
+ self.__paths = dict()
+ self.__conditionals = dict()
+ self.__demanders = dict(demanders)
+ self.__dependencies = dict(dependencies)
def __iter__(self):
for dependency in self.__dependencies:
- yield dependency
+ yield self.__lookup(dependency)
+
+
+ def paths(self, module):
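+ """Return all recorded demander chains for the module (used by conditional())."""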
+ graph = self.__dependencies
+ module = self.__lookup(module).name
+ if module in self.__paths:
+ return self.__paths[module]
+ def _paths():
+ path = [module]
+ seen = {module}
+ def search():
+ dead_end = True
+ for neighbour in graph.get(path[-1], []):
+ if neighbour not in seen:
+ dead_end = False
+ seen.add(neighbour)
+ path.append(neighbour)
+ yield from search()
+ path.pop()
+ seen.remove(neighbour)
+ if dead_end:
+ yield tuple(path)
+ yield from search()
+ result = self.__paths[module] = tuple(path[:-1] for path in _paths())
+ return result
+
+
+ def conditional(self, module):
+ """
+ Test whether module is a conditional dependency.
+ Note that this check also takes all parent modules into account.
+ """
+ table = self.__dependencies
+ module = self.__lookup(module).name
+ def _conditional():
+ if module in self.__demanders[None]:
+ return False
+ if module in self.__conditionals:
+ return self.__conditionals[module]
+ conditions = set()
+ paths = self.paths(module)
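+ # A module is conditional iff every demander chain contains at least one
+ # conditional edge; a single fully unconditional chain makes it unconditional.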
+ for path in paths:
+ conditions.add(any({bool(table[dep][dem]) for (dep, dem) in zip(path, path[1:])}))
+ return all(conditions)
+ return self.__conditionals.setdefault(module, _conditional())
+
+
+ def unconditional(self, module):
+ """
+ Test whether module is an unconditional dependency.
+ Note that this check also takes all parent modules into account.
+ """
+ return not self.conditional(module)
def dump(self, indent=" "):
unconditional = set()
storage = _collections.defaultdict(dict)
yield "{{".format()
- for (key, value) in self.__dependencies.items():
- for (subkey, subvalue) in value.items():
- dependency = key.name
- demander = subkey.name if subkey else ""
- condition = subvalue if subvalue else ""
+ for (dependency, entries) in self.__dependencies.items():
+ for (demander, condition) in entries.items():
+ if condition is None:
+ condition = ""
if not demander and not condition:
unconditional.add(dependency)
condition = condition.replace("\"", "\\\"")
value = collection[key]
for (subkey, subvalue) in value.items():
(dependency, demander, condition) = (key, subkey, subvalue)
- dependency = self.__lookup(dependency)
- demander = self.__lookup(demander)
if not condition:
condition = None
demanders[demander][dependency] = condition
self.__dependencies = dict(dependencies)
- def conditional(self, module):
- """
- Test whether module is a conditional dependency.
- Note that this check also takes all parent modules into account.
- Any module with an unconditional demander is also unconditional.
- """
- module = self.__lookup(module)
- if module not in self.__dependencies:
- fmt = "dependency {} not found"
- raise KeyError(fmt.format(module))
- return module in self.__conditional
-
-
- def unconditional(self, module):
- """
- Test whether module is an unconditional dependency.
- Note that this check also takes all parent modules into account.
- Any module with an unconditional demander is also unconditional.
- """
- return not self.conditional(module)
-
-
def demanders(self, module):
"""For each demander which requires the module yield the demander and the corresponding condition."""
- module = self.__lookup(module)
+ module = self.__lookup(module).name
if module in self.__dependencies:
for (demander, condition) in self.__dependencies.get(module, {}).items():
- yield (demander, condition)
+ yield (self.__lookup(demander), condition)
def dependencies(self, module):
"""For each dependency of the module yield this dependency and the corresponding condition."""
- module = self.__lookup(module)
+ module = self.__lookup(module).name
if module in self.__demanders:
for (dependency, condition) in self.__demanders.get(module, {}).items():
- yield (dependency, condition)
+ yield (self.__lookup(dependency), condition)
# Perform a transitive closure for modules from the configuration.
# The result of this transitive closure is a set of main modules.
- explicit_modules = {lookup(module) for module in config.modules}
- base_closure = TransitiveClosure(lookup, explicit_modules, mask, gnumake)
- full_closure = TransitiveClosure(lookup, set(base_closure), mask, gnumake, True)
+ conditionals = config.conditionals
+ modules = explicit_modules = {lookup(module) for module in config.modules}
+ base_closure = TransitiveClosure(lookup, modules, mask, gnumake, conditionals)
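+ # Collect the "-tests" counterparts of the base closure, then resolve the full
+ # closure (main modules plus their tests) in a second pass.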
+ modules = map(lambda module: lookup(module.name + "-tests"), set(base_closure))
+ modules = set(filter(lambda module: module is not None, modules))
+ full_closure = TransitiveClosure(lookup, (explicit_modules | modules), mask, gnumake, conditionals, True)
# Once the full transitive closure is completed, populate the database.
main_modules = set(base_closure)
def __iter__(self):
- def _iter():
- for dependency in self.__closure:
- for (demander, condition) in self.__closure.demanders(dependency):
- yield (dependency, demander, condition)
- return iter(sorted(_iter()))
+ return iter(self.__closure)
+
+
+ def paths(self, module):
+ return self.__closure.paths(module)
def conditional(self, module):
from .config import LGPLv2_LICENSE as _LGPLv2_LICENSE
from .config import LGPLv3_LICENSE as _LGPLv3_LICENSE
from .config import GPLv2_LICENSE as _GPLv2_LICENSE
-from .config import LGPL_LICENSE as _LGPL_LICENSE
+from .config import LGPL_LICENSES as _LGPL_LICENSES
value = {
"2": _LGPLv2_LICENSE,
"3": _LGPLv3_LICENSE,
- "yes": _LGPL_LICENSE,
+ "yes": _LGPL_LICENSES,
"3orGPLv2": (_GPLv2_LICENSE | _LGPLv3_LICENSE),
}[value]
super().__call__(parser, namespace, value, option)
+++ /dev/null
-#!/usr/bin/python
-# encoding: UTF-8
-"""gnulib command-line tools"""
-
-
-
-import subprocess as _sp
-
-
-
-class _PipeMeta(type):
- __INSTANCE = None
- def __call__(cls, *args, **kwargs):
- if _PipeMeta.__INSTANCE is None:
- _PipeMeta.__INSTANCE = super(_PipeMeta, cls).__call__(*args, **kwargs)
- return _PipeMeta.__INSTANCE
-
-
-class Pipe(metaclass=_PipeMeta):
- """pipe handle singleton"""
- pass
-
-
-
-class Executable:
- """command-line program or script"""
- def __init__(self, name, path=None, encoding=None):
- self.__name = name
- self.__path = path
- self.__encoding = encoding
-
-
- @property
- def name(self):
- """executable name"""
- return self.__name
-
-
- @property
- def path(self):
- """executable path"""
- return self.__path if self.__path else self.name
-
-
- def __call__(self, *args, **kwargs):
- """
- Invoke command-line tool with the given arguments.
- Upon execution subprocess.Popen instance is returned.
- """
- args = ([self.path] + list(args))
- for key in ("stdin", "stdout", "stderr"):
- if isinstance(kwargs.get(key, Pipe()), Pipe):
- kwargs[key] = _sp.PIPE
- kwargs.setdefault("encoding", self.__encoding)
- return _sp.Popen(args, **kwargs)
import filecmp as _filecmp
import os as _os
import shutil as _shutil
+import sys as _sys
import tempfile as _tempfile
import subprocess as _sp
from .error import UnknownModuleError as _UnknownModuleError
from .module import DummyModule as _DummyModule
from .module import GnulibModule as _GnulibModule
+from .misc import Executable as _Executable
-def lookup(name, primary, secondary, patch="patch"):
+def lookup(name, primary, secondary, patch):
"""
Try to look up a regular file inside virtual file systems or combine it via patch utility.
The name argument is a relative file name which is going to be looked up.
NOTE: It is up to the caller to unlink files obtained after dynamic patching.
"""
+ if not isinstance(name, str):
+ raise TypeError("name: str expected")
+ if not isinstance(primary, BaseVFS):
+ raise TypeError("primary: VFS expected")
+ if not isinstance(secondary, BaseVFS):
+ raise TypeError("secondary: VFS expected")
+ if not isinstance(patch, _Executable):
+ raise TypeError("patch: executable expected")
+
if name in secondary:
return (secondary, name)
- diff = "{}.diff".format(name)
+ diff = f"{name}.diff"
if diff not in secondary:
return (primary, name)
tmp = _tempfile.NamedTemporaryFile(mode="w+b", delete=False)
tmp.close()
stdin = _codecs.open(secondary[diff], "rb")
- cmd = (patch, "-s", tmp.name)
- pipes = _sp.Popen(cmd, stdin=stdin, stdout=_sp.PIPE, stderr=_sp.PIPE)
+ pipes = patch("-s", tmp.name, stdin=stdin)
(stdout, stderr) = pipes.communicate()
stdout = stdout.decode("UTF-8")
class GnulibGitVFS(BaseVFS):
"""gnulib git repository"""
- __slots__ = ("__cache", "__modules")
+ __slots__ = ("__cache", "__prefix")
_EXCLUDE = {
def __init__(self, origin):
super().__init__(origin=origin)
self.__cache = {"dummy": _DummyModule()}
- self.__modules = _os.path.join(self.root, "modules")
+ self.__prefix = _sys.intern(_os.path.join(self.root, "modules"))
if not _os.path.exists(self.root):
raise FileNotFoundError(self.root)
if not _os.path.isdir(self.root):
def module(self, name):
- """instantiate a module"""
+ """Try to find the module by name."""
if name in self.__cache:
return self.__cache[name]
- path = _os.path.join(self.__modules, name)
+ path = _os.path.join(self.__prefix, name)
try:
- self.__cache[name] = _GnulibModule(path=path, name=name)
- return self.__cache[name]
- except FileNotFoundError:
- raise _UnknownModuleError(name)
+ result = self.__cache[name] = _GnulibModule(path=path, name=name)
+ return result
+ except (FileNotFoundError, _UnknownModuleError):
+ return None
def modules(self):
- """iterate over all available modules"""
- for root, _, files in _os.walk(self.__modules):
+ """Iterate over all available modules."""
+ for root, _, files in _os.walk(self.__prefix):
names = []
for name in files:
exclude = False
names += [name]
for name in names:
path = _os.path.join(root, name)
- name = path[len(self.__modules) + 1:]
+ name = path[len(self.__prefix) + 1:]
yield self.module(name)