from pygnulib.error import CommandLineError
from pygnulib.error import UnknownModuleError
-from pygnulib.config import Base as BaseConfig
-from pygnulib.config import Cache as CacheConfig
+from pygnulib.config import BaseConfig
+from pygnulib.config import CachedConfig
-from pygnulib.generator import CommandLine as CommandLineGenerator
-from pygnulib.generator import GnulibCache as GnulibCacheGenerator
-from pygnulib.generator import LibMakefile as LibMakefileGenerator
-from pygnulib.generator import POMakevars as POMakevarsGenerator
+from pygnulib.generator import CommandLineGenerator
+from pygnulib.generator import GnulibCacheGenerator
+from pygnulib.generator import LibMakefileGenerator
+from pygnulib.generator import POMakevarsGenerator
+from pygnulib.generator import GnulibCompGenerator
-from pygnulib.module import filelist
-from pygnulib.module import dummy_required
-from pygnulib.module import libtests_required
-from pygnulib.module import transitive_closure
+from pygnulib.module import DummyModule
+from pygnulib.module import Database
from pygnulib.parser import CommandLine as CommandLineParser
+from pygnulib.vfs import BaseVFS
+from pygnulib.vfs import GnulibGitVFS
from pygnulib.vfs import backup as vfs_backup
from pygnulib.vfs import compare as vfs_compare
from pygnulib.vfs import copy as vfs_copy
from pygnulib.vfs import iostream as vfs_iostream
from pygnulib.vfs import symlink as vfs_symlink
from pygnulib.vfs import unlink as vfs_unlink
-from pygnulib.vfs import Base as BaseVFS
-from pygnulib.vfs import GnulibGit as GnulibGitVFS
def import_hook(script, gnulib, namespace, explicit, verbosity, options, *args, **kwargs):
- keywords = frozenset({
- "tests",
- "obsolete",
- "cxx_tests",
- "longrunning_tests",
- "privileged_tests",
- "unportable_tests",
- })
(_, _) = (args, kwargs)
config = BaseConfig(**namespace)
- cache = CacheConfig(configure=None)
+ cache = CachedConfig(configure=None)
for key in {"ac_version", "files"}:
if key not in namespace:
config[key] = cache[key]
- test_options = {key:config[key] for key in keywords}
- (db, main, final, tests) = transitive_closure(gnulib.module, config.modules, **test_options)
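+    # The module database resolves the transitive closure of the requested
+    # modules into the main, final and test module sets and their file lists.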
+ database = Database(gnulib.module, config)
# Print some information about modules.
print("Module list with included dependencies (indented):", file=sys.stdout)
if not (stat.S_ISFIFO(mode) or stat.S_ISREG(mode)):
BOLD_ON = "\033[1m"
BOLD_OFF = "\033[0m"
- for module in sorted(final):
- manual = module.name in config.modules
+ for module in database.final_modules:
+ manual = module in database.explicit_modules
prefix = " " if manual else " "
bold_on = BOLD_ON if manual else ""
bold_off = BOLD_OFF if manual else ""
print("{0}{1}{2}{3}".format(prefix, bold_on, module.name, bold_off), file=sys.stdout)
if verbosity >= 1:
print("Main module list:", file=sys.stdout)
- for module in sorted(main):
- print(" {0}".format(module.name), file=sys.stdout)
- print("" if main else "\n", end="")
+ for module in database.main_modules:
+ if module is not DummyModule:
+ print(" {0}".format(module.name), file=sys.stdout)
+ if database.main_modules:
+ print("", file=sys.stdout)
print("Tests-related module list:", file=sys.stdout)
- for module in sorted(tests):
+ for module in database.test_modules:
print(" {0}".format(module.name), file=sys.stdout)
- print("" if tests else "\n", end="")
-
- # Determine if dummy needs to be added to main or test sets.
- if "dummy" not in config.avoids:
- if dummy_required(main):
- main.add(gnulib.module("dummy"))
- if libtests_required(tests) and dummy_required(tests):
- tests.add(gnulib.module("dummy"))
+ if database.test_modules:
+ print("", file=sys.stdout)
# Determine license incompatibilities, if any.
incompatibilities = set()
# Show banner notice of every module.
if verbosity >= -1:
- for module in sorted(main):
- name = module.name
+ for module in database.main_modules:
notice = module.notice
if notice.strip():
- print("Notice from module {0}:".format(name), file=sys.stdout)
+ print("Notice from module {0}:".format(module.name), file=sys.stdout)
print("\n".join(" " + line for line in notice.splitlines()), file=sys.stdout)
- # Determine the final file lists.
- main_files = filelist(main, config.ac_version)
- tests_files = filelist(tests, config.ac_version)
- for file in tests_files:
- if file.startswith("lib/"):
- tests_files.remove(file)
- file = "tests=lib/" + file[len("lib/"):]
- tests_files.add(file)
- files = (main_files | tests_files)
+ # Determine the final file list.
+ files = (set(database.main_files) | set(database.test_files))
if verbosity >= 0:
print("File list:", file=sys.stdout)
for file in sorted(files):
action = ("Removing", "Remove")[dry_run]
fmt = (action + " file {file} (backup in {file}~)")
if not dry_run:
- vfs_unlink(project, file, backup=True)
+ try:
+ vfs_unlink(project, file, backup=True)
+ except FileNotFoundError:
+ pass
print(fmt.format(file=file), file=sys.stdout)
def update_file(local, src_vfs, src_name, dst_vfs, dst_name, present):
# Generate the contents of library makefile.
path = os.path.join(config.source_base, config.makefile_name)
with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
- for line in LibMakefileGenerator(config, explicit, path, main, mkedits, False):
- tmp.write(line + "\n")
+ for line in LibMakefileGenerator(path, config, explicit, database, mkedits, False):
+ print(line, file=tmp)
(src, dst) = (tmp.name, path)
present = vfs_exists(project, dst)
if present:
# Create po makefile parameterization, part 1.
with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
for line in POMakevarsGenerator(config):
- tmp.write(line + "\n")
+ print(line, file=tmp)
(src, dst) = (tmp.name, "po/Makevars")
present = vfs_exists(project, dst)
if present:
# Create po makefile parameterization, part 2.
with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
for line in POMakevarsGenerator(config):
- tmp.write(line + "\n")
- (src, dst) = (tmp.name, "po/POTFILES.in")
+ print(line, file=tmp)
+    (src, dst) = (tmp.name, "po/POTFILES.in")
present = vfs_exists(project, dst)
if present:
added_files.add(dst)
with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
tmp.write("# Set of available languages.\n")
for line in sorted(languages):
- tmp.write(line + "\n")
+ print(line, file=tmp)
(src, dst) = (tmp.name, "po/LINGUAS")
present = vfs_exists(project, dst)
if present:
# Create m4/gnulib-cache.m4.
with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
for line in GnulibCacheGenerator(config):
- tmp.write(line + "\n")
+ print(line, file=tmp)
(src, dst) = (tmp.name, "m4/gnulib-cache.m4")
present = vfs_exists(project, dst)
if present:
action(False, None, src, project, dst, present)
os.unlink(tmp.name)
+ # Create m4/gnulib-comp.m4.
+ with tempfile.NamedTemporaryFile("w", encoding="UTF-8", delete=False) as tmp:
+ for line in GnulibCompGenerator(config, explicit, database):
+ print(line, file=tmp)
+ (src, dst) = (tmp.name, "m4/gnulib-comp.m4")
+ present = vfs_exists(project, dst)
+ if present:
+ added_files.add(dst)
+ action = update_file if present else add_file
+ action(False, None, src, project, dst, present)
+ os.unlink(tmp.name)
+
return os.EX_OK
def add_import_hook(script, gnulib, namespace, explicit, verbosity, options, *args, **kwargs):
(_, _) = (args, kwargs)
modules = set(namespace.pop("modules"))
- config = CacheConfig(**namespace)
+ config = CachedConfig(**namespace)
namespace = {k:v for (k, v) in config.items()}
namespace["modules"] = (config.modules | modules)
    return import_hook(script, gnulib, namespace, explicit, verbosity, options)
def remove_import_hook(script, gnulib, namespace, explicit, verbosity, options, *args, **kwargs):
(_, _) = (args, kwargs)
modules = set(namespace.pop("modules"))
- config = CacheConfig(**namespace)
+ config = CachedConfig(**namespace)
namespace = {k:v for (k, v) in config.items()}
namespace["modules"] = (config.modules - modules)
    return import_hook(script, gnulib, namespace, explicit, verbosity, options)
def update_hook(script, gnulib, namespace, explicit, verbosity, options, *args, **kwargs):
(_, _) = (args, kwargs)
- config = CacheConfig(**namespace)
+ config = CachedConfig(**namespace)
namespace = {k:v for (k, v) in config.items()}
    return import_hook(script, gnulib, namespace, explicit, verbosity, options)
-class Base:
+class BaseConfig:
"""gnulib generic configuration"""
_TABLE = {
"root" : ".",
"witness_c_macro" : "",
"licenses" : set(),
"libtool" : False,
- "conddeps" : False,
+ "conditionals" : True,
"vc_files" : False,
"ac_version" : 2.59,
"modules" : set(),
def __init__(self, **kwargs):
self.__table = {"options": 0}
- for (key, value) in Base._TABLE.items():
+ for (key, value) in BaseConfig._TABLE.items():
self[key] = kwargs.get(key, value)
_type_assert("source_base", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("source_base cannot be an absolute path")
+ raise ValueError("source_base cannot be an absolute path")
self.__table["source_base"] = _os.path.normpath(value) if value else "lib"
_type_assert("m4_base", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("m4_base cannot be an absolute path")
+ raise ValueError("m4_base cannot be an absolute path")
self.__table["m4_base"] = _os.path.normpath(value) if value else "m4"
_type_assert("po_base", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("po_base cannot be an absolute path")
+ raise ValueError("po_base cannot be an absolute path")
self.__table["po_base"] = _os.path.normpath(value) if value else "po"
_type_assert("doc_base", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("doc_base cannot be an absolute path")
+ raise ValueError("doc_base cannot be an absolute path")
self.__table["doc_base"] = _os.path.normpath(value) if value else "doc"
_type_assert("tests_base", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("tests_base cannot be an absolute path")
+ raise ValueError("tests_base cannot be an absolute path")
self.__table["tests_base"] = _os.path.normpath(value) if value else "tests"
_type_assert("auxdir", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("auxdir cannot be an absolute path")
+ raise ValueError("auxdir cannot be an absolute path")
self.__table["auxdir"] = _os.path.normpath(value) if value else "build-aux"
_type_assert("makefile_name", value, str)
value = _os.path.normpath(value)
if _os.path.isabs(value):
- return ValueError("makefile_name cannot be an absolute path")
+ raise ValueError("makefile_name cannot be an absolute path")
self.__table["makefile_name"] = value
@property
def tests(self):
"""include unit tests for the included modules"""
- return bool(self.__table["options"] & Base._Option.Tests)
+ return bool(self.__table["options"] & BaseConfig._Option.Tests)
@tests.setter
def tests(self, value):
_type_assert("tests", value, bool)
if value:
- self.__table["options"] |= Base._Option.Tests
+ self.__table["options"] |= BaseConfig._Option.Tests
else:
- self.__table["options"] &= ~Base._Option.Tests
+ self.__table["options"] &= ~BaseConfig._Option.Tests
@property
def obsolete(self):
"""include obsolete modules when they occur among the modules"""
- return bool(self.__table["options"] & Base._Option.Tests)
+        return bool(self.__table["options"] & BaseConfig._Option.Obsolete)
@obsolete.setter
def obsolete(self, value):
_type_assert("obsolete", value, bool)
if value:
- self.__table["options"] |= Base._Option.Obsolete
+ self.__table["options"] |= BaseConfig._Option.Obsolete
else:
- self.__table["options"] &= ~Base._Option.Obsolete
+ self.__table["options"] &= ~BaseConfig._Option.Obsolete
@property
def cxx_tests(self):
"""include even unit tests for C++ interoperability"""
- return bool(self.__table["options"] & Base._Option.CXX)
+ return bool(self.__table["options"] & BaseConfig._Option.CXX)
@cxx_tests.setter
def cxx_tests(self, value):
_type_assert("cxx_tests", value, bool)
if value:
- self.__table["options"] |= Base._Option.CXX
+ self.__table["options"] |= BaseConfig._Option.CXX
else:
- self.__table["options"] &= ~Base._Option.CXX
+ self.__table["options"] &= ~BaseConfig._Option.CXX
@property
def longrunning_tests(self):
"""include even unit tests that are long-runners"""
- return bool(self.__table["options"] & Base._Option.Longrunning)
+ return bool(self.__table["options"] & BaseConfig._Option.Longrunning)
@longrunning_tests.setter
def longrunning_tests(self, value):
_type_assert("longrunning_tests", value, bool)
if value:
- self.__table["options"] |= Base._Option.Longrunning
+ self.__table["options"] |= BaseConfig._Option.Longrunning
else:
- self.__table["options"] &= ~Base._Option.Longrunning
+ self.__table["options"] &= ~BaseConfig._Option.Longrunning
@property
def privileged_tests(self):
"""include even unit tests that require root privileges"""
- return bool(self.__table["options"] & Base._Option.Privileged)
+ return bool(self.__table["options"] & BaseConfig._Option.Privileged)
@privileged_tests.setter
def privileged_tests(self, value):
_type_assert("privileged_tests", value, bool)
if value:
- self.__table["options"] |= Base._Option.Privileged
+ self.__table["options"] |= BaseConfig._Option.Privileged
else:
- self.__table["options"] &= ~Base._Option.Privileged
+ self.__table["options"] &= ~BaseConfig._Option.Privileged
@property
def unportable_tests(self):
"""include even unit tests that fail on some platforms"""
- return bool(self.__table["options"] & Base._Option.Unportable)
+ return bool(self.__table["options"] & BaseConfig._Option.Unportable)
@unportable_tests.setter
def unportable_tests(self, value):
_type_assert("unportable_tests", value, bool)
if value:
- self.__table["options"] |= Base._Option.Unportable
+ self.__table["options"] |= BaseConfig._Option.Unportable
else:
- self.__table["options"] &= ~Base._Option.Unportable
+ self.__table["options"] &= ~BaseConfig._Option.Unportable
@property
def all_tests(self):
"""include all kinds of problematic unit tests"""
- return (self.__table["options"] & Base._Option.AllTests) == Base._Option.AllTests
+ return (self.__table["options"] & BaseConfig._Option.AllTests) == BaseConfig._Option.AllTests
@all_tests.setter
def all_tests(self, value):
if value:
- self.__table["options"] |= Base._Option.AllTests
+ self.__table["options"] |= BaseConfig._Option.AllTests
else:
- self.__table["options"] &= Base._Option.AllTests
+            self.__table["options"] &= ~BaseConfig._Option.AllTests
@property
@property
- def conddeps(self):
+ def conditionals(self):
"""support conditional dependencies (may save configure time and object code)"""
- return self.__table["conddeps"]
+ return self.__table["conditionals"]
- @conddeps.setter
- def conddeps(self, value):
- _type_assert("conddeps", value, bool)
- self.__table["conddeps"] = value
+ @conditionals.setter
+ def conditionals(self, value):
+ _type_assert("conditionals", value, bool)
+ self.__table["conditionals"] = value
@property
def include_guard_prefix(self):
"""include guard prefix"""
prefix = self.__table["macro_prefix"].upper()
- default = Base._TABLE["macro_prefix"].upper()
+ default = BaseConfig._TABLE["macro_prefix"].upper()
return "GL" if prefix == default else "GL_{0}".format(prefix)
@property
    def gnumake(self):
        """output for GNU Make instead of for the default Automake"""
- return bool(self.__table["options"] & Base._Option.GNUMake)
+ return bool(self.__table["options"] & BaseConfig._Option.GNUMake)
@gnumake.setter
def gnumake(self, value):
_type_assert("gnumake", value, bool)
if value:
- self.__table["options"] |= Base._Option.GNUMake
+ self.__table["options"] |= BaseConfig._Option.GNUMake
else:
- self.__table["options"] &= ~Base._Option.GNUMake
+ self.__table["options"] &= ~BaseConfig._Option.GNUMake
@property
def copyrights(self):
"""update the license copyright text"""
- return bool(self.__table["options"] & Base._Option.Copyrights)
+ return bool(self.__table["options"] & BaseConfig._Option.Copyrights)
@copyrights.setter
def copyrights(self, value):
_type_assert("copyrights", value, bool)
if value:
- self.__table["options"] |= Base._Option.Copyrights
+ self.__table["options"] |= BaseConfig._Option.Copyrights
else:
- self.__table["options"] &= ~Base._Option.Copyrights
+ self.__table["options"] &= ~BaseConfig._Option.Copyrights
def __getitem__(self, key):
- table = (set(Base._TABLE.keys()) | Base._OPTIONS)
+ table = (set(BaseConfig._TABLE.keys()) | BaseConfig._OPTIONS)
if key not in table:
key = key.replace("-", "_")
if key not in table:
def __setitem__(self, key, value):
- table = (set(Base._TABLE.keys()) | Base._OPTIONS)
+ table = (set(BaseConfig._TABLE.keys()) | BaseConfig._OPTIONS)
if key not in table:
            key = key.replace("-", "_")
if key not in table:
-class Cache(Base):
+class CachedConfig(BaseConfig):
"""gnulib cached configuration"""
_COMMENTS = _compile(r"((?:(?:#)|(?:^dnl\s+)|(?:\s+dnl\s+)).*?)$")
_AUTOCONF = {
_GNULIB_CACHE = {
"overrides" : (list, _compile(r"gl_LOCAL_DIR\(\[(.*?)\]\)")),
"libtool" : (bool, _compile(r"gl_LIBTOOL\(\[(.*?)\]\)")),
- "conddeps" : (bool, _compile(r"gl_CONDITIONAL_DEPENDENCIES\(\[(.*?)\]\)")),
+ "conditionals" : (bool, _compile(r"gl_CONDITIONAL_DEPENDENCIES\(\[(.*?)\]\)")),
"vc_files" : (bool, _compile(r"gl_VC_FILES\(\[(.*?)\]\)")),
"tests" : (bool, _compile(r"gl_WITH_TESTS\(\[(.*?)\]\)")),
"obsolete" : (bool, _compile(r"gl_WITH_OBSOLETE\(\[(.*?)\]\)")),
def __configure_ac(self, configure, explicit, **kwargs):
with _codecs.open(configure, "rb", "UTF-8") as stream:
- data = Cache._COMMENTS.sub("", stream.read())
- for (key, pattern) in Cache._AUTOCONF.items():
+ data = CachedConfig._COMMENTS.sub("", stream.read())
+ for (key, pattern) in CachedConfig._AUTOCONF.items():
match = pattern.findall(data)
if match and key not in explicit:
self[key] = match[-1]
if not _os.path.exists(path):
raise FileNotFoundError(path)
with _codecs.open(path, "rb", "UTF-8") as stream:
- data = Cache._COMMENTS.sub("", stream.read())
- for (key, (typeid, pattern)) in Cache._GNULIB_CACHE.items():
+ data = CachedConfig._COMMENTS.sub("", stream.read())
+ for (key, (typeid, pattern)) in CachedConfig._GNULIB_CACHE.items():
match = pattern.findall(data)
if match and key not in explicit:
if key == "licenses":
if not _os.path.exists(path):
raise FileNotFoundError(path)
with _codecs.open(path, "rb", "UTF-8") as stream:
- data = Cache._COMMENTS.sub("", stream.read())
+ data = CachedConfig._COMMENTS.sub("", stream.read())
pattern = _compile(r"AC_DEFUN\(\[{0}_FILE_LIST\], \[(.*?)\]\)".format(macro_prefix))
match = pattern.findall(data)
if match and "files" not in explicit:
from .error import type_assert as _type_assert
-from .config import Base as _BaseConfig
-from .module import Base as _BaseModule
+from .config import BaseConfig as _BaseConfig
+from .module import BaseModule as _BaseModule
+from .module import Database as _Database
from .config import LGPLv2_LICENSE as _LGPLv2_LICENSE
from .config import LGPLv3_LICENSE as _LGPLv3_LICENSE
from .config import GPLv2_LICENSE as _GPLv2_LICENSE
-class Base:
+_DISCLAIMER = (
+ "## DO NOT EDIT! GENERATED AUTOMATICALLY!",
+ "#",
+ "# This file is free software; you can redistribute it and/or modify",
+ "# it under the terms of the GNU General Public License as published by",
+ "# the Free Software Foundation; either version 3 of the License, or",
+ "# (at your option) any later version.",
+ "#",
+ "# This file is distributed in the hope that it will be useful,",
+ "# but WITHOUT ANY WARRANTY; without even the implied warranty of",
+ "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the",
+ "# GNU General Public License for more details.",
+ "#",
+ "# You should have received a copy of the GNU General Public License",
+ "# along with this file. If not, see <http://www.gnu.org/licenses/>.",
+ "#",
+ "# As a special exception to the GNU General Public License,",
+ "# this file may be distributed as part of a program that",
+ "# contains a configuration script generated by Autoconf, under",
+ "# the same distribution terms as the rest of that program.",
+ "#",
+ "# Generated by gnulib-tool.",
+)
+
+
+
+class BaseGenerator:
"""gnulib file content generator"""
- _TEMPLATE = (
- "## DO NOT EDIT! GENERATED AUTOMATICALLY!",
- "#",
- "# This file is free software; you can redistribute it and/or modify",
- "# it under the terms of the GNU General Public License as published by",
- "# the Free Software Foundation; either version 3 of the License, or",
- "# (at your option) any later version.",
- "#",
- "# This file is distributed in the hope that it will be useful,",
- "# but WITHOUT ANY WARRANTY; without even the implied warranty of",
- "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the",
- "# GNU General Public License for more details.",
- "#",
- "# You should have received a copy of the GNU General Public License",
- "# along with this file. If not, see <http://www.gnu.org/licenses/>.",
- "#",
- "# As a special exception to the GNU General Public License,",
- "# this file may be distributed as part of a program that",
- "# contains a configuration script generated by Autoconf, under",
- "# the same distribution terms as the rest of that program.",
- "#",
- "# Generated by gnulib-tool.",
- )
-
-
def __repr__(self):
module = self.__class__.__module__
name = self.__class__.__name__
def __iter__(self):
- for line in Base._TEMPLATE:
- yield line
+ yield
-class POMakevars(Base):
+class POMakevarsGenerator(BaseGenerator):
"""PO Makefile parameterization"""
_TEMPLATE = (
"# These options get passed to xgettext.",
def __iter__(self):
- for line in super().__iter__():
+ for line in _DISCLAIMER:
yield line
yield "# Usually the message domain is the same as the package name."
yield "# But here it has a '-gnulib' suffix."
yield "# These two variables depend on the location of this directory."
        yield "subdir = {}".format(self.po_base)
yield "top_subdir = {}".format("/".join(".." for _ in self.po_base.split(_os.path.sep)))
- for line in POMakevars._TEMPLATE:
+ for line in POMakevarsGenerator._TEMPLATE:
yield line
-class POTFILES(Base):
+class POTFILESGenerator(BaseGenerator):
"""file list to be passed to xgettext"""
def __init__(self, config, files):
_type_assert("config", config, _BaseConfig)
def __iter__(self):
- for line in super().__iter__():
+ for line in _DISCLAIMER:
yield line
yield "# List of files which contain translatable strings."
for file in [_ for _ in self.files if _.startswith("lib/")]:
-class AutoconfSnippet(Base):
+class AutoconfSnippetGenerator(BaseGenerator):
"""autoconf snippet generator for standalone module"""
def __init__(self, config, module, toplevel, no_libtool, no_gettext):
"""
"""libtool switch, disabling libtool configuration parameter"""
return self.__config.libtool and not self.__no_libtool
-
@property
def gettext(self):
"""gettext switch, disabling AM_GNU_GETTEXT invocations"""
-class InitMacro(Base):
+class AutoconfMultisnippetGenerator(BaseGenerator):
+ """multi-module autoconf snippets generator"""
+ def __init__(self, config, database, modules, toplevel, no_libtool, no_gettext, macro_prefix=None):
+ _type_assert("config", config, _BaseConfig)
+ _type_assert("database", database, _Database)
+ _type_assert("modules", modules, _ITERABLES)
+ _type_assert("toplevel", toplevel, bool)
+ _type_assert("no_libtool", no_libtool, bool)
+ _type_assert("no_gettext", no_gettext, bool)
+ if macro_prefix is None:
+ macro_prefix = config.macro_prefix
+ _type_assert("macro_prefix", macro_prefix, str)
+ super().__init__()
+ self.__config = config
+ self.__database = database
+ self.__modules = set()
+ for module in modules:
+ _type_assert("module", module, _BaseModule)
+ self.__modules.add(module)
+ self.__modules = sorted(self.__modules)
+ self.__toplevel = toplevel
+ self.__no_libtool = no_libtool
+ self.__no_gettext = no_gettext
+ self.__macro_prefix = macro_prefix
+
+
+ def __iter__(self):
+ config = self.__config
+ database = self.__database
+ base_table = database.base_table
+ modules = self.__modules
+ macro_prefix = self.__macro_prefix
+
+ arguments = {
+ "config": config,
+ "module": None,
+ "toplevel": self.__toplevel,
+ "no_libtool": self.__no_libtool,
+ "no_gettext": self.__no_gettext,
+ }
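+        # Without conditional dependencies, every snippet is emitted unconditionally.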
+ if not config.conditionals:
+ for module in sorted(modules):
+ arguments["module"] = module
+ for line in AutoconfSnippetGenerator(**arguments):
+ yield " {}".format(line)
+ return
+
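+        # Partition the modules into conditional and unconditional dependencies.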
+ conditional = set()
+ unconditional = set()
+ for dependency in modules:
+ if base_table.conditional(dependency):
+ conditional.add(dependency)
+ else:
+ unconditional.add(dependency)
+ conditional = sorted(set(conditional))
+ unconditional = sorted(set(unconditional))
+
+ # Emit the autoconf code for the unconditional modules.
+ for module in unconditional:
+ arguments["module"] = module
+ for line in AutoconfSnippetGenerator(**arguments):
+ yield " {}".format(line)
+
+ # Initialize the shell variables indicating that the modules are enabled.
+ for module in conditional:
+ shellvar = module.shell_variable(macro_prefix)
+ yield " {}=false".format(shellvar)
+
+ # Emit the autoconf code for the conditional modules, each in a separate
+ # function. This makes it possible to support cycles among conditional
+ # modules.
+ for demander in conditional:
+ shellvar = demander.shell_variable(macro_prefix)
+ shellfunc = demander.shell_function(macro_prefix)
+ yield " {} ()".format(shellfunc)
+ yield " {"
+ yield " if ! ${}; then".format(shellvar)
+ arguments["module"] = demander
+ for line in AutoconfSnippetGenerator(**arguments):
+ yield " {}".format(line)
+ yield " {}=true".format(shellvar)
+ try:
+ for (dependency, condition) in sorted(base_table.dependencies(demander)):
+ if base_table.conditional(dependency):
+ shellfunc = dependency.shell_function(macro_prefix)
+ if condition is not None:
+ yield " if {}; then".format(condition)
+ yield " {}".format(shellfunc)
+ yield " fi"
+ else:
+ yield " {}".format(shellfunc)
+ except KeyError:
+ pass
+ yield " fi"
+ yield " }"
+
+ # Emit the dependencies from the unconditional to the conditional modules.
+ for demander in unconditional:
+ try:
+ for (dependency, condition) in sorted(base_table.dependencies(demander)):
+ if dependency in modules and base_table.conditional(dependency):
+ shellfunc = dependency.shell_function(macro_prefix)
+ if condition is not None:
+ yield " if {}; then".format(condition)
+ yield " {}".format(shellfunc)
+ yield " fi"
+ else:
+ yield " {}".format(shellfunc)
+ except KeyError:
+ pass
+
+ # Define the Automake conditionals.
+ yield " m4_pattern_allow([^{}_GNULIB_ENABLED_])".format(macro_prefix)
+ for module in conditional:
+ condname = module.conditional_name(macro_prefix)
+ shellvar = module.shell_variable(macro_prefix)
+ yield " AM_CONDITIONAL([{}], [${}])".format(condname, shellvar)
+
+
+
+class InitMacroGenerator(BaseGenerator):
"""basic gl_INIT macro generator"""
def __init__(self, config, macro_prefix=None):
"""
if macro_prefix is None:
macro_prefix = config.macro_prefix
_type_assert("macro_prefix", macro_prefix, str)
+ super().__init__()
self.__macro_prefix = macro_prefix
-class InitMacroHeader(InitMacro):
+class InitMacroHeaderGenerator(InitMacroGenerator):
"""the first few statements of the gl_INIT macro"""
_TEMPLATE = (
# Overriding AC_LIBOBJ and AC_REPLACE_FUNCS has the effect of storing
def __iter__(self):
macro_prefix = self.macro_prefix
- for line in InitMacroHeader._TEMPLATE:
+ for line in InitMacroHeaderGenerator._TEMPLATE:
yield line.format(macro_prefix=macro_prefix)
-class InitMacroFooter(InitMacro):
+class InitMacroFooterGenerator(InitMacroGenerator):
"""the last few statements of the gl_INIT macro"""
_TEMPLATE = (
" m4_ifval({macro_prefix}_LIBSOURCES_LIST, [",
# arguments. The check is performed only when autoconf is run from the
# directory where the configure.ac resides; if it is run from a different
# directory, the check is skipped.
- for line in InitMacroFooter._TEMPLATE:
+ for line in InitMacroFooterGenerator._TEMPLATE:
yield line.format(macro_prefix=self.macro_prefix)
-class InitMacroDone(InitMacro):
+class InitMacroDoneGenerator(InitMacroGenerator):
"""few statements AFTER the gl_INIT macro"""
_TEMPLATE = (
"",
def __init__(self, config, source_base=None, macro_prefix=None):
- super().__init__(config=config, macro_prefix=macro_prefix)
if source_base is None:
source_base = config.source_base
_type_assert("source_base", source_base, str)
+ super().__init__(config=config, macro_prefix=macro_prefix)
self.__source_base = source_base
def __iter__(self):
- for line in InitMacroDone._TEMPLATE:
+ for line in InitMacroDoneGenerator._TEMPLATE:
yield line.format(source_base=self.__source_base, macro_prefix=self.macro_prefix)
-class CommandLine(Base):
+class CommandLineGenerator(BaseGenerator):
"""gnulib command-line invocation generator"""
_TESTS = {
"tests": "tests",
"unportable_tests": "unportable-tests",
}
+
def __init__(self, config, explicit):
_type_assert("config", config, _BaseConfig)
_type_assert("explicit", explicit, _ITERABLES)
+ super().__init__()
self.__config = config
self.__explicit = explicit
yield "--doc-base={}".format(config.doc_base)
yield "--tests-base={}".format(config.tests_base)
yield "--aux-dir={}".format(config.auxdir)
- for (key, value) in CommandLine._TESTS.items():
+ for (key, value) in CommandLineGenerator._TESTS.items():
if config[key]:
yield "--with-{}".format(value)
if config.all_tests:
yield "--gnu-make"
if "makefile_name" in explicit:
yield "--makefile-name={}".format(config.makefile_name)
- yield "--{}conditional-dependencies".format("" if config.conddeps else "no-")
+ yield "--{}conditional-dependencies".format("" if config.conditionals else "no-")
yield "--{}libtool".format("" if config.libtool else "no-")
yield "--macro-prefix={}".format(config.macro_prefix)
if "po_domain" in explicit:
-class LibMakefile(Base):
+class LibMakefileGenerator(BaseGenerator):
+ """library Makefile.am generator"""
_LDFLAGS = _re.compile(r"^lib_LDFLAGS\s*\+\=.*?$", _re.S)
_LIBNAME = _re.compile(r"lib_([A-Z][A-Z]*)", _re.S)
_GNUMAKE = _re.compile(r"^if (.*?)$", _re.S)
- def __init__(self, config, explicit, path, modules, mkedits, for_test):
+ def __init__(self, path, config, explicit, database, mkedits, testing):
+ _type_assert("path", path, str)
_type_assert("config", config, _BaseConfig)
_type_assert("explicit", explicit, _ITERABLES)
- _type_assert("path", path, str)
- _type_assert("modules", modules, _ITERABLES)
+ _type_assert("database", database, _Database)
_type_assert("mkedits", mkedits, _ITERABLES)
- _type_assert("for_test", for_test, bool)
+ _type_assert("testing", testing, bool)
+ super().__init__()
self.__config = config
self.__explicit = explicit
+ self.__database = database
self.__path = path
- self.__modules = modules
self.__mkedits = mkedits
- self.__for_test = for_test
+ self.testing = testing
def __iter__(self):
date = _datetime.now()
config = self.__config
explicit = self.__explicit
- for_test = self.__for_test
- modules = self.__modules
+ testing = self.testing
+ database = self.__database
gnumake = config.gnumake
libtool = config.libtool
# especially link errors, already during "make" rather than during
# "make check", because "make check" is not possible in a cross-compiling
# situation. Turn check_PROGRAMS into noinst_PROGRAMS.
- transform_check_PROGRAMS = True if for_test else False
+ transform_check_PROGRAMS = True if testing else False
yield "## DO NOT EDIT! GENERATED AUTOMATICALLY!"
yield "## Process this file with automake to produce Makefile.in."
yield "# Copyright (C) 2002-{} Free Software Foundation, Inc.".format(date.year)
- for line in super().__iter__():
+ for line in _DISCLAIMER:
yield line
# The maximum line length (excluding the terminating newline) of any file
# that is to be preprocessed by config.status is 3070. config.status uses
# awk, and the HP-UX 11.00 awk fails if a line has length >= 3071;
# similarly, the IRIX 6.5 awk fails if a line has length >= 3072.
- actioncmd = " ".join(CommandLine(config, explicit))
+ actioncmd = " ".join(CommandLineGenerator(config, explicit))
if len(actioncmd) <= 3000:
yield "# Reproduce by: {}".format(actioncmd)
yield ""
yield "ifeq (,$(OMIT_GNULIB_MODULE_{}))".format(module.name)
yield ""
yield "ifneq (,$({}))".format(module.conditional_name)
- yield LibMakefile._GNUMAKE.sub("ifneq (,$(\\1))", conditional)
+ yield LibMakefileGenerator._GNUMAKE.sub("ifneq (,$(\\1))", conditional)
yield "endif"
yield "endif"
- yield LibMakefile._GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
+ yield LibMakefileGenerator._GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
def _gnumake_unconditional(module, conditional, unconditional):
yield ""
- yield LibMakefile._GNUMAKE.sub("ifneq (,$(\\1))", conditional)
- yield LibMakefile._GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
+ yield LibMakefileGenerator._GNUMAKE.sub("ifneq (,$(\\1))", conditional)
+ yield LibMakefileGenerator._GNUMAKE.sub("ifneq (,$(\\1))", unconditional)
uses_subdirs = False
process = (
(_common_unconditional, _gnumake_unconditional),
(_common_conditional, _gnumake_conditional),
- )[config.conddeps][gnumake]
- for module in sorted(modules):
+ )[config.conditionals][gnumake]
+ for module in database.main_modules:
if module.name.endswith("-tests"):
continue
conditional = module.conditional_automake_snippet
conditional = conditional.replace("lib_LIBRARIES", "lib%_LIBRARIES")
conditional = conditional.replace("lib_LTLIBRARIES", "lib%_LTLIBRARIES")
if eliminate_LDFLAGS:
- conditional = LibMakefile._LDFLAGS.sub("", conditional)
- conditional = LibMakefile._LIBNAME.sub("{libname}_{libext}_\\1".format(**kwargs), conditional)
+ conditional = LibMakefileGenerator._LDFLAGS.sub("", conditional)
+ conditional = LibMakefileGenerator._LIBNAME.sub("{libname}_{libext}_\\1".format(**kwargs), conditional)
conditional = conditional.replace("lib%_LIBRARIES", "lib_LIBRARIES")
conditional = conditional.replace("lib%_LTLIBRARIES", "lib_LTLIBRARIES")
if transform_check_PROGRAMS:
conditional = conditional.replace("check_PROGRAMS", "noinst_PROGRAMS")
conditional = conditional.replace(r"${gl_include_guard_prefix}", config.include_guard_prefix)
unconditional = module.unconditional_automake_snippet(config.auxdir)
- unconditional = LibMakefile._LIBNAME.sub("{libname}_{libext}_\\1".format(**kwargs), unconditional)
+ unconditional = LibMakefileGenerator._LIBNAME.sub("{libname}_{libext}_\\1".format(**kwargs), unconditional)
if (conditional + unconditional).strip():
lines.append("## begin gnulib module {}".format(module.name))
if module.name == "alloca":
cppflags = "".join((
" -D{}=1".format(config.witness_c_macro) if "witness_c_macro" in explicit else "",
- " -DGNULIB_STRICT_CHECKING=1" if for_test else "",
+ " -DGNULIB_STRICT_CHECKING=1" if testing else "",
))
if "makefile_name" not in explicit:
yield ""
if index != -1:
directive = directive[:index].strip(" ")
yield directive
- for directive in sorted(set(_directives(modules))):
+        for directive in sorted(set(_directives(database.main_modules))):
yield ("{libname}_{libext}_LDFLAGS += {directive}".format(directive=directive, **kwargs))
yield ""
-class GnulibCache(Base):
+class GnulibCacheGenerator(BaseGenerator):
+ """gnulib-cache.m4 generator"""
+ _OPTIONS = (
+ ("obsolete", "gl_WITH_OBSOLETE"),
+ ("cxx_tests", "gl_WITH_CXX_TESTS"),
+        ("longrunning_tests", "gl_WITH_LONGRUNNING_TESTS"),
+        ("privileged_tests", "gl_WITH_PRIVILEGED_TESTS"),
+        ("unportable_tests", "gl_WITH_UNPORTABLE_TESTS"),
+    )
+
+
def __init__(self, config):
_type_assert("config", config, _BaseConfig)
+ super().__init__()
self.__config = config
yield "## DO NOT EDIT! GENERATED AUTOMATICALLY!"
yield "## Process this file with automake to produce Makefile.in."
yield "# Copyright (C) 2002-{} Free Software Foundation, Inc.".format(date.year)
- for line in super().__iter__():
+ for line in _DISCLAIMER:
yield line
yield "#"
yield "# This file represents the specification of how gnulib-tool is used."
for module in sorted(config.modules):
yield " {}".format(module)
yield "])"
- if config.obsolete:
- yield "gl_WITH_OBSOLETE"
- if config.cxx_tests:
- yield "gl_WITH_CXX_TESTS"
- if config.longrunning_tests:
- yield "gl_WITH_LONGRUNNING_TESTS"
- if config.privileged_tests:
- yield "gl_WITH_PRIVILEGED_TESTS"
- if config.unportable_tests:
- yield "gl_WITH_UNPORTABLE_TESTS"
+        for (key, macro) in GnulibCacheGenerator._OPTIONS:
+            if config[key]:
+                yield macro
if config.all_tests:
yield "gl_WITH_ALL_TESTS"
yield "gl_AVOID([{}])".format(" ".join(sorted(config.avoids)))
lgpl = _LGPL[config.licenses]
yield "gl_LGPL([{}])".format(lgpl) if lgpl != "yes" else "gl_LGPL"
yield "gl_MAKEFILE_NAME([{}])".format(config.makefile_name)
- if config.conddeps:
+ if config.conditionals:
yield "gl_CONDITIONAL_DEPENDENCIES"
if config.libtool:
yield "gl_LIBTOOL"
yield "gl_WITNESS_C_MACRO([{}])".format(config.witness_c_macro)
if config.vc_files:
yield "gl_VC_FILES([{}])".format(" ".join(sorted(config.vc_files)))
+
+
+
+class GnulibCompGenerator(BaseGenerator):
+ """gnulib-comp.m4 generator"""
+ def __init__(self, config, explicit, database, macro_prefix=None):
+ _type_assert("config", config, _BaseConfig)
+ _type_assert("explicit", explicit, _ITERABLES)
+ _type_assert("database", database, _Database)
+ if macro_prefix is None:
+ macro_prefix = config.macro_prefix
+ _type_assert("macro_prefix", macro_prefix, str)
+ super().__init__()
+ self.__config = config
+ self.__explicit = explicit
+ self.__database = database
+ self.__macro_prefix = macro_prefix
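+        # Conservatively assume that modules use subdirectories; a refinement
+        # would inspect the module file lists instead.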
+ self.__uses_subdirs = True
+
+
+ def __iter__(self):
+ config = self.__config
+ explicit = self.__explicit
+ database = self.__database
+ main_modules = database.main_modules
+ test_modules = database.test_modules
+ macro_prefix = self.__macro_prefix
+
+ date = _datetime.now()
+ yield "# DO NOT EDIT! GENERATED AUTOMATICALLY!"
+ yield "# Copyright (C) 2002-{} Free Software Foundation, Inc.".format(date.year)
+        # Emit the shared license disclaimer, skipping its leading
+        # "## DO NOT EDIT!" line, which was already emitted above in "#" form.
+        for line in _DISCLAIMER[1:]:
+            yield line
+
+ yield "#"
+ yield "# This file represents the compiled summary of the specification in"
+ yield "# gnulib-cache.m4. It lists the computed macro invocations that need"
+ yield "# to be invoked from configure.ac."
+ yield "# In projects that use version control, this file can be treated like"
+ yield "# other built files."
+ yield ""
+ yield ""
+ yield "# This macro should be invoked from $configure_ac, in the section"
+ yield "# \"Checks for programs\", right after AC_PROG_CC, and certainly before"
+ yield "# any checks for libraries, header files, types and library functions."
+        yield "AC_DEFUN([{}_EARLY],".format(macro_prefix)
+ yield "["
+ yield " m4_pattern_forbid([^gl_[A-Z]])dnl the gnulib macro namespace"
+ yield " m4_pattern_allow([^gl_ES$])dnl a valid locale name"
+ yield " m4_pattern_allow([^gl_LIBOBJS$])dnl a variable"
+ yield " m4_pattern_allow([^gl_LTLIBOBJS$])dnl a variable"
+ yield ""
+ yield " # Pre-early section."
+        if "extensions" not in (module.name for module in database.final_modules):
+ yield " AC_REQUIRE([gl_USE_SYSTEM_EXTENSIONS])"
+ yield " AC_REQUIRE([gl_PROG_AR_RANLIB])"
+ yield ""
+ if not config.gnumake and self.__uses_subdirs:
+ yield " AC_REQUIRE([AM_PROG_CC_C_O])"
+ for module in database.final_modules:
+ yield " # Code from module {}:".format(module.name)
+ lines = module.early_autoconf_snippet.split("\n")
+ for line in filter(lambda line: line.strip(), lines):
+ yield line
+ yield "])"
+ yield ""
+ yield "# This macro should be invoked from $configure_ac, in the section"
+ yield "# \"Check for header files, types and library functions\"."
+ yield "AC_DEFUN([{}_INIT],".format(macro_prefix)
+ yield "["
+ if config.libtool:
+ yield " AM_CONDITIONAL([GL_COND_LIBTOOL], [true])"
+ yield " gl_cond_libtool=true"
+ else:
+ yield " AM_CONDITIONAL([GL_COND_LIBTOOL], [false])"
+ yield " gl_cond_libtool=false"
+ yield " gl_libdeps="
+ yield " gl_ltlibdeps="
+ yield " gl_m4_base='{}'".format(config.m4_base)
+ for line in InitMacroHeaderGenerator(config, macro_prefix):
+ yield line
+ yield " gl_source_base='{}'".format(config.source_base)
+ if "witness_c_macro" in explicit:
+ yield " m4_pushdef([gl_MODULE_INDICATOR_CONDITION], [{}])".format(config.witness_c_macro)
+ for line in AutoconfMultisnippetGenerator(config, database, main_modules, True, False, True, macro_prefix):
+ yield line
+ if "witness_c_macro" in explicit:
+ yield " m4_popdef([gl_MODULE_INDICATOR_CONDITION])"
+ yield " # End of code from modules"
+ for line in InitMacroFooterGenerator(config, macro_prefix):
+ yield line
+ yield " gltests_libdeps="
+ yield " gltests_ltlibdeps="
+ for line in InitMacroHeaderGenerator(config, (macro_prefix + "tests")):
+ yield line
+ yield " gl_source_base='{}'".format(config.tests_base)
+ # Define a tests witness macro that depends on the package.
+ # PACKAGE is defined by AM_INIT_AUTOMAKE, PACKAGE_TARNAME is defined by AC_INIT.
+ # See <http://lists.gnu.org/archive/html/automake/2009-05/msg00145.html>.
+ yield "changequote(,)dnl"
+ yield "".join((
+ " {}tests_WITNESS=IN_`".format(macro_prefix),
+ "echo \"${PACKAGE-$PACKAGE_TARNAME}\"",
+ " | ",
+ "LC_ALL=C tr abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ",
+ " | ",
+ "LC_ALL=C sed -e 's/[^A-Z0-9_]/_/g'",
+ "`_GNULIB_TESTS",
+ ))
+ yield "changequote([, ])dnl"
+ yield " AC_SUBST([{}tests_WITNESS])".format(macro_prefix)
+ yield " gl_module_indicator_condition=${}tests_WITNESS".format(macro_prefix)
+ yield " m4_pushdef([gl_MODULE_INDICATOR_CONDITION], [$gl_module_indicator_condition])"
+ for line in AutoconfMultisnippetGenerator(config, database, test_modules, True, True, True, macro_prefix):
+ yield line
+ yield " m4_popdef([gl_MODULE_INDICATOR_CONDITION])"
+ for line in InitMacroFooterGenerator(config, (macro_prefix + "tests")):
+ yield line
+
+ # _LIBDEPS and _LTLIBDEPS variables are not needed if this library is
+ # created using libtool, because libtool already handles the dependencies.
+ if not config.libtool:
+ libname = config.libname.upper()
+ yield " {}_LIBDEPS=\"$gl_libdeps\"".format(libname)
+ yield " AC_SUBST([{}_LIBDEPS])".format(libname)
+ yield " {}_LTLIBDEPS=\"$gl_ltlibdeps\"".format(libname)
+ yield " AC_SUBST([{}_LTLIBDEPS])".format(libname)
+ if database.libtests:
+ yield " LIBTESTS_LIBDEPS=\"$gltests_libdeps\""
+ yield " AC_SUBST([LIBTESTS_LIBDEPS])"
+ yield "])"
+ for line in InitMacroDoneGenerator(config, source_base=config.source_base, macro_prefix=macro_prefix):
+ yield line
+ for line in InitMacroDoneGenerator(config, source_base=config.tests_base, macro_prefix=(macro_prefix + "tests")):
+ yield line
+ yield ""
+ yield "# This macro records the list of files which have been installed by"
+ yield "# gnulib-tool and may be removed by future gnulib-tool invocations."
+ yield "AC_DEFUN([{}_FILE_LIST], [".format(macro_prefix)
+ for file in sorted(set(database.main_files + database.test_files)):
+ yield " {}".format(file)
+ yield "])"
from .error import type_assert as _type_assert
from .error import UnknownModuleError as _UnknownModuleError
+from .config import BaseConfig as _BaseConfig
-class Base:
+class BaseModule:
"""gnulib generic module"""
_TABLE = {
"description" : (0x00, str, "Description"),
# unconditional_automake_snippet
# automake_snippet
}
- _DEPENDENCIES = _re.compile("^(\\S+)(?:\\s+(.+))*$")
_LIB_SOURCES = _re.compile(r"^lib_SOURCES\s*\+\=\s*(.*?)$", _re.S | _re.M)
kwargs["licenses"] = licenses
if "maintainers" not in kwargs:
kwargs["maintainers"] = ("all",)
+ self.__name = name
self.__table = _collections.OrderedDict()
- self.__table["name"] = name
- for (key, (_, typeid, _)) in Base._TABLE.items():
+ for (key, (_, typeid, _)) in BaseModule._TABLE.items():
self.__table[key] = typeid(kwargs.get(key, typeid()))
def __repr__(self):
- module = self.__class__.__module__
- name = self.__class__.__name__
- return "{}.{}<{}>".format(module, name, self.__table["name"])
+ return "<" + self.__name + ">"
def __str__(self):
- return "<" + self.__table["name"] + ">"
+ return "<" + self.__name + ">"
def __enter__(self):
def __hash__(self):
- return hash(self.__table["name"])
+ return hash(self.__name)
def __getitem__(self, key):
- if key not in Base._TABLE:
+ _type_assert("key", key, str)
+ if key not in BaseModule._TABLE:
key = key.replace("-", "_")
- if key not in Base._TABLE:
+ if key not in BaseModule._TABLE:
raise KeyError(repr(key))
return getattr(self, key)
def __setitem__(self, key, value):
- if key not in Base._TABLE:
+ _type_assert("key", key, str)
+ if key not in BaseModule._TABLE:
key = key.replace("-", "_")
- if key not in Base._TABLE:
+ if key not in BaseModule._TABLE:
raise KeyError(repr(key))
return setattr(self, key, value)
@property
def package(self):
+ """gnulib-compatible module package"""
result = ""
- for (key, (_, typeid, field)) in sorted(Base._TABLE.items(), key=lambda k: k[1][0]):
+ for (key, (_, typeid, field)) in sorted(BaseModule._TABLE.items(), key=lambda k: k[1][0]):
field += ":\n"
if typeid in _ITERABLES:
value = "\n".join(self.__table[key])
@property
def name(self):
"""name"""
- return self.__table["name"]
+ return self.__name
@name.setter
def name(self, value):
_type_assert("name", value, str)
- self.__table["name"] = value
+ self.__name = value
@property
def dependencies(self):
"""dependencies iterator (name, condition)"""
entries = set()
- for entry in self.__table["dependencies"]:
- entries.add(Base._DEPENDENCIES.findall(entry)[0])
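+        # Each entry has the form "module [condition]"; the optional condition
+        # may additionally be wrapped in square brackets.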
+ for line in self.__table["dependencies"]:
+ line = line.replace("\t", " ").strip()
+ index = line.find(" ")
+ if index == -1:
+ module = line
+ condition = ""
+ else:
+ module = line[:index].strip()
+ condition = line[index:].strip()
+ if condition.startswith("["):
+ condition = condition[1:]
+ if condition.endswith("]"):
+ condition = condition[:-1]
+ if not condition:
+ condition = None
+ entries.add((module, condition))
return frozenset(entries)
@dependencies.setter
lib_SOURCES = False
lines = list(snippet.splitlines())
for (index, line) in enumerate(lines):
- if Base._LIB_SOURCES.findall(line):
+ if BaseModule._LIB_SOURCES.findall(line):
(first, last) = (index, index)
while line.endswith("\\"):
line = lines[last]
last += 1
lines = list(lines)[first:(last + 1)]
- lines[0] = Base._LIB_SOURCES.sub("\\1", lines[0])
+ lines[0] = BaseModule._LIB_SOURCES.sub("\\1", lines[0])
lib_SOURCES = True
break
lines = tuple(lines) if lib_SOURCES else ()
@property
def automake_snippet(self):
"""full automake snippet (conditional + unconditional parts)"""
- return (self.conditional_automake_snippet + self.unconditional_automake_snippet)
+ return self.conditional_automake_snippet + self.unconditional_automake_snippet
def shell_variable(self, macro_prefix="gl"):
"""Get the name of the shell variable set to true once m4 macros have been executed."""
module = self.name
- if len(module) != len(module.encode()):
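+        # Names containing characters other than letters, digits and underscores
+        # are replaced by an MD5 hash to obtain a valid shell identifier.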
+        if any(not (char.isalnum() or char == "_") for char in module):
module = (module + "\n").encode("UTF-8")
module = _hashlib.md5(module).hexdigest()
return "{}_gnulib_enabled_{}".format(macro_prefix, module)
def shell_function(self, macro_prefix="gl"):
"""Get the name of the shell function containing the m4 macros."""
module = self.name
- if len(module) != len(module.encode()):
+        if any(not (char.isalnum() or char == "_") for char in module):
module = (module + "\n").encode("UTF-8")
module = _hashlib.md5(module).hexdigest()
return "func_{}_gnulib_m4code_{}".format(macro_prefix, module)
def conditional_name(self, macro_prefix="gl"):
"""Get the automake conditional name."""
module = self.name
- if len(module) != len(module.encode()):
+        if any(not (char.isalnum() or char == "_") for char in module):
module = (module + "\n").encode("UTF-8")
module = _hashlib.md5(module).hexdigest()
return "{}_GNULIB_ENABLED_{}".format(macro_prefix, module)
def __lt__(self, value):
- if not isinstance(value, Base):
+ if not isinstance(value, BaseModule):
return True
return self.name < value.name
return self.__lt__(value) or self.__eq__(value)
def __eq__(self, value):
- if not isinstance(value, Base):
+ if not isinstance(value, BaseModule):
return False
return self.name == value.name
-class File(Base):
+class DummyModule(BaseModule):
+ """dummy module placeholder"""
+
+    name = property(lambda *args, **kwargs: "dummy")
+    files = property(lambda *args, **kwargs: {"lib/dummy.c"})
+ description = property(lambda *args, **kwargs: "\n".join((
+ "A dummy file, to make sure the library is non-empty.",
+ "",
+ )))
+ conditional_automake_snippet = property(lambda *args, **kwargs: "\n".join((
+ "lib_SOURCES += dummy.c",
+ "",
+ )))
+ licenses = property(lambda *args, **kwargs: {"public domain"})
+ maintainers = property(lambda *args, **kwargs: {"all"})
+
+
+    def __init__(self):
+        # BaseModule.__init__ is skipped on purpose: this placeholder is
+        # read-only (__setattr__ raises) and exposes its fields as properties.
+        pass
+
+
+ def __hash__(self):
+ return hash(None)
+
+
+ def __setitem__(self, key, value):
+ return self.__setattr__(key, value)
+
+
+ def __setattr__(self, key, value):
+ raise TypeError("read-only module")
+
+
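+# Replace the class with a single shared read-only instance; callers compare
+# against it with "module is DummyModule".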
+DummyModule = DummyModule()
+
+
+
+class FileModule(BaseModule):
"""gnulib module text file"""
- _TABLE = {_value[2]:(_value[1], _key) for (_key, _value) in Base._TABLE.items()}
- _FIELDS = [_field for (_, _, _field) in Base._TABLE.values()]
+ _TABLE = {_value[2]:(_value[1], _key) for (_key, _value) in BaseModule._TABLE.items()}
+ _FIELDS = [_field for (_, _, _field) in BaseModule._TABLE.values()]
_PATTERN = _re.compile("({}):".format("|".join(_FIELDS)))
raise ValueError("illegal mode: {}".format(mode))
if mode == "r":
with _codecs.open(path, "rb", "UTF-8") as stream:
- match = File._PATTERN.split(stream.read())[1:]
+ match = FileModule._PATTERN.split(stream.read())[1:]
for (group, value) in zip(match[::2], match[1::2]):
- (typeid, key) = File._TABLE[group]
+ (typeid, key) = FileModule._TABLE[group]
if typeid in _ITERABLES:
lines = []
for line in value.splitlines():
- if not line.strip() or line.startswith("#"):
- continue
- lines += [line]
+ if line.strip() and not line.startswith("#"):
+ lines.append(line)
table[key] = typeid(lines)
else:
table[key] = value.strip()
-class Database:
- """gnulib module database"""
- def __init__(self, lookup=None):
+class TransitiveClosure:
+ """transitive closure table"""
+ def __init__(self, lookup, config, tests):
+ if not callable(lookup):
+ raise TypeError("lookup: callable expected")
+ _type_assert("config", config, _BaseConfig)
+ _type_assert("tests", tests, bool)
+
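+        # Reject module categories that the configuration does not request.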
+ def _exclude(module):
+ return any((
+ (not config.obsolete and module.obsolete),
+ (not config.cxx_tests and module.cxx_test),
+ (not config.longrunning_tests and module.longrunning_test),
+ (not config.privileged_tests and module.privileged_test),
+ (not config.unportable_tests and module.unportable_test),
+ ))
+
def _lookup(module):
- if not (module is None or isinstance(module, Base)):
+ if not (module is None or isinstance(module, BaseModule)):
if isinstance(module, str):
if lookup is None:
raise TypeError("cannot instantiate {} module".format(module))
module = lookup(module)
- if not isinstance(module, Base):
- raise TypeError("module: pygnulib.module.Base expected")
+ if not isinstance(module, BaseModule):
+ raise TypeError("module: pygnulib.module.BaseModule expected")
return module
+
+ conditional = set()
+ unconditional = set()
+ demanders = _collections.defaultdict(dict)
+ dependencies = _collections.defaultdict(dict)
+ modules = {lookup(module) for module in config.modules}
+ for dependency in modules:
+ unconditional.add(dependency)
+ dependencies[dependency][None] = None
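+        # Worklist-style transitive closure over the module dependency graph.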
+ while modules:
+ for demander in set(modules):
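+                # When tests are requested, also pull in the corresponding "-tests" module.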
+ if tests and not demander.name.endswith("-tests"):
+ try:
+ dependency = lookup("{}-tests".format(demander.name))
+ if not _exclude(dependency):
+ demanders[None][dependency] = None
+ dependencies[dependency][None] = None
+ except _UnknownModuleError:
+ pass # ignore non-existent tests
+ for (dependency, condition) in demander.dependencies:
+ dependency = lookup(dependency)
+                if config.gnumake and condition is not None and condition.startswith("if "):
+ # A module whose Makefile.am snippet contains a reference to an
+ # automake conditional. If we were to use it conditionally, we
+ # would get an error
+ # configure: error: conditional "..." was never defined.
+ # because automake 1.11.1 does not handle nested conditionals
+ # correctly. As a workaround, make the module unconditional.
+ demanders[None][dependency] = None
+ dependencies[dependency][None] = None
+ unconditional.add(dependency)
+ elif not _exclude(dependency):
+ if condition is not None:
+ conditional.add(dependency)
+ elif demander in conditional:
+ conditional.add(dependency)
+ else:
+ conditional.discard(dependency)
+ unconditional.add(dependency)
+                        # Record each (demander, dependency) edge only once;
+                        # otherwise dependency cycles would keep re-queueing
+                        # modules and the worklist would never drain.
+                        if dependency not in demanders[demander]:
+                            demanders[demander][dependency] = None
+                            dependencies[dependency][demander] = condition
+                            modules.add(dependency)
+ modules.discard(demander)
+
self.__lookup = _lookup
- self.__storage = _collections.defaultdict(dict)
+ self.__demanders = demanders
+ self.__dependencies = dependencies
+ self.__conditional = frozenset(conditional)
def __iter__(self):
- for dependency in sorted(self.__storage):
- entries = self.__storage[dependency]
- for element in entries.items():
- (demander, condition) = element
- yield (dependency, condition, element)
+ for dependency in self.__dependencies:
+ yield dependency
def dump(self, indent=" "):
- """Export gnulib module database into string."""
+ """Export transitive closure result into string."""
def _dump():
unconditional = set()
storage = _collections.defaultdict(dict)
yield "{{".format()
- for (key, value) in self.__storage.items():
+ for (key, value) in self.__dependencies.items():
for (subkey, subvalue) in value.items():
dependency = key.name
demander = subkey.name if subkey else ""
unconditional.add(dependency)
condition = condition.replace("\"", "\\\"")
storage[dependency][demander] = condition
-
for dependency in sorted(storage):
if dependency in unconditional:
yield "{}\"{}\": {{}},".format(indent, dependency)
yield "{}\"{}\": \"{}\",".format((indent * 2), demander, condition)
yield "{}}},".format(indent)
yield "}}".format()
-
- if not self.__storage:
+ if not self.__dependencies:
return "{{}}".format()
return _os.linesep.join(_dump())
def load(self, string):
- """Import gnulib module database from string."""
- storage = _collections.defaultdict(dict)
+ """Import transitive closure result from string."""
+ demanders = _collections.defaultdict(dict)
+ dependencies = _collections.defaultdict(dict)
collection = _ast.literal_eval(string)
_type_assert("collection", collection, dict)
for key in collection:
demander = self.__lookup(demander)
if not condition:
condition = None
- storage[dependency][demander] = condition
- self.__storage = storage
-
-
- def insert(self, dependency, demander, condition, lookup=None):
- """Mark demander module as requiring a dependency module under specific condition."""
- dependency = self.__lookup(dependency)
- demander = self.__lookup(demander)
- if dependency is None:
- raise TypeError("dependency: pygnulib.module.Base expected")
- if not (condition is None or isinstance(condition, str)):
- raise TypeError("condition: None or str expected")
- if demander in self.__storage[dependency]:
- if condition is None:
- fmt = "{} already unconditionally depends on {}"
- args = (demander.name, dependency.name)
- else:
- fmt = "{} already depends on {} ({})"
- args = (demander.name, dependency.name, repr(condition))
- raise ValueError(fmt.format(*args))
- self.__storage[dependency][demander] = condition
-
-
- def modify(self, dependency, demander, condition):
- """Modify condition under which the demander requires a dependency."""
- dependency = self.__lookup(dependency)
- demander = self.__lookup(demander)
- if dependency is None:
- raise TypeError("dependency: pygnulib.module.Base expected")
- if demander is None and condition is not None:
- raise ValueError("condition: None expected")
- if not (condition is None and isinstance(condition, str)):
- raise TypeError("condition: None or str expected")
- if demander not in self.__storage[dependency]:
- fmt = "{} does not depend on {}"
- raise ValueError(fmt.format(demander.name, dependency.name))
- self.__storage[dependency][demander] = condition
-
-
- def remove(self, dependency, demander):
- """Remove a dependency between two modules."""
- dependency = self.__lookup(dependency)
- demander = self.__lookup(demander)
- if demander not in self.__storage[dependency]:
- fmt = "{} does not depend on {}"
- raise ValueError(fmt.format(demander.name, dependency.name))
- del self.__storage[dependency][demander]
-
-
- def reset(self):
- self.__storage = _collections.defaultdict(dict)
-
-
- def demanders(self, module, lookup=None):
+ demanders[demander][dependency] = condition
+ dependencies[dependency][demander] = condition
+ self.__demanders = dict(demanders)
+ self.__dependencies = dict(dependencies)
+
+
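The dump/load pair round-trips the closure through a Python dict literal: dump() renders a mapping from each dependency to its {demander: condition} table (an empty table or empty condition standing for an unconditional edge), and load() rebuilds both lookup directions from that literal via ast.literal_eval(). A minimal sketch of the intended round-trip, assuming `table` and `other` are TransitiveClosure instances and the module names and condition string are purely illustrative:

    # Serialize the computed closure; the output is roughly of the form
    # {
    #     "alpha": {},                                # unconditional dependency
    #     "beta": {
    #         "alpha": "test $gl_need_beta = yes",    # conditional edge
    #     },
    # }
    text = table.dump(indent="    ")

    # Restore it elsewhere; empty condition strings become None again.
    other.load(text)
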
+ def conditional(self, module):
+ """
+ Test whether module is a conditional dependency.
+ Note that this check also takes all parent modules into account.
+ Any module with an unconditional demander is also unconditional.
+ """
+ module = self.__lookup(module)
+ if module not in self.__dependencies:
+ fmt = "dependency {} not found"
+ raise KeyError(fmt.format(module))
+ return module in self.__conditional
+
+
+ def unconditional(self, module):
+ """
+ Test whether module is an unconditional dependency.
+ Note that this check also takes all parent modules into account.
+ Any module with an unconditional demander is also unconditional.
+ """
+ return not self.conditional(module)
+
+
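A short illustration of the rule spelled out above: a module with at least one unconditional demander is reported as unconditional, no matter how many conditional edges also point at it. The names below are hypothetical and `table` stands for a computed TransitiveClosure:

    # `gamma` is required unconditionally by `alpha` and conditionally by `beta`.
    for (demander, condition) in table.demanders(gamma):
        print(demander.name, condition)     # e.g. alpha None, then beta "..."
    assert table.unconditional(gamma)
    assert not table.conditional(gamma)
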
+ def demanders(self, module):
"""For each demander which requires the module yield the demander and condition."""
module = self.__lookup(module)
- entries = self.__storage[module]
- if not entries:
- fmt = "module {} is not found"
- raise KeyError(fmt.format(module.name))
- for (demander, condition) in entries.items():
+ if module not in self.__dependencies:
+ fmt = "dependency {} not found"
+ raise KeyError(fmt.format(module))
+ for (demander, condition) in self.__dependencies.get(module, {}).items():
yield (demander, condition)
- def dependencies(self, module, lookup=None):
+ def dependencies(self, module):
"""For each dependency of this module yield the dependency and the condition."""
- present = False
module = self.__lookup(module)
- for dependency in self.__storage:
- for (demander, condition) in self.__storage[dependency].items():
- if module == demander:
- present = True
- yield (dependency, condition)
- fmt = "module {} is not found"
- raise KeyError(fmt.format(module.name))
-
-
-
-def transitive_closure(lookup, modules, **options):
- """
- Perform a transitive closure, generating a set of module dependencies (database).
- Each iteration over the database yields a tuple of (module, demander, condition).
- At the same time return sets, which designate main, final and test modules respectively.
-
- If condition is None, but demander is not, there is no special condition for this module.
- If demander is None, the module is provided unconditionally (condition is always None).
-
- lookup must be a callable which obtains a pygnulib module by its name.
- modules is an iterable, yielding a module (either name or instance).
- options may be any combination of gnulib configuration options.
- """
- keywords = frozenset({
- "gnumake",
- "tests",
- "obsolete",
- "cxx_tests",
- "longrunning_tests",
- "privileged_tests",
- "unportable_tests",
- })
- if not callable(lookup):
- raise TypeError("lookup must be a callable")
- for (key, value) in options.items():
- if key not in keywords:
- return KeyError(key)
- _type_assert("option", value, bool)
- modules = set(lookup(module) for module in modules)
-
- def _exclude_(module):
- return any((
- (not options.get("obsolete", False) and module.obsolete),
- (not options.get("cxx_tests", False) and module.cxx_test),
- (not options.get("longrunning_tests", False) and module.longrunning_test),
- (not options.get("privileged_tests", False) and module.privileged_test),
- (not options.get("unportable_tests", False) and module.unportable_test),
- ))
-
- def _transitive_closure_(tests):
- queue = set()
- previous = set()
- current = set()
- for module in modules:
- current.add((module, None, None))
- while previous != current:
- previous.update(current)
- for (demander, _, _) in previous:
- if demander in queue:
- continue
- if tests and not demander.name.endswith("-tests"):
- try:
- module = lookup("{0}-tests".format(demander.name))
- if not _exclude_(module):
- current.add((module, None, None))
- except _UnknownModuleError:
- pass # ignore non-existent tests
- for (dependency, condition) in demander.dependencies:
- module = lookup(dependency)
- if options.get("gnumake", False) and condition.startswith("if "):
- # A module whose Makefile.am snippet contains a reference to an
- # automake conditional. If we were to use it conditionally, we
- # would get an error
- # configure: error: conditional "..." was never defined.
- # because automake 1.11.1 does not handle nested conditionals
- # correctly. As a workaround, make the module unconditional.
- current.add((module, None, None))
- else:
- condition = condition if condition.strip() else None
- if not _exclude_(module):
- current.add((module, demander, condition))
- queue.add(demander)
- return current
-
- db = Database(lookup=lookup)
- base = _transitive_closure_(False)
- full = _transitive_closure_(True)
- for (dependency, demander, condition) in (base | full):
- db.insert(dependency, demander, condition)
- main = {module for (module, _, _) in base}
- final = {module for (module, _, _) in full} if options.get("tests", False) else set(main)
- ignore = frozenset({"main"} if options.get("tests", False) else {"main", "all"})
- tests = (final - {module for module in main if module.applicability in ignore})
- return (db, main, final, tests)
-
-
-
-def libtests_required(modules):
- """Determine whether libtests.a is required."""
- for module in modules:
- for file in module.files:
- if file.startswith("lib/"):
- return True
- return False
+ if module not in self.__demanders:
+ fmt = "demander {} not found"
+ raise KeyError(fmt.format(module))
+ for (dependency, condition) in self.__demanders.get(module, {}).items():
+ yield (dependency, condition)
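The two generators expose the closure in both directions: demanders() answers "who pulls this module in, and under which condition", while dependencies() answers "what does this module itself require". A minimal sketch, assuming `table` is a computed TransitiveClosure and `module` is one of its members; a condition of None means the edge is unconditional:

    # Walk upwards: every module that demands `module`.
    for (demander, condition) in table.demanders(module):
        print("required by", demander.name, "when", condition or "always")

    # Walk downwards: everything `module` itself requires.
    # (Raises KeyError if `module` has no dependencies of its own.)
    for (dependency, condition) in table.dependencies(module):
        print("requires", dependency.name, "when", condition or "always")
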
-_DUMMY_REQUIRED_PATTERN = _re.compile(r"^lib_SOURCES\s*\+\=\s*(.*?)$", _re.S | _re.M)
-def dummy_required(modules):
- """Determine whether dummy module is required."""
- for module in modules:
- snippet = module.conditional_automake_snippet
- for match in _DUMMY_REQUIRED_PATTERN.findall(snippet):
- files = {file.strip() for file in match.split("#", 1)[0].split(" ") if file.strip()}
- if {file for file in files if not file.endswith(".h")}:
+class Database:
+ """gnulib module database"""
+ __DUMMY_PATTERN = _re.compile(r"^lib_SOURCES\s*\+\=\s*(.*?)$", _re.S | _re.M)
+
+
+ def __init__(self, lookup, config):
+ if not callable(lookup):
+ raise TypeError("lookup: callable expected")
+ _type_assert("config", config, _BaseConfig)
+
+ def _applicability(module):
+ return module.applicability in ({"main", "all"}, {"main"})[config.tests]
+
+ def _dummy(modules):
+ if "dummy" in config.avoids:
return False
- return True
+ for module in modules:
+ snippet = module.conditional_automake_snippet
+ for match in Database.__DUMMY_PATTERN.findall(snippet):
+ files = {file.strip() for file in match.split("#", 1)[0].split(" ") if file.strip()}
+ if {file for file in files if not file.endswith(".h")}:
+ return False
+ return True
+
+ def _files(modules):
+ files = set()
+ for module in modules:
+ files.update(module.files)
+ files.add("m4/00gnulib.m4")
+ files.add("m4/gnulib-common.m4")
+ if config.ac_version == 2.59:
+ files.add("m4/onceonly.m4")
+ return files
+
+ def _libtests(modules):
+ for module in modules:
+ for file in module.files:
+ if file.startswith("lib/"):
+ return True
+ return False
+
+ base_table = TransitiveClosure(lookup, config, False)
+ full_table = TransitiveClosure(lookup, config, True)
+ main_modules = set(base_table)
+ explicit_modules = set()
+ for module in full_table:
+ if module.name in config.modules:
+ explicit_modules.add(module)
+ final_modules = set(full_table) if config.tests else main_modules
+ test_modules = (final_modules - set(filter(_applicability, main_modules)))
+ libtests = _libtests(test_modules)
+ if _dummy(main_modules):
+ main_modules.add(DummyModule)
+ if _dummy(test_modules) and libtests:
+ test_modules.add(DummyModule)
+ main_files = _files(main_modules)
+ test_files = set()
+ for file in _files(test_modules):
+ if file.startswith("lib/"):
+ file = ("lib=tests/" + file[len("lib/"):])
+ test_files.add(file)
+
+ self.__libtests = libtests
+ self.__base_table = base_table
+ self.__full_table = full_table
+ self.__main_modules = tuple(sorted(main_modules))
+ self.__test_modules = tuple(sorted(test_modules))
+ self.__final_modules = tuple(sorted(final_modules))
+ self.__explicit_modules = tuple(sorted(explicit_modules))
+ self.__main_files = tuple(sorted(main_files))
+ self.__test_files = tuple(sorted(test_files))
+
+
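The _dummy() helper encodes gnulib-tool's rule for the dummy module: if every module in a set contributes only header files to lib_SOURCES, no object file would ever be compiled, so the dummy module has to be added to keep the resulting library non-empty. A self-contained sketch of that check against a hypothetical Makefile.am snippet:

    import re

    DUMMY_PATTERN = re.compile(r"^lib_SOURCES\s*\+\=\s*(.*?)$", re.S | re.M)
    snippet = "lib_SOURCES += alloca.h    # header-only, hypothetical"
    for match in DUMMY_PATTERN.findall(snippet):
        files = {f.strip() for f in match.split("#", 1)[0].split(" ") if f.strip()}
        # No non-header entries: no object code, hence dummy is still required.
        assert not {f for f in files if not f.endswith(".h")}
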
+ @property
+ def base_table(self):
+ """
+ The transitive closure table of the specified modules, ignoring tests modules.
+ Iterating over the table yields each module of the closure; use the demanders
+ and dependencies methods to obtain the per-edge (module, condition) pairs.
+ """
+ return self.__base_table
+
+
+ @property
+ def full_table(self):
+ """
+ The full transitive closure table of the specified modules, including tests modules.
+ Iterating over the table yields each module of the closure; use the demanders
+ and dependencies methods to obtain the per-edge (module, condition) pairs.
+ """
+ return self.__full_table
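Iteration over either table yields its member modules, which is how __init__ above materializes the module lists. A small sketch, with `database` assumed to be a constructed Database:

    base = set(database.base_table)     # drives the main module list
    full = set(database.full_table)     # adds *-tests modules and their dependencies
    tests_only = full - base            # modules reachable only through tests
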
+
+
+ @property
+ def final_modules(self):
+ """
+ The final module list is the transitive closure of the specified modules,
+ including or ignoring tests modules (depending on the tests configuration
+ option).
+ """
+ return self.__final_modules
+
+
+ @property
+ def main_modules(self):
+ """
+ The main module list is the transitive closure of the specified modules,
+ ignoring tests modules. Its lib/* sources go into {source_base}. If --lgpl
+ is specified, it will consist only of LGPLed source.
+ """
+ return self.__main_modules
+ @property
+ def main_files(self):
+ """The full set of the files required for modules in the main modules list."""
+ return self.__main_files
-def filelist(modules, ac_version):
- """Determine the final file list."""
- files = set()
- for module in modules:
- files.update(module.files)
- files.add("m4/00gnulib.m4")
- files.add("m4/gnulib-common.m4")
- if ac_version == 2.59:
- files.add("m4/onceonly.m4")
- return files
+
+ @property
+ def test_modules(self):
+ """
+ The tests-related module list is the transitive closure of the specified
+ modules, including tests modules, minus the main module list excluding
+ modules of applicability 'all'. Its lib/* sources (brought in through
+ dependencies of *-tests modules) go into {tests_base}. It may contain GPLed
+ source, even if --lgpl is specified.
+ """
+ return self.__test_modules
+
+
+ @property
+ def explicit_modules(self):
+ """
+ The list of modules that were explicitly requested for import.
+ Direct and indirect dependencies are not included in this list.
+ """
+ return self.__explicit_modules
+
+
+ @property
+ def test_files(self):
+ """The full set of the files required for modules in the test modules list."""
+ return self.__test_files
+
+
+ @property
+ def libtests(self):
+ """If libtests.a is required, this variable yields true."""
+ return self.__libtests
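Downstream generators only need these precomputed properties: main_files is the complete file set for the main module list, test_files has already had its lib/ entries rewritten by the loop in __init__ above, and libtests tells the caller whether a libtests.a archive must be produced at all. A hedged consumer sketch; the install/emit helpers are hypothetical:

    for file in database.main_files:
        install_into_source_base(file)          # hypothetical helper

    for file in database.test_files:
        # entries that lived under lib/ now carry the "lib=tests/" prefix
        install_into_tests_base(file)           # hypothetical helper

    if database.libtests:
        emit_libtests_rules()                   # hypothetical helper
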
"time and object code)",
),
"action": _TrueOption,
- "dest": "conddeps",
+ "dest": "conditionals",
}),
(["--no-conditional-dependencies"], {
"help": (
"don't use conditional dependencies",
),
"action": _FalseOption,
- "dest": "conddeps",
+ "dest": "conditionals",
}),
(["--libtool"], {
"help": (
from .error import type_assert as _type_assert
from .error import UnknownModuleError as _UnknownModuleError
-from .module import Base as _BaseModule
-from .module import File as _FileModule
+from .module import BaseModule as _BaseModule
+from .module import FileModule as _FileModule
-class Base:
+class BaseVFS:
"""gnulib generic virtual file system"""
def __init__(self, prefix, **table):
_type_assert("prefix", prefix, str)
@property
def relative(self):
- """base VFS name"""
+ """BaseVFS VFS name"""
return self.__prefix
NOTE: It is up to the caller to unlink files obtained after dynamic patching.
"""
- _type_assert("primary", primary, Base)
- _type_assert("secondary", secondary, Base)
+ _type_assert("primary", primary, BaseVFS)
+ _type_assert("secondary", secondary, BaseVFS)
if name in secondary:
return (secondary, name)
diff = "{}.diff".format(name)
def mkdir(root, name):
"""Create a leaf directory and all intermediate ones recursively."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
path = name if _os.path.isabs(name) else _os.path.join(root.absolute, root[name])
_os.makedirs(root[name], exist_ok=True)
def backup(root, name):
"""Backup the given file."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
original_path = _os.path.join(root.absolute, root[name])
backup_path = "{}~".format(original_path)
try:
def compare(lhs_root, lhs_name, rhs_root, rhs_name):
"""Compare the given files; return True if files contain the same data."""
- lhs_root = Base(".") if lhs_root is None else lhs_root
- rhs_root = Base(".") if rhs_root is None else rhs_root
+ lhs_root = BaseVFS(".") if lhs_root is None else lhs_root
+ rhs_root = BaseVFS(".") if rhs_root is None else rhs_root
(lhs_path, rhs_path) = (lhs_name, rhs_name)
if not _os.path.isabs(lhs_name):
lhs_path = _os.path.join(lhs_root.absolute, lhs_root[lhs_name])
if src_abs and dst_abs:
raise ValueError("absolute src and dst")
limit = (16 * 1024)
- src_root = Base(".") if src_root is None else src_root
- dst_root = Base(".") if dst_root is None else dst_root
+ src_root = BaseVFS(".") if src_root is None else src_root
+ dst_root = BaseVFS(".") if dst_root is None else dst_root
mkdir(dst_root, _os.path.dirname(dst_name))
(src_path, dst_path) = (src_name, dst_name)
if not _os.path.isabs(src_name):
def exists(root, name):
"""Check whether the given file exists."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
path = name if _os.path.isabs(name) else _os.path.join(root.absolute, root[name])
return _os.path.exists(path)
dst_abs = _os.path.isabs(dst_name)
if src_abs and dst_abs:
raise ValueError("absolute src and dst")
- src_root = Base(".") if src_root is None else src_root
- dst_root = Base(".") if dst_root is None else dst_root
+ src_root = BaseVFS(".") if src_root is None else src_root
+ dst_root = BaseVFS(".") if dst_root is None else dst_root
mkdir(src_root, _os.path.dirname(src_name))
mkdir(dst_root, _os.path.dirname(dst_name))
(src_path, dst_path) = (src_name, dst_name)
dst_abs = _os.path.isabs(dst_name)
if src_abs and dst_abs:
raise ValueError("absolute src and dst")
- src_root = Base(".") if src_root is None else src_root
- dst_root = Base(".") if dst_root is None else dst_root
+ src_root = BaseVFS(".") if src_root is None else src_root
+ dst_root = BaseVFS(".") if dst_root is None else dst_root
mkdir(dst_root, _os.path.dirname(dst_name))
(src_path, dst_path) = (src_name, dst_name)
if not _os.path.isabs(src_name):
def iostream(root, name, mode="r", encoding=None):
"""Open file and return a stream. Raise IOError upon failure."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
path = name if _os.path.isabs(name) else _os.path.join(root.absolute, root[name])
return _codecs.open(path, mode, encoding)
def readlink(root, name):
"""Obtain the path to which the symbolic link points."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
mkdir(root, _os.path.dirname(name))
path = name if _os.path.isabs(name) else _os.path.join(root.absolute, root[name])
return _os.readlink(path)
dst_abs = _os.path.isabs(dst_name)
if src_abs and dst_abs:
raise ValueError("absolute src and dst")
- src_root = Base(".") if src_root is None else src_root
- dst_root = Base(".") if dst_root is None else dst_root
+ src_root = BaseVFS(".") if src_root is None else src_root
+ dst_root = BaseVFS(".") if dst_root is None else dst_root
mkdir(dst_root, _os.path.dirname(dst_name))
if not relative:
(src_path, dst_path) = (src_name, dst_name)
def unlink(root, name, backup=True):
"""Unlink a file, backing it up if necessary."""
- root = Base(".") if root is None else root
+ root = BaseVFS(".") if root is None else root
mkdir(root, _os.path.dirname(name))
path = name if _os.path.isabs(name) else _os.path.join(root.absolute, root[name])
_os.unlink(path)
-class GnulibGit(Base):
+class GnulibGitVFS(BaseVFS):
"""gnulib git repository"""
_EXCLUDE = {
"." : str.startswith,
"""obtain gnulib module by name"""
_type_assert("name", name, str)
_type_assert("full", full, bool)
- if name in GnulibGit._EXCLUDE:
+ if name in GnulibGitVFS._EXCLUDE:
raise ValueError("illegal module name")
path = _os.path.join(self.absolute, self["modules"], name)
try:
names = []
for name in files:
exclude = False
- for (key, method) in GnulibGit._EXCLUDE.items():
+ for (key, method) in GnulibGitVFS._EXCLUDE.items():
if method(name, key):
exclude = True
break