diff --git a/README.rst b/README.rst index 379d9a2..f1f53b4 100644 --- a/README.rst +++ b/README.rst @@ -167,7 +167,8 @@ Note: - external projects will be copied/downloaded into "build/targetname/external_sources" - you can specify a subdirectory, if the thirdparty code has an unusual structure -- further granularity is given by ``include_directories`` and ``source_directories`` +- further granularity is given by ``include_directories`` and ``sources`` +- ``sources``, ``headers_exclude`` and ``sources_exclude`` expect a list of globbing patterns or files (not folders!) .. code:: toml [mylib] url = "/~https://github.com/trick-17/mylib" version = 1.1 # will try to `git checkout 1.1` directory = "sources" # will point to "build/mylib/external_sources/sources" - [mylib.sources] include_directories = ["mylib/include"] # will point to "build/mylib/external_sources/sources/mylib/include" - source_directories = ["mylib/src"] # will point to "build/mylib/external_sources/sources/mylib/src" + sources = ["mylib/src/*"] # will list everything inside "build/mylib/external_sources/sources/mylib/src" # Maybe we need to deactivate annoying warnings coming from the library [mylib.flags] compile = ["-Wno-deprecated-declarations", "-Wno-self-assign"] diff --git a/clang_build/clang_build.py b/clang_build/clang_build.py index 4ca129d..06598f9 100644 --- a/clang_build/clang_build.py +++ b/clang_build/clang_build.py @@ -21,8 +21,7 @@ SharedLibrary as _SharedLibrary,\ StaticLibrary as _StaticLibrary,\ HeaderOnly as _HeaderOnly -from .dependency_tools import find_circular_dependencies as _find_circular_dependencies,\ - find_non_existent_dependencies as _find_non_existent_dependencies,\ +from .dependency_tools import find_non_existent_dependencies as _find_non_existent_dependencies,\ get_dependency_walk as _get_dependency_walk from .io_tools import get_sources_and_headers as _get_sources_and_headers from .progress_bar import CategoryProgress as _CategoryProgress,\ @@ -70,18 +69,42 @@ def parse_args(args): parser.add_argument('-V', '--verbose', help='activate more detailed output', action='store_true') - parser.add_argument('-p', '--progress', help='activates a progress bar output. 
is overruled by -V and --debug', action='store_true') - parser.add_argument('-d', '--directory', type=_Path, + parser.add_argument('-p', '--progress', + help='activates a progress bar output', + action='store_true') + parser.add_argument('-d', '--directory', + type=_Path, help='set the root source directory') - parser.add_argument('-b', '--build-type', choices=list(_BuildType), type=_BuildType, default=_BuildType.Default, + parser.add_argument('-b', '--build-type', + choices=list(_BuildType), + type=_BuildType, + default=_BuildType.Default, help='set the build type for this project') - parser.add_argument('-j', '--jobs', type=int, default=1, + parser.add_argument('-a', '--all', + help='build every target, irrespective of whether any root target depends on it', + action='store_true') + parser.add_argument('-t', '--targets', + type=str, + default="", + help='only these targets and their dependencies should be built (comma-separated list)') + parser.add_argument('-f', '--force-rebuild', + help='whether the targets should be rebuilt', + action='store_true') + parser.add_argument('-j', '--jobs', + type=int, + default=1, help='set the number of concurrent build jobs') - parser.add_argument('--debug', help='activates additional debug output, overrides verbosity option.', action='store_true') + parser.add_argument('--debug', + help='activates additional debug output, overrides verbosity option.', + action='store_true') + parser.add_argument('--no-graph', + help='deactivates output of a dependency graph dotfile', + action='store_true') return parser.parse_args(args=args) def _find_clang(logger): + clang = _which('clang') clangpp = _which('clang++') clang_ar = _which('llvm-ar') if clangpp: @@ -94,64 +117,70 @@ def _find_clang(logger): error_message = 'Couldn\'t find llvm-ar executable' logger.error(error_message) raise RuntimeError(error_message) + if not clang: + error_message = 'Couldn\'t find clang executable' + logger.error(error_message) + raise RuntimeError(error_message) logger.info(f'llvm root directory: {llvm_root}') - logger.info(f'clang++ executable: {clangpp}') - logger.info(f'llvm-ar executable: {clang_ar}') + logger.info(f'clang executable: {clang}') + logger.info(f'clang++ executable: {clangpp}') + logger.info(f'llvm-ar executable: {clang_ar}') logger.info(f'Newest supported C++ dialect: {_get_max_supported_compiler_dialect(clangpp)}') - return clangpp, clang_ar + return clang, clangpp, clang_ar class _Environment: def __init__(self, args): # Some defaults - self.logger = None - self.progress_disabled = True - self.buildType = None - self.clangpp = "clang++" - self.clang_ar = "llvm-ar" + self.logger = None + self.buildType = None + self.clang = "clang" + self.clangpp = "clang++" + self.clang_ar = "llvm-ar" # Directory this was called from self.calling_directory = _Path().resolve() # Working directory is where the project root should be - this is searched for 'clang-build.toml' self.working_directory = self.calling_directory - # Verbosity - if not args.debug: - if args.verbose: - _setup_logger(_logging.INFO) - else: - # Only file log - _setup_logger(None) - else: - _setup_logger(_logging.DEBUG) - - # Progress bar - if args.progress: - self.progress_disabled = False - self.logger = _logging.getLogger(__name__) self.logger.info(f'clang-build {__version__}') # Check for clang++ executable - self.clangpp, self.clang_ar = _find_clang(self.logger) + self.clang, self.clangpp, self.clang_ar = _find_clang(self.logger) # Working directory if args.directory: self.working_directory = 
args.directory.resolve() if not self.working_directory.exists(): - error_message = f'ERROR: specified non-existent directory [{self.working_directory}]' + error_message = f'ERROR: specified non-existent directory \'{self.working_directory}\'' self.logger.error(error_message) raise RuntimeError(error_message) - self.logger.info(f'Working directory: {self.working_directory}') + self.logger.info(f'Working directory: \'{self.working_directory}\'') # Build type (Default, Release, Debug) self.buildType = args.build_type self.logger.info(f'Build type: {self.buildType.name}') + # Whether to build all targets + self.build_all = True if args.all else False + + # List of targets which should be built + self.target_list = [] + if args.targets: + if args.all: + error_message = f'ERROR: specified target list \'{args.targets}\', but also flag \'--all\'' + self.logger.error(error_message) + raise RuntimeError(error_message) + self.target_list = [str(target) for target in args.targets.split(',')] + + # Whether to force a rebuild + self.force_rebuild = True if args.force_rebuild else False + # Multiprocessing pool self.processpool = _Pool(processes = args.jobs) self.logger.info(f'Running up to {args.jobs} concurrent build jobs') @@ -159,6 +188,11 @@ def __init__(self, args): # Build directory self.build_directory = _Path('build') + # Progress bar + self.progress_disabled = False if args.progress else True + + # Whether to create a dotfile for graphing dependencies + self.create_dependency_dotfile = False if args.no_graph else True def build(args): @@ -173,7 +207,7 @@ def build(args): # Check for build configuration toml file toml_file = _Path(environment.working_directory, 'clang-build.toml') if toml_file.exists(): - logger.info('Found config file') + logger.info(f'Found config file: \'{toml_file}\'') # Parse config file config = toml.load(str(toml_file)) @@ -187,10 +221,10 @@ def build(args): multiple_projects = True # Create root project - project = _Project(config, environment, multiple_projects) + root_project = _Project(config, environment, multiple_projects, True) # Get list of all targets - target_list += project.get_targets() + target_list += root_project.get_targets(root_project.target_dont_build_list) # # Generate list of all targets # for project in working_projects: @@ -201,12 +235,13 @@ def build(args): files = _get_sources_and_headers({}, environment.working_directory, environment.build_directory) if not files['sourcefiles']: - error_message = f'Error, no sources and no [clang-build.toml] found in folder: {environment.working_directory}' + error_message = f'Error, no sources and no \'clang-build.toml\' found in folder \'{environment.working_directory}\'' logger.error(error_message) raise RuntimeError(error_message) # Create target target_list.append( _Executable( + '', 'main', environment.working_directory, environment.build_directory.joinpath(environment.buildType.name.lower()), @@ -214,6 +249,7 @@ def build(args): files['include_directories'], files['sourcefiles'], environment.buildType, + environment.clang, environment.clangpp)) # Build the targets @@ -236,7 +272,7 @@ def build(args): for target in target_list: if target.__class__ is not _HeaderOnly: if target.unsuccessful_builds: - errors[target.name] = [source.compile_report for source in target.unsuccessful_builds] + errors[target.full_name] = [source.compile_report for source in target.unsuccessful_builds] if errors: raise _CompileError('Compilation was unsuccessful', errors) @@ -251,7 +287,7 @@ def build(args): for target in 
target_list: if target.__class__ is not _HeaderOnly: if target.unsuccessful_link: - errors[target.name] = target.link_report + errors[target.full_name] = target.link_report if errors: raise _LinkError('Linking was unsuccessful', errors) @@ -260,26 +296,39 @@ -def main(): +def _main(): # Build try: - build(parse_args(sys.argv[1:])) + args = parse_args(sys.argv[1:]) + + # Logger verbosity + if not args.debug: + if args.verbose: + _setup_logger(_logging.INFO) + else: + # Only file log + _setup_logger(None) + else: + _setup_logger(_logging.DEBUG) + + build(args) + except _CompileError as compile_error: logger = _logging.getLogger(__name__) logger.error('Compilation was unsuccessful:') for target, errors in compile_error.error_dict.items(): - printout = f'Target [{target}] did not compile. Errors:\n' + printout = f'[{target}]: target did not compile. Errors:\n' printout += ' '.join(errors) logger.error(printout) except _LinkError as link_error: logger = _logging.getLogger(__name__) logger.error('Linking was unsuccessful:') for target, errors in link_error.error_dict.items(): - printout = f'Target [{target}] did not link. Errors:\n{errors}' + printout = f'[{target}]: target did not link. Errors:\n{errors}' logger.error(printout) if __name__ == '__main__': _freeze_support() - main() + _main() diff --git a/clang_build/dependency_tools.py b/clang_build/dependency_tools.py index d2db8fa..cd19a72 100644 --- a/clang_build/dependency_tools.py +++ b/clang_build/dependency_tools.py @@ -39,4 +39,19 @@ def get_dependency_walk(project): subnames = str(dependency).split(".") graph.add_edge(str(nodename), str(subnames[-1])) - return list(reversed(list(_nx.topological_sort(graph)))) \ No newline at end of file + return list(reversed(list(_nx.topological_sort(graph)))) + +def get_dependency_graph(project): + graph = _nx.DiGraph() + for nodename, node in project.items(): + dependencies = node.get('dependencies', []) + if not dependencies: + graph.add_node(str(nodename)) + continue + + for dependency in dependencies: + # Split string at dots + subnames = str(dependency).split(".") + graph.add_edge(str(nodename), str(subnames[-1])) + + return graph \ No newline at end of file diff --git a/clang_build/io_tools.py b/clang_build/io_tools.py index fa435f8..590032c 100644 --- a/clang_build/io_tools.py +++ b/clang_build/io_tools.py @@ -1,63 +1,50 @@ from glob import iglob as _iglob from pathlib import Path as _Path -def _get_header_files(folder, recursive=True): - headers = [] - for ext in ('*.hpp', '*.hxx', '*.h'): - headers += [_Path(f) for f in _iglob(str(folder) + '/**/'+ext, recursive=recursive)] - - return headers - -def _get_source_files(folder, recursive=True): - sources = [] - if recursive: - for ext in ('*.cpp', '*.cxx', '*.c'): - sources += [_Path(f) for f in _iglob(str(folder) + '/**/'+ext, recursive=recursive)] - else: - for ext in ('*.cpp', '*.cxx', '*.c'): - sources += [_Path(f) for f in _iglob(str(folder) + '/*'+ext, recursive=recursive)] +def _get_header_files_in_folders(folders, exclude_patterns=[], recursive=True): + delimiter = '/**/' if recursive else '/*' + patterns = [str(folder) + delimiter + ext for ext in ('*.hpp', '*.hxx', '*.h') for folder in folders] + return _get_files_in_patterns(patterns, exclude_patterns=exclude_patterns, recursive=recursive) + +def _get_source_files_in_folders(folders, exclude_patterns=[], recursive=True): + delimiter = '/**/' if recursive else '/*' + patterns = [str(folder) + delimiter + ext for ext in ('*.cpp', '*.cxx', '*.c') for folder in folders] + return _get_files_in_patterns(patterns, exclude_patterns=exclude_patterns, recursive=recursive) - return 
sources +def _get_files_in_patterns(patterns, exclude_patterns=[], recursive=True): + included = [_Path(f) for pattern in patterns for f in _iglob(str(pattern), recursive=recursive) if _Path(f).is_file()] + excluded = [_Path(f) for pattern in exclude_patterns for f in _iglob(str(pattern), recursive=recursive) if _Path(f).is_file()] + return list(set(included) - set(excluded)) def get_sources_and_headers(target_options, target_root_directory, target_build_directory): output = {'headers': [], 'include_directories': [], 'sourcefiles': []} - relative_includes = [] - relative_source_directories = [] # TODO: maybe the output should also include the root dir, build dir and potentially download dir? # TODO: should warn when a specified directory does not exist! - # Find source files - headers_specified = False - if 'sources' in target_options: - sourcenode = target_options['sources'] - if 'include_directories' in sourcenode: - headers_specified = True - output['include_directories'] += list(set(target_root_directory.joinpath(path) for path in sourcenode['include_directories'])) - for directory in output['include_directories']: - output['headers'] += _get_header_files(directory, recursive=True) - if not headers_specified: + # Find header files + exclude_patterns = list(set( [target_root_directory.joinpath(path) for path in target_options.get('headers_exclude', [])] )) + if 'include_directories' in target_options: + output['include_directories'] += list(set(target_root_directory.joinpath(path) for path in target_options['include_directories'] )) + output['headers'] += _get_header_files_in_folders(output['include_directories'], exclude_patterns=exclude_patterns, recursive=True) + else: output['include_directories'] += [target_root_directory.joinpath(''), target_root_directory.joinpath('include'), target_root_directory.joinpath('thirdparty')] - for directory in output['include_directories']: - output['headers'] += _get_header_files(target_root_directory.joinpath(directory), recursive=False) - - - - sources_specified = False - if 'sources' in target_options: - sourcenode = target_options['sources'] - if 'source_directories' in sourcenode: - sources_specified = True - for directory in list(set(target_root_directory.joinpath(path) for path in sourcenode['source_directories'])): - output['sourcefiles'] += _get_source_files(directory, recursive=True) - if not sources_specified: - output['sourcefiles'] += _get_source_files(target_root_directory.joinpath('src'), recursive=True) + output['headers'] += _get_header_files_in_folders(output['include_directories'], exclude_patterns=exclude_patterns, recursive=False) + + # Find source files from patterns + exclude_patterns = list(set( [target_root_directory.joinpath(path) for path in target_options.get('sources_exclude', [])] )) + sources_patterns = list(set( [target_root_directory.joinpath(path) for path in target_options.get('sources', [])] )) + output['sourcefiles'] += _get_files_in_patterns(sources_patterns, exclude_patterns=exclude_patterns, recursive=True) + # Else find source files from src folder + if not sources_patterns: + output['sourcefiles'] += _get_source_files_in_folders([target_root_directory.joinpath('src')], exclude_patterns=exclude_patterns, recursive=True) + # Search the root folder as last resort if not output['sourcefiles']: - output['sourcefiles'] += _get_source_files(target_root_directory, recursive=False) - + output['sourcefiles'] += _get_source_files_in_folders([target_root_directory], exclude_patterns=exclude_patterns, recursive=False) - 
output['include_directories'] = list(set(output['include_directories'])) - output['headers'] = list(set(output['headers'])) - output['sourcefiles'] = list(set(output['sourcefiles'])) + # Fill return dict + output['include_directories'] = list(set( output['include_directories'] )) + output['headers'] = list(set( output['headers'] )) + output['sourcefiles'] = list(set( output['sourcefiles'] )) return output \ No newline at end of file diff --git a/clang_build/project.py b/clang_build/project.py index 0fdcab3..92f6091 100644 --- a/clang_build/project.py +++ b/clang_build/project.py @@ -20,7 +20,8 @@ HeaderOnly as _HeaderOnly from .dependency_tools import find_circular_dependencies as _find_circular_dependencies,\ find_non_existent_dependencies as _find_non_existent_dependencies,\ - get_dependency_walk as _get_dependency_walk + get_dependency_walk as _get_dependency_walk,\ + get_dependency_graph as _get_dependency_graph from .io_tools import get_sources_and_headers as _get_sources_and_headers from .progress_bar import CategoryProgress as _CategoryProgress,\ IteratorProgress as _IteratorProgress @@ -31,11 +32,33 @@ class Project: - def __init__(self, config, environment, multiple_projects): + def __init__(self, config, environment, multiple_projects, is_root_project, parent_name=""): + + self.working_directory = environment.working_directory + self.is_root_project = is_root_project self.name = config.get("name", "") - self.working_directory = environment.working_directory + if "directory" in config: + self.working_directory = environment.working_directory.joinpath(config["directory"]) + toml_file = _Path(self.working_directory, 'clang-build.toml') + if toml_file.exists(): + environment.logger.info(f'Found config file {toml_file}') + config = toml.load(str(toml_file)) + else: + error_message = f"Project [[{self.name}]]: could not find project file in directory {self.working_directory}" + _LOGGER.exception(error_message) + raise RuntimeError(error_message) + + # Re-fetch name if name not specified previously and config was changed + if not self.name: + self.name = config.get("name", "") + + # If this is not the root project, it needs to have a name + if not is_root_project and not self.name: + error_message = f"Subproject name was not specified in the parent project [[{parent_name}]], nor its config file." + _LOGGER.exception(error_message) + raise RuntimeError(error_message) # Project build directory self.build_directory = environment.build_directory @@ -47,33 +70,31 @@ def __init__(self, config, environment, multiple_projects): download_directory = self.build_directory.joinpath('external_sources') # Check if directory is already present and non-empty if download_directory.exists() and _os.listdir(str(download_directory)): - _LOGGER.info(f'External project [[{self.name}]]: sources found in {str(download_directory)}') + _LOGGER.info(f'[[{self.name}]]: external project sources found in \'{str(download_directory)}\'') # Otherwise we download the sources else: - _LOGGER.info(f'External project [[{self.name}]]: downloading to {str(download_directory)}') + _LOGGER.info(f'[[{self.name}]]: downloading external project to \'{str(download_directory)}\'') download_directory.mkdir(parents=True, exist_ok=True) try: _subprocess.run(["git", "clone", config["url"], str(download_directory)], stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, encoding='utf-8') except _subprocess.CalledProcessError as e: - error_message = f"Error trying to download external project [[{self.name}]]. 
Message " + e.output + error_message = f"[[{self.name}]]: error trying to download external project. Message " + e.output _LOGGER.exception(error_message) raise RuntimeError(error_message) - _LOGGER.info(f'External project [[{self.name}]]: downloaded') + _LOGGER.info(f'[[{self.name}]]: external project downloaded') self.working_directory = download_directory - if "directory" in config: - self.working_directory = environment.working_directory.joinpath(config["directory"]) - toml_file = _Path(self.working_directory, 'clang-build.toml') - if toml_file.exists(): - environment.logger.info(f'Found config file {toml_file}') - config = toml.load(str(toml_file)) - else: - error_message = f"Project {self.name}: could not find project file in directory {self.working_directory}" - _LOGGER.exception(error_message) - raise RuntimeError(error_message) + if "version" in config: + version = config["version"] + try: + _subprocess.run(["git", "checkout", version], cwd=download_directory, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, encoding='utf-8') + except _subprocess.CalledProcessError as e: + error_message = f"[[{self.name}]]: error trying to checkout version \'{version}\' from url \'{config['url']}\'. Message " + e.output + _LOGGER.exception(error_message) + raise RuntimeError(error_message) # Get subset of config which contains targets not associated to any project name - self.targets_config = {key: val for key, val in config.items() if not key == "subproject" and not key == "name"} + self.targets_config = {key: val for key, val in config.items() if key not in ["subproject", "name", "url", "version"]} # Get subsets of config which define projects self.subprojects_config = {key: val for key, val in config.items() if key == "subproject"} @@ -81,68 +102,114 @@ def __init__(self, config, environment, multiple_projects): # An "anonymous" project, i.e. project-less targets, is not allowed together with subprojects if self.targets_config and self.subprojects_config: if not self.name: - error_message = f"Project {self.name}: Your config file specified one or more projects. In this case you are not allowed to specify targets which do not belong to a project." + error_message = f"[[{self.name}]]: the config file specifies one or more projects. In this case it is not allowed to specify targets which do not belong to a project." 
_LOGGER.exception(error_message) raise RuntimeError(error_message) - # Generate Projects - subprojects = [] - # if targets_config: - # subprojects += [Project(targets_config, environment, multiple_projects)] - + # Generate subprojects of this project + self.subprojects = [] if self.subprojects_config: - subprojects += [Project(config, environment, multiple_projects) for config in self.subprojects_config["subproject"]] - - self.subprojects = subprojects - self.subproject_names = [project.name if project.name else "anonymous" for project in subprojects] + self.subprojects += [Project(config, environment, multiple_projects, False, self.name) for config in self.subprojects_config["subproject"]] + self.subproject_names = [project.name if project.name else "anonymous" for project in self.subprojects] # Use sub-build directories if the project contains multiple targets multiple_targets = False if len(self.targets_config.items()) > 1: multiple_targets = True + # TODO: document why this is here if not self.targets_config: return - targets_and_subprojects = self.targets_config.copy() - for project in subprojects: - targets_and_subprojects[project.name] = project.targets_config - - targets_and_subproject_targets = self.targets_config.copy() - for project in subprojects: - targets_and_subproject_targets.update(project.targets_config) - - # Parse targets from toml file - non_existent_dependencies = _find_non_existent_dependencies(targets_and_subprojects) - if non_existent_dependencies: - error_messages = [f'In {target}: the dependency {dependency} does not point to a valid target' for\ - target, dependency in non_existent_dependencies] - - error_message = _textwrap.indent('\n'.join(error_messages), prefix=' '*3) - _LOGGER.exception(error_message) - raise RuntimeError(error_message) - + # Check this project's targets for circular dependencies circular_dependencies = _find_circular_dependencies(self.targets_config) if circular_dependencies: - error_messages = [f'In {target}: circular dependency -> {dependency}' for\ + error_messages = [f'Circular dependency [{target}] -> [{dependency}]' for\ target, dependency in circular_dependencies] - error_message = _textwrap.indent('\n'.join(error_messages), prefix=' '*3) + error_message = f"[[{self.name}]]:\n" + _textwrap.indent('\n'.join(error_messages), prefix=' '*3) _LOGGER.exception(error_message) raise RuntimeError(error_message) + # Create a structured dict of target and subproject configs for this project to resolve all dependencies + targets_and_subprojects_config = self.targets_config.copy() + for project in self.subprojects: + targets_and_subprojects_config[project.name] = project.targets_config - target_names_total = _get_dependency_walk(targets_and_subproject_targets) - target_names_project = [] - for name in target_names_total: - if name in self.targets_config: - target_names_project.append(name) - - self.target_list = [] + non_existent_dependencies = _find_non_existent_dependencies(targets_and_subprojects_config) + if non_existent_dependencies: + error_messages = [f'[[{self.name}]].[{target}]: the dependency [{dependency}] does not point to a valid target of this project or its subprojects' for\ + target, dependency in non_existent_dependencies] + error_message = f"[[{self.name}]]:\n" + _textwrap.indent('\n'.join(error_messages), prefix=' '*3) + _LOGGER.exception(error_message) + raise RuntimeError(error_message) - for target_name in _IteratorProgress(target_names_project, environment.progress_disabled, len(target_names_project)): - target_node = 
targets_and_subproject_targets[target_name] + # Create a dict of all target configs for this project and its subprojects + # TODO: this approach has the problem that two subprojects cannot have a target with the same name! + targets_and_subproject_targets_config = self.targets_config.copy() + for project in self.subprojects: + targets_and_subproject_targets_config.update(project.targets_config) + + # Unless all should be built, don't build targets which are not in the root project + # or a dependency of a target of the root project + self.target_dont_build_list = [] + if is_root_project and not environment.build_all: + + import networkx as nx + G = _get_dependency_graph(targets_and_subproject_targets_config) + + # Root targets (i.e. targets of the root project), + # or the specified projects will be retained + base_set = set(self.targets_config) + if environment.target_list: + _LOGGER.info(f'Only building targets [{"], [".join(environment.target_list)}] out of base set of targets [{"], [".join(base_set)}].') + for target in self.targets_config: + if target not in environment.target_list: + base_set -= {target} + + # Descendants will be retained, too + self.target_dont_build_list = set(targets_and_subproject_targets_config) + for root_target in base_set: + self.target_dont_build_list -= {root_target} + self.target_dont_build_list -= nx.algorithms.dag.descendants(G, root_target) + self.target_dont_build_list = list(self.target_dont_build_list) + + if self.target_dont_build_list: + _LOGGER.info(f'Not building target(s) [{"], [".join(self.target_dont_build_list)}].') + + elif is_root_project: + _LOGGER.info(f'Building all targets!') + + + # Create a dotfile of the dependency graph + if is_root_project and environment.create_dependency_dotfile: + create_dotfile = False + try: + import pydot + create_dotfile = True + except ImportError: + _LOGGER.error(f'Could not create dependency dotfile, as pydot is not installed') + + if create_dotfile: + import networkx as nx + G = _get_dependency_graph(targets_and_subproject_targets_config) + + # Color the targets which should be built in red + for d in set(targets_and_subproject_targets_config) - set(self.target_dont_build_list): + G.node[d]['color'] = 'red' + + _Path(environment.build_directory).mkdir(parents=True, exist_ok=True) + nx.drawing.nx_pydot.write_dot(G, str(_Path(environment.build_directory, 'dependencies.dot'))) + + # Generate a correctly ordered list of target names + target_names_ordered = [name for name in _get_dependency_walk(targets_and_subproject_targets_config) if name in self.targets_config] + + # Generate the list of target instances + self.target_list = [] + for target_name in _IteratorProgress(target_names_ordered, environment.progress_disabled, len(target_names_ordered)): + target_name_full = f'{self.name}.{target_name}' if self.name else target_name + target_node = 
targets_and_subproject_targets[target_name] + # Create a dict of all target configs for this project and its subprojects + # TODO: this approach has the problem that two subprojects cannot have a target with the same name! + targets_and_subproject_targets_config = self.targets_config.copy() + for project in self.subprojects: + targets_and_subproject_targets_config.update(project.targets_config) + + # Unless all should be built, don't build targets which are not in the root project + # or a dependency of a target of the root project + self.target_dont_build_list = [] + if is_root_project and not environment.build_all: + + import networkx as nx + G = _get_dependency_graph(targets_and_subproject_targets_config) + + # Root targets (i.e. targets of the root project), + # or the specified projects will be retained + base_set = set(self.targets_config) + if environment.target_list: + _LOGGER.info(f'Only building targets [{"], [".join(environment.target_list)}] out of base set of targets [{"], [".join(base_set)}].') + for target in self.targets_config: + if target not in environment.target_list: + base_set -= {target} + + # Descendants will be retained, too + self.target_dont_build_list = set(targets_and_subproject_targets_config) + for root_target in base_set: + self.target_dont_build_list -= {root_target} + self.target_dont_build_list -= nx.algorithms.dag.descendants(G, root_target) + self.target_dont_build_list = list(self.target_dont_build_list) + + if self.target_dont_build_list: + _LOGGER.info(f'Not building target(s) [{"], [".join(self.target_dont_build_list)}].') + + elif is_root_project: + _LOGGER.info(f'Building all targets!') + + + # Create a dotfile of the dependency graph + if is_root_project and environment.create_dependency_dotfile: + create_dotfile = False + try: + import pydot + create_dotfile = True + except: + _LOGGER.error(f'Could not create dependency dotfile, as pydot is not installed') + + if create_dotfile: + import networkx as nx + G = _get_dependency_graph(targets_and_subproject_targets_config) + + # Color the targets which should be built in red + for d in set(targets_and_subproject_targets_config) - set(self.target_dont_build_list): + G.node[d]['color'] = 'red' + + _Path(environment.build_directory).mkdir(parents=True, exist_ok=True) + nx.drawing.nx_pydot.write_dot(G, str(_Path(environment.build_directory, 'dependencies.dot'))) + + # Generate a correctly ordered list of target names + target_names_ordered = [name for name in _get_dependency_walk(targets_and_subproject_targets_config) if name in self.targets_config] + + # Generate the list of target instances + self.target_list = [] + for target_name in _IteratorProgress(target_names_ordered, environment.progress_disabled, len(target_names_ordered)): + target_name_full = f'{self.name}.{target_name}' if self.name else target_name + target_node = targets_and_subproject_targets_config[target_name] # Directories target_build_directory = self.build_directory if not multiple_targets else self.build_directory.joinpath(target_name) target_root_directory = self.working_directory @@ -151,21 +218,22 @@ def __init__(self, config, environment, multiple_projects): ### TODO: external sources should be fetched before any sources are read in, i.e. 
even before the first target is created external = "url" in target_node if external: + url = target_node["url"] download_directory = target_build_directory.joinpath('external_sources') # Check if directory is already present and non-empty if download_directory.exists() and _os.listdir(str(download_directory)): - _LOGGER.info(f'External target [{target_name}]: sources found in {str(download_directory)}') + _LOGGER.info(f'[{target_name_full}]: external target sources found in {str(download_directory)}') # Otherwise we download the sources else: - _LOGGER.info(f'External target [{target_name}]: downloading to {str(download_directory)}') + _LOGGER.info(f'[{target_name_full}]: downloading external target to {str(download_directory)}') download_directory.mkdir(parents=True, exist_ok=True) try: - _subprocess.run(["git", "clone", target_node["url"], str(download_directory)], stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, encoding='utf-8') + _subprocess.run(["git", "clone", url, str(download_directory)], stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, encoding='utf-8') except _subprocess.CalledProcessError as e: - error_message = f"Error trying to download external target [{target_name}]. Message " + e.output + error_message = f"[{target_name_full}]: error trying to download external target. " + e.output _LOGGER.exception(error_message) raise RuntimeError(error_message) - _LOGGER.info(f'External target [{target_name}]: downloaded') + _LOGGER.info(f'[{target_name_full}]: external target downloaded') # self.includeDirectories.append(download_directory) target_root_directory = download_directory @@ -174,7 +242,7 @@ def __init__(self, config, environment, multiple_projects): try: _subprocess.run(["git", "checkout", version], cwd=target_root_directory, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, encoding='utf-8') except _subprocess.CalledProcessError as e: - error_message = f"Error trying to checkout target [{target_name}] version \'{version}\'. Message " + e.output + error_message = f"[{target_name_full}]: error trying to checkout version \'{version}\' from url \'{url}\'. Message " + e.output _LOGGER.exception(error_message) raise RuntimeError(error_message) @@ -193,23 +261,25 @@ def __init__(self, config, environment, multiple_projects): subnames = name.split(".") if subnames[0] in self.subproject_names: idx = self.subproject_names.index(subnames[0]) - # TODO: so far, we are only going one layer deep... this is not enough + # TODO: so far, we are only going one layer deep... this is not enough! + # Also, this should probably be done differently... 
subproject = self.subprojects[idx] - for i in range(1, len(subnames)): - subproject = self.subprojects[i-1] - for target in subproject.target_list: - if subnames[-1] == target.name: - dependencies.append(target) - i = len(subnames) - if name in target_names_project: - dependencies.append(self.target_list[target_names_project.index(name)]) - + for target in subproject.target_list: + if subnames[-1] == target.name: + dependencies.append(target) + i = len(subnames) + if name in target_names_ordered: + dependencies.append(self.target_list[target_names_ordered.index(name)]) + + # Make sure all dependencies are actually libraries executable_dependencies = [target for target in dependencies if target.__class__ is _Executable] - if executable_dependencies: exelist = ', '.join([f'[{dep.name}]' for dep in executable_dependencies]) - environment.logger.error(f'Error: The following targets are linking dependencies but were identified as executables:\n {exelist}') + error_message = f'[[{self.name}]]: ERROR: The following targets are linking dependencies but were identified as executables:\n {exelist}' + _LOGGER.exception(error_message) + raise RuntimeError(error_message) + # Create specific target if the target type was specified if 'target_type' in target_node: # # Add an executable @@ -217,6 +287,7 @@ def __init__(self, config, environment, multiple_projects): if target_node['target_type'].lower() == 'executable': self.target_list.append( _Executable( + self.name, target_name, target_root_directory, target_build_directory, @@ -224,16 +295,19 @@ def __init__(self, config, environment, multiple_projects): files['include_directories'], files['sourcefiles'], environment.buildType, + environment.clang, environment.clangpp, target_node, - dependencies)) + dependencies, + environment.force_rebuild)) # # Add a shared library # - if target_node['target_type'].lower() == 'shared library': + elif target_node['target_type'].lower() == 'shared library': self.target_list.append( _SharedLibrary( + self.name, target_name, target_root_directory, target_build_directory, @@ -241,9 +315,11 @@ def __init__(self, config, environment, multiple_projects): files['include_directories'], files['sourcefiles'], environment.buildType, + environment.clang, environment.clangpp, target_node, - dependencies)) + dependencies, + environment.force_rebuild)) # # Add a static library @@ -251,6 +327,7 @@ def __init__(self, config, environment, multiple_projects): elif target_node['target_type'].lower() == 'static library': self.target_list.append( _StaticLibrary( + self.name, target_name, target_root_directory, target_build_directory, @@ -258,51 +335,60 @@ def __init__(self, config, environment, multiple_projects): files['include_directories'], files['sourcefiles'], environment.buildType, + environment.clang, environment.clangpp, environment.clang_ar, target_node, - dependencies)) + dependencies, + environment.force_rebuild)) # # Add a header-only # elif target_node['target_type'].lower() == 'header only': if files['sourcefiles']: - environment.logger.info(f'Source files found for header-only target {target_name}. You may want to check your build configuration.') + environment.logger.info(f'[{target_name_full}]: {len(files["sourcefiles"])} source file(s) found for header-only target. 
You may want to check your build configuration.') self.target_list.append( _HeaderOnly( + self.name, target_name, target_root_directory, target_build_directory, files['headers'], files['include_directories'], environment.buildType, + environment.clang, environment.clangpp, target_node, dependencies)) else: - environment.logger.error(f'ERROR: Unsupported target type: {target_node["target_type"]}') + error_message = f'[[{self.name}]]: ERROR: Unsupported target type: "{target_node["target_type"].lower()}"' + _LOGGER.exception(error_message) + raise RuntimeError(error_message) # No target specified so must be executable or header only else: if not files['sourcefiles']: - environment.logger.info(f'No source files found for target {target_name}. Creating header-only target.') + environment.logger.info(f'[{target_name_full}]: no source files found. Creating header-only target.') self.target_list.append( _HeaderOnly( + self.name, target_name, target_root_directory, target_build_directory, files['headers'], files['include_directories'], environment.buildType, + environment.clang, environment.clangpp, target_node, dependencies)) else: - environment.logger.info(f'{len(files["sourcefiles"])} source files found for target {target_name}. Creating executable target.') + environment.logger.info(f'[{target_name_full}]: {len(files["sourcefiles"])} source file(s) found. Creating executable target.') self.target_list.append( _Executable( + self.name, target_name, target_root_directory, target_build_directory, @@ -310,13 +396,15 @@ def __init__(self, config, environment, multiple_projects): files['include_directories'], files['sourcefiles'], environment.buildType, + environment.clang, environment.clangpp, target_node, - dependencies)) + dependencies, + environment.force_rebuild)) - def get_targets(self): + def get_targets(self, exclude=[]): targetlist = [] for subproject in self.subprojects: - targetlist += subproject.get_targets() - targetlist += self.target_list + targetlist += subproject.get_targets(exclude) + targetlist += [target for target in self.target_list if target.name not in exclude] return targetlist \ No newline at end of file diff --git a/clang_build/single_source.py b/clang_build/single_source.py index f2472c0..029bff5 100644 --- a/clang_build/single_source.py +++ b/clang_build/single_source.py @@ -53,22 +53,28 @@ def __init__( object_directory, include_strings, compile_flags, - clangpp): + clang, + clangpp, + max_cpp_dialect): # Get the relative file path - self.name = source_file.name - self.source_file = source_file + self.name = source_file.name + self.source_file = source_file # If the source file is in a directory called 'src', we do not create a # subdirectory called 'src' in the build folder structure relpath = _os.path.relpath(source_file.parents[0], current_target_root_path) - if current_target_root_path.joinpath('src').exists(): + if current_target_root_path.joinpath('src').exists() and "src" in self.source_file.parts: relpath = _os.path.relpath(relpath, 'src') # Set name, extension and potentially produced output files self.object_file = _Path(object_directory, relpath, self.source_file.stem + '.o') self.depfile = _Path(depfile_directory, relpath, self.source_file.stem + '.d') + compiler = clangpp + if source_file.suffix in [".c", ".cc", ".m"]: + compiler = clang + max_cpp_dialect = '' self.needs_rebuild = _needs_rebuild(self.object_file, self.source_file, self.depfile) @@ -77,18 +83,17 @@ def __init__( self.compilation_failed = False # prepare everything for dependency file 
generation - self.depfile.parents[0].mkdir(parents=True, exist_ok=True) - self.dependency_command = [clangpp, '-E', '-MMD', str(self.source_file), '-MF', str(self.depfile)] + flags + self.dependency_command = [compiler] + ([max_cpp_dialect] if max_cpp_dialect else []) + ['-E', '-MMD', str(self.source_file), '-MF', str(self.depfile)] + flags # prepare everything for compilation - self.object_file.parents[0].mkdir(parents=True, exist_ok=True) - self.compile_command = [clangpp, '-c', str(self.source_file), '-o', str(self.object_file)] + flags + platform_flags + self.compile_command = [compiler] + ([max_cpp_dialect] if max_cpp_dialect else []) + ['-c', str(self.source_file), '-o', str(self.object_file)] + flags + platform_flags def generate_depfile(self): # TODO: logging in multiprocess # _LOGGER.debug(' ' + ' '.join(dependency_command)) try: + self.depfile.parents[0].mkdir(parents=True, exist_ok=True) self.depfile_report = _subprocess.check_output(self.dependency_command, stderr=_subprocess.STDOUT).decode('utf-8').strip() self.depfile_failed = False except _subprocess.CalledProcessError as error: @@ -100,6 +105,7 @@ def compile(self): # TODO: logging in multiprocess # _LOGGER.debug(' ' + ' '.join(self.compile_command)) try: + self.object_file.parents[0].mkdir(parents=True, exist_ok=True) self.compile_report = _subprocess.check_output(self.compile_command, stderr=_subprocess.STDOUT).decode('utf-8').strip() self.compilation_failed = False except _subprocess.CalledProcessError as error: diff --git a/clang_build/target.py b/clang_build/target.py index 3b1d067..ec211a5 100644 --- a/clang_build/target.py +++ b/clang_build/target.py @@ -19,7 +19,7 @@ _LOGGER = _logging.getLogger('clang_build.clang_build') class Target: - DEFAULT_COMPILE_FLAGS = ['-Wall', '-Werror'] + DEFAULT_COMPILE_FLAGS = ['-Wall', '-Wextra', '-Wpedantic', '-Werror'] DEFAULT_RELEASE_COMPILE_FLAGS = ['-O3', '-DNDEBUG'] DEFAULT_DEBUG_COMPILE_FLAGS = ['-O0', '-g3', '-DDEBUG'] DEFAULT_COVERAGE_COMPILE_FLAGS = ( @@ -31,12 +31,14 @@ class Target: def __init__(self, + project_name, name, root_directory, build_directory, headers, include_directories, build_type, + clang, clangpp, options=None, dependencies=None): @@ -50,6 +52,7 @@ def __init__(self, # Basics self.name = name + self.full_name = f'{project_name}.{name}' if project_name else name self.root_directory = _Path(root_directory) self.build_type = build_type @@ -74,13 +77,13 @@ def __init__(self, compile_flags = [] compile_flags_debug = Target.DEFAULT_DEBUG_COMPILE_FLAGS compile_flags_release = Target.DEFAULT_RELEASE_COMPILE_FLAGS - self.linkFlags = [] + self.link_flags = [] if 'flags' in options: compile_flags += options['flags'].get('compile', []) compile_flags_release += options['flags'].get('compileRelease', []) compile_flags_debug += options['flags'].get('compileDebug', []) - self.linkFlags += options['flags'].get('link', []) + self.link_flags += options['flags'].get('link', []) self.compile_flags = compile_flags if self.build_type == _BuildType.Release: @@ -115,10 +118,10 @@ def compile(self, process_pool, progress_disabled): class HeaderOnly(Target): def link(self): - _LOGGER.info(f'Header-only target [{self.name}] does not require linking.') + _LOGGER.info(f'[{self.full_name}]: Header-only target does not require linking.') def compile(self, process_pool, progress_disabled): - _LOGGER.info(f'Header-only target [{self.name}] does not require compiling.') + _LOGGER.info(f'[{self.full_name}]: Header-only target does not require compiling.') def generate_depfile_single_source(buildable): buildable.generate_depfile() @@ -131,6 +134,7 @@ def 
compile_single_source(buildable): class Compilable(Target): def __init__(self, + project_name, name, root_directory, build_directory, @@ -138,6 +142,7 @@ def __init__(self, include_directories, source_files, build_type, + clang, clangpp, link_command, output_folder, @@ -145,21 +150,24 @@ def __init__(self, prefix, suffix, options=None, - dependencies=None): + dependencies=None, + force_build=False): super().__init__( + project_name=project_name, name=name, root_directory=root_directory, build_directory=build_directory, headers=headers, include_directories=include_directories, build_type=build_type, + clang=clang, clangpp=clangpp, options=options, dependencies=dependencies) if not source_files: - error_message = f'ERROR: Targt [{name}] was defined as a {self.__class__} but no source files were found' + error_message = f'[{self.full_name}]: ERROR: Target was defined as a {self.__class__} but no source files were found' _LOGGER.error(error_message) raise RuntimeError(error_message) @@ -168,14 +176,12 @@ def __init__(self, if dependencies is None: dependencies = [] + self.force_build = force_build + self.object_directory = self.build_directory.joinpath('obj').resolve() self.depfile_directory = self.build_directory.joinpath('dep').resolve() self.output_folder = self.build_directory.joinpath(output_folder).resolve() - self.object_directory.mkdir(parents=True, exist_ok=True) - self.depfile_directory.mkdir(parents=True, exist_ok=True) - self.output_folder.mkdir(parents=True, exist_ok=True) - if 'output_name' in options: self.outname = options['output_name'] else: @@ -185,6 +191,7 @@ def __init__(self, # Clang + self.clang = clang self.clangpp = clangpp # Sources @@ -198,8 +205,10 @@ def __init__(self, depfile_directory=self.depfile_directory, object_directory=self.object_directory, include_strings=self.get_include_directory_command(), - compile_flags=[self.dialect] + Target.DEFAULT_COMPILE_FLAGS + self.compile_flags, - clangpp=self.clangpp) for source_file in self.source_files] + compile_flags=Target.DEFAULT_COMPILE_FLAGS + self.compile_flags, + clang =self.clang, + clangpp=self.clangpp, + max_cpp_dialect=self.dialect) for source_file in self.source_files] # If compilation of buildables fail, they will be stored here later self.unsuccessful_builds = [] @@ -207,8 +216,6 @@ def __init__(self, # Linking setup self.link_command = link_command + [str(self.outfile)] - self.link_command += self.linkFlags - ## Additional scripts self.before_compile_script = "" @@ -224,29 +231,32 @@ def __init__(self, def compile(self, process_pool, progress_disabled): # Object file only needs to be (re-)compiled if the source file or headers it depends on changed - self.needed_buildables = [buildable for buildable in self.buildables if buildable.needs_rebuild] + if not self.force_build: + self.needed_buildables = [buildable for buildable in self.buildables if buildable.needs_rebuild] + else: + self.needed_buildables = self.buildables # If the target was not modified, it may not need to compile if not self.needed_buildables: - _LOGGER.info(f'Target [{self.name}] is already compiled') + _LOGGER.info(f'[{self.full_name}]: target is already compiled') return - _LOGGER.info(f'Target [{self.name}] needs to build sources %s', [b.name for b in self.needed_buildables]) + _LOGGER.info(f'[{self.full_name}]: target needs to build sources %s', [b.name for b in self.needed_buildables]) # Before-compile step if self.before_compile_script: - script_file = self.root_directory.joinpath(self.before_compile_script) - 
_LOGGER.info(f'Pre-compile step of target [{self.name}]: {script_file}') + script_file = self.root_directory.joinpath(self.before_compile_script).resolve() + _LOGGER.info(f'[{self.full_name}]: pre-compile step: \'{script_file}\'') original_directory = _os.getcwd() _os.chdir(self.root_directory) with open(script_file) as f: code = compile(f.read(), script_file, 'exec') exec(code, globals(), locals()) _os.chdir(original_directory) - _LOGGER.info(f'Finished pre-compile step of target [{self.name}]') + _LOGGER.info(f'[{self.full_name}]: finished pre-compile step') # Execute depfile generation command - _LOGGER.info(f'Scan dependencies of target [{self.outname}]') + _LOGGER.info(f'[{self.full_name}]: scan dependencies') for b in self.needed_buildables: _LOGGER.debug(' '.join(b.dependency_command)) self.needed_buildables = list(_get_build_progress_bar( @@ -258,7 +268,7 @@ def compile(self, process_pool, progress_disabled): name=self.name)) # Execute compile command - _LOGGER.info(f'Compile target [{self.outname}]') + _LOGGER.info(f'[{self.full_name}]: compile') for b in self.needed_buildables: _LOGGER.debug(' '.join(b.compile_command)) self.needed_buildables = list(_get_build_progress_bar( @@ -275,21 +285,22 @@ def compile(self, process_pool, progress_disabled): def link(self): # Before-link step if self.before_link_script: - _LOGGER.info(f'Pre-link step of target [{self.name}]') + script_file = self.root_directory.joinpath(self.before_link_script) + _LOGGER.info(f'[{self.full_name}]: pre-link step: \'{script_file}\'') original_directory = _os.getcwd() _os.chdir(self.root_directory) - script_file = self.root_directory.joinpath(self.before_link_script) with open(script_file) as f: code = compile(f.read(), script_file, 'exec') exec(code, globals(), locals()) _os.chdir(original_directory) - _LOGGER.info(f'Finished pre-link step of target [{self.name}]') + _LOGGER.info(f'[{self.full_name}]: finished pre-link step') - _LOGGER.info(f'Link target [{self.name}]') + _LOGGER.info(f'[{self.full_name}]: link -> "{self.outfile}"') _LOGGER.debug(' ' + ' '.join(self.link_command)) # Execute link command try: + self.output_folder.mkdir(parents=True, exist_ok=True) self.link_report = _subprocess.check_output(self.link_command, stderr=_subprocess.STDOUT).decode('utf-8').strip() self.unsuccessful_link = False except _subprocess.CalledProcessError as error: @@ -298,19 +309,20 @@ def link(self): ## After-build step if self.after_build_script: - _LOGGER.info(f'After-build step of target [{self.name}]') + script_file = self.root_directory.joinpath(self.after_build_script) + _LOGGER.info(f'[{self.full_name}]: after-build step: \'{script_file}\'') original_directory = _os.getcwd() _os.chdir(self.root_directory) - script_file = self.root_directory.joinpath(self.after_build_script) with open(script_file) as f: code = compile(f.read(), script_file, 'exec') exec(code, globals(), locals()) _os.chdir(original_directory) - _LOGGER.info(f'Finished after-build step of target [{self.name}]') + _LOGGER.info(f'[{self.full_name}]: finished after-build step') class Executable(Compilable): def __init__(self, + project_name, name, root_directory, build_directory, @@ -318,11 +330,14 @@ def __init__(self, include_directories, source_files, build_type, + clang, clangpp, options=None, - dependencies=None): + dependencies=None, + force_build=False): super().__init__( + project_name=project_name, name=name, root_directory=root_directory, build_directory=build_directory, @@ -330,22 +345,26 @@ def __init__(self, 
include_directories=include_directories, source_files=source_files, build_type=build_type, + clang=clang, clangpp=clangpp, link_command=[clangpp, '-o'], - output_folder = _platform.EXECUTABLE_OUTPUT, + output_folder=_platform.EXECUTABLE_OUTPUT, platform_flags=_platform.PLATFORM_EXTRA_FLAGS_EXECUTABLE, prefix=_platform.EXECUTABLE_PREFIX, suffix=_platform.EXECUTABLE_SUFFIX, options=options, - dependencies=dependencies) + dependencies=dependencies, + force_build=force_build) + + ### Link self + self.link_command += [str(buildable.object_file) for buildable in self.buildables] ### Library dependency search paths for target in self.dependency_targets: if not target.__class__ is HeaderOnly: self.link_command += ['-L', str(target.output_folder.resolve())] - ### Link self - self.link_command += [str(buildable.object_file) for buildable in self.buildables] + self.link_command += self.link_flags ### Link dependencies for target in self.dependency_targets: @@ -355,6 +374,7 @@ def __init__(self, class SharedLibrary(Compilable): def __init__(self, + project_name, name, root_directory, build_directory, @@ -362,11 +382,14 @@ def __init__(self, include_directories, source_files, build_type, + clang, clangpp, options=None, - dependencies=None): + dependencies=None, + force_build=False): super().__init__( + project_name=project_name, name=name, root_directory=root_directory, build_directory=build_directory, @@ -374,22 +397,26 @@ def __init__(self, include_directories=include_directories, source_files=source_files, build_type=build_type, + clang=clang, clangpp=clangpp, link_command=[clangpp, '-shared', '-o'], - output_folder = _platform.SHARED_LIBRARY_OUTPUT, + output_folder=_platform.SHARED_LIBRARY_OUTPUT, platform_flags=_platform.PLATFORM_EXTRA_FLAGS_SHARED, prefix=_platform.SHARED_LIBRARY_PREFIX, suffix=_platform.SHARED_LIBRARY_SUFFIX, options=options, - dependencies=dependencies) + dependencies=dependencies, + force_build=force_build) + + ### Link self + self.link_command += [str(buildable.object_file) for buildable in self.buildables] ### Library dependency search paths for target in self.dependency_targets: if not target.__class__ is HeaderOnly: self.link_command += ['-L', str(target.output_folder.resolve())] - ### Link self - self.link_command += [str(buildable.object_file) for buildable in self.buildables] + self.link_command += self.link_flags ### Link dependencies for target in self.dependency_targets: @@ -399,6 +426,7 @@ def __init__(self, class StaticLibrary(Compilable): def __init__(self, + project_name, name, root_directory, build_directory, @@ -406,12 +434,15 @@ def __init__(self, include_directories, source_files, build_type, + clang, clangpp, clang_ar, options=None, - dependencies=None): + dependencies=None, + force_build=False): super().__init__( + project_name=project_name, name=name, root_directory=root_directory, build_directory=build_directory, @@ -419,20 +450,23 @@ def __init__(self, include_directories=include_directories, source_files=source_files, build_type=build_type, + clang=clang, clangpp=clangpp, link_command=[clang_ar, 'rc'], - output_folder = _platform.STATIC_LIBRARY_OUTPUT, + output_folder=_platform.STATIC_LIBRARY_OUTPUT, platform_flags=_platform.PLATFORM_EXTRA_FLAGS_STATIC, prefix=_platform.STATIC_LIBRARY_PREFIX, suffix=_platform.STATIC_LIBRARY_SUFFIX, options=options, - dependencies=dependencies) + dependencies=dependencies, + force_build=force_build) # ### Include directories # self.link_command += self.get_include_directory_command() ### Link self self.link_command += 
[str(buildable.object_file) for buildable in self.buildables] + self.link_command += self.link_flags ### Link dependencies for target in self.dependency_targets: diff --git a/docs/Defaults.md b/docs/Defaults.md index 143946f..1cd6a87 100644 --- a/docs/Defaults.md +++ b/docs/Defaults.md @@ -27,7 +27,7 @@ where by default `clang-build` will try the target's root directory and an "incl ### Source directories `clang-build`'s source directories will be searched for source files for a target. -In your project file, you can add a `source_directories` array to specify a target's source directories, +In your project file, you can add a `sources` array of patterns to specify a target's sources, where by default `clang-build` will try the target's root directory and a "src" subdirectory. diff --git a/setup.cfg b/setup.cfg index 5e4b891..69bc7f4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,4 +12,4 @@ packages = clang_build [entry_points] console_scripts = - clang-build = clang_build.clang_build:main \ No newline at end of file + clang-build = clang_build.clang_build:_main \ No newline at end of file diff --git a/test/boost-filesystem/clang-build.toml b/test/boost-filesystem/clang-build.toml index 01f5ded..e9e7c4a 100644 --- a/test/boost-filesystem/clang-build.toml +++ b/test/boost-filesystem/clang-build.toml @@ -1,91 +1,98 @@ +name = "myproject" + [myexe] target_type = "executable" -dependencies = ["filesystem"] +dependencies = ["boost.filesystem"] + + +[[subproject]] +name = "boost" -[system] +[subproject.filesystem] target_type = "static library" -url = "/~https://github.com/boostorg/system" +url = "/~https://github.com/boostorg/filesystem" version = "boost-1.65.0" -dependencies = ["core", "winapi", "config", "predef", "assert"] -[system.flags] -compile = ['-DBOOST_NO_CXX11_HDR_SYSTEM_ERROR', '-Wno-deprecated-declarations'] +dependencies = ["system", "throw_exception", "functional", "smart_ptr", "config", "io", "range", "detail", "core", "type_traits", "predef", "assert", "iterator", "mpl", "static_assert"] +[subproject.filesystem.flags] +compile = ["-Wno-parentheses-equality", "-Wno-unused-parameter", "-Wno-nested-anon-types", "-Wno-vla-extension", "-Wno-pedantic"] -[filesystem] +[subproject.system] target_type = "static library" -url = "/~https://github.com/boostorg/filesystem" +url = "/~https://github.com/boostorg/system" version = "boost-1.65.0" -dependencies = ["throw_exception", "functional", "smart_ptr", "config", "io", "range", "detail", "system", "core", "type_traits", "predef", "assert", "iterator", "mpl", "static_assert"] -[filesystem.flags] -compile = ["-Wno-parentheses-equality"] +dependencies = ["core", "winapi", "config", "predef", "assert"] +[subproject.system.flags] +compile = ['-DBOOST_NO_CXX11_HDR_SYSTEM_ERROR', '-Wno-deprecated-declarations', '-Wno-language-extension-token'] + -[winapi] +[subproject.winapi] url = "/~https://github.com/boostorg/winapi" version = "boost-1.65.0" -[config] +[subproject.config] url = "/~https://github.com/boostorg/config" version = "boost-1.65.0" -[core] +[subproject.core] url = "/~https://github.com/boostorg/core" version = "boost-1.65.0" -[smart_ptr] +[subproject.smart_ptr] url = "/~https://github.com/boostorg/smart_ptr" version = "boost-1.65.0" -[preprocessor] +[subproject.preprocessor] url = "/~https://github.com/boostorg/preprocessor" version = "boost-1.65.0" -[mpl] +[subproject.mpl] url = "/~https://github.com/boostorg/mpl" version = "boost-1.65.0" dependencies = ["preprocessor"] -[io] +[subproject.io] url = "/~https://github.com/boostorg/io" 
version = "boost-1.65.0" -[detail] +[subproject.detail] url = "/~https://github.com/boostorg/detail" version = "boost-1.65.0" -[functional] +[subproject.functional] url = "/~https://github.com/boostorg/functional" version = "boost-1.65.0" -[throw_exception] +[subproject.throw_exception] url = "/~https://github.com/boostorg/throw_exception" version = "boost-1.65.0" -[iterator] +[subproject.iterator] url = "/~https://github.com/boostorg/iterator" version = "boost-1.65.0" dependencies = ["detail"] -[predef] +[subproject.predef] url = "/~https://github.com/boostorg/predef" version = "boost-1.65.0" -[range] +[subproject.range] url = "/~https://github.com/boostorg/range" version = "boost-1.65.0" -[assert] +[subproject.assert] url = "/~https://github.com/boostorg/assert" version = "boost-1.65.0" -[static_assert] # has sources which should not be included +[subproject.static_assert] # has sources which should not be included target_type = "header only" url = "/~https://github.com/boostorg/static_assert" version = "boost-1.65.0" -[utility] # has sources which should not be included +[subproject.utility] # has sources which should not be included target_type = "header only" url = "/~https://github.com/boostorg/utility" version = "boost-1.65.0" -[type_traits] +[subproject.type_traits] url = "/~https://github.com/boostorg/type_traits" version = "boost-1.65.0" \ No newline at end of file diff --git a/test/c-library/clang-build.toml b/test/c-library/clang-build.toml new file mode 100644 index 0000000..f34f926 --- /dev/null +++ b/test/c-library/clang-build.toml @@ -0,0 +1,76 @@ +name = "mainproject" + +[myexe] +target_type = "executable" +dependencies = ["qhull.qhullcpp", "qhull.qhullstatic_r"] + + +################# Qhull ################# + +[[subproject]] +name = "qhull" +url = "/~https://github.com/qhull/qhull" +version = "v7.2.0" + +# Qhull libraries + +[subproject.qhull] +target_type = "shared library" +directory = "src/libqhull" +[subproject.qhull.flags] +compile = ["-Wno-deprecated-declarations"] + +[subproject.qhull_r] +target_type = "shared library" +directory = "src/libqhull_r" +[subproject.qhull_r.flags] +compile = ["-Wno-deprecated-declarations"] + +[subproject.qhullcpp] +target_type = "static library" +include_directories = ["src"] +sources = ["src/libqhullcpp/*.cpp"] +sources_exclude = ["src/libqhullcpp/qt-qhull.cpp", "src/libqhullcpp/usermem_r-cpp.cpp"] +[subproject.qhullcpp.flags] +compile = ["-Wno-deprecated-declarations", "-Wno-self-assign"] + +[subproject.qhullstatic] +target_type = "static library" +include_directories = ["src"] +sources = ["src/libqhull/*.c"] +[subproject.qhullstatic.flags] +compile = ["-Wno-deprecated-declarations"] + +[subproject.qhullstatic_r] +target_type = "static library" +include_directories = ["src"] +sources = ["src/libqhull_r/*.c"] +[subproject.qhullstatic_r.flags] +compile = ["-Wno-deprecated-declarations"] + +# Qhull executables + +[subproject.qhull-executable] +output_name = "qhull" +sources = ["src/qhull/unix_r.c"] +dependencies = ["qhullstatic_r"] + +[subproject.rbox] +sources = ["src/rbox/rbox.c"] +dependencies = ["qhullstatic"] + +[subproject.qconvex] +sources = ["src/qconvex/qconvex.c"] +dependencies = ["qhullstatic"] + +[subproject.qdelaunay] +sources = ["src/qdelaunay/qdelaun.c"] +dependencies = ["qhullstatic"] + +[subproject.qvoronoi] +sources = ["src/qvoronoi/qvoronoi.c"] +dependencies = ["qhullstatic"] + +[subproject.qhalf] +sources = ["src/qhalf/qhalf.c"] +dependencies = ["qhullstatic"] \ No newline at end of file diff --git 
diff --git a/test/c-library/main.cpp b/test/c-library/main.cpp
new file mode 100644
index 0000000..e9a48b7
--- /dev/null
+++ b/test/c-library/main.cpp
@@ -0,0 +1,47 @@
+#include <array>
+#include <iostream>
+#include <vector>
+
+#include <libqhullcpp/Qhull.h>
+#include <libqhullcpp/QhullFacetList.h>
+#include <libqhullcpp/QhullVertexSet.h>
+
+
+typedef std::array<int, 3> triangle_t;
+typedef struct {
+    double x, y;
+} vector2_t;
+
+
+std::vector<triangle_t> compute_delaunay_triangulation_2D(const std::vector<vector2_t> & points)
+{
+    const int ndim = 2;
+    std::vector<triangle_t> triangles;
+    triangle_t tmp_triangle;
+    int *current_index;
+
+    orgQhull::Qhull qhull;
+    qhull.runQhull("", ndim, points.size(), (coordT *) points.data(), "d Qt Qbb Qz");
+    for(const auto & facet : qhull.facetList())
+    {
+        if(!facet.isUpperDelaunay())
+        {
+            current_index = &tmp_triangle[0];
+            for(const auto & vertex : facet.vertices())
+            {
+                *current_index++ = vertex.point().id();
+            }
+            triangles.push_back(tmp_triangle);
+        }
+    }
+    return triangles;
+}
+
+
+int main()
+{
+    std::vector<vector2_t> points{{0, 0}, {1, 0}, {0, 1}, {1, 1}};
+    auto triangles = compute_delaunay_triangulation_2D(points);
+    for( auto& tri : triangles )
+        std::cerr << tri[0] << " " << tri[1] << " " << tri[2] << std::endl;
+}
\ No newline at end of file
diff --git a/test/external_scripts/main.cpp b/test/external_scripts/main.cpp
index fef7352..e651789 100644
--- a/test/external_scripts/main.cpp
+++ b/test/external_scripts/main.cpp
@@ -1,7 +1,7 @@
 #include <iostream>
 #include "version.h"
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << "the version is " << VERSION_MAJOR << "." << VERSION_MINOR << "." << VERSION_PATCH << std::endl;
     return 0;
diff --git a/test/multi_target_external/clang-build.toml b/test/multi_target_external/clang-build.toml
index 521c471..2c75ee5 100644
--- a/test/multi_target_external/clang-build.toml
+++ b/test/multi_target_external/clang-build.toml
@@ -1,12 +1,10 @@
 [myexe]
 output_name = "runLib"
 dependencies = ["mylib"]
-directory = "myexe"
 
 [mylib]
 version = "0.0.0"
 target_type = "shared library"
-directory = "mylib"
 dependencies = ["Eigen"]
 
 [Eigen]
diff --git a/test/multi_target_external/myexe/main.cpp b/test/multi_target_external/myexe/main.cpp
index 44398e0..03c454e 100644
--- a/test/multi_target_external/myexe/main.cpp
+++ b/test/multi_target_external/myexe/main.cpp
@@ -2,7 +2,7 @@
 
 #include <iostream>
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << "Hello! mylib::calculate() returned " << mylib::calculate() << std::endl;
     return 0;
diff --git a/test/mwe/hello.cpp b/test/mwe/hello.cpp
index 0ee3f06..f7f802c 100644
--- a/test/mwe/hello.cpp
+++ b/test/mwe/hello.cpp
@@ -1,6 +1,6 @@
 #include <iostream>
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << "Hello!" << std::endl;
     return 0;
diff --git a/test/subproject/myexe/main.cpp b/test/subproject/myexe/main.cpp
index 5591344..1a26311 100644
--- a/test/subproject/myexe/main.cpp
+++ b/test/subproject/myexe/main.cpp
@@ -2,7 +2,7 @@
 
 #include <iostream>
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << "Hello! mylib::triple(3) returned " << mylib::triple(3) << std::endl;
     return 0;
diff --git a/test/test.py b/test/test.py
index f8e092d..daa1178 100644
--- a/test/test.py
+++ b/test/test.py
@@ -11,6 +11,7 @@ from clang_build import clang_build
 
 from clang_build.errors import CompileError
 from clang_build.errors import LinkError
+from clang_build.logging_stream_handler import TqdmHandler as TqdmHandler
 
 def on_rm_error( func, path, exc_info):
     # path contains the path of the file that couldn't be removed
@@ -25,25 +26,19 @@ def clang_build_try_except( args ):
         logger = logging.getLogger('clang_build')
         logger.error('Compilation was unsuccessful:')
         for target, errors in compile_error.error_dict.items():
-            printout = f'Target {target} did not compile. Errors:'
-            for file, output in errors:
-                for out in output:
-                    row = out['row']
-                    column = out['column']
-                    messagetype = out['type']
-                    message = out['message']
-                    printout += f'\n{file}:{row}:{column}: {messagetype}: {message}'
+            printout = f'Target [{target}] did not compile. Errors:\n'
+            printout += ' '.join(errors)
             logger.error(printout)
     except LinkError as link_error:
         logger = logging.getLogger('clang_build')
         logger.error('Linking was unsuccessful:')
         for target, errors in link_error.error_dict.items():
-            printout = f'Target {target} did not link. Errors:\n{errors}'
+            printout = f'Target [{target}] did not link. Errors:\n{errors}'
             logger.error(printout)
 
 class TestClangBuild(unittest.TestCase):
     def test_hello_world_mwe(self):
-        clang_build_try_except(['-d', 'test/mwe', '-p'])
+        clang_build_try_except(['-d', 'test/mwe'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/main'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -54,11 +49,11 @@ def test_compile_error(self):
         with self.assertRaises(CompileError):
-            clang_build.build(clang_build.parse_args(['-d', 'test/mwe_build_error', '-V', '-p']))
+            clang_build.build(clang_build.parse_args(['-d', 'test/mwe_build_error', '-V']))
 
     def test_script_call(self):
         try:
-            subprocess.check_output(['clang-build', '-d', 'test/mwe', '-V', '-p'], stderr=subprocess.STDOUT)
+            subprocess.check_output(['clang-build', '-d', 'test/mwe', '-V'], stderr=subprocess.STDOUT)
         except subprocess.CalledProcessError:
             self.fail('Compilation failed')
 
         try:
@@ -69,26 +64,28 @@ def test_script_call(self):
         self.assertEqual(output, 'Hello!')
 
     def test_hello_world_rebuild(self):
-        clang_build_try_except(['-d', 'test/mwe', '-p'])
+        clang_build_try_except(['-d', 'test/mwe'])
         logger = logging.getLogger('clang_build')
         logger.setLevel(logging.DEBUG)
         stream_capture = io.StringIO()
         ch = logging.StreamHandler(stream_capture)
         ch.setLevel(logging.DEBUG)
         logger.addHandler(ch)
-        clang_build_try_except(['-d', 'test/mwe', '-V', '-p'])
+        clang_build_try_except(['-d', 'test/mwe', '-V'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/main'], stderr=subprocess.STDOUT).decode('utf-8').strip()
         except subprocess.CalledProcessError:
             self.fail('Could not run compiled program')
 
+        clang_build_try_except(['-d', 'test/mwe', '-V', '-f'])
+
         logger.removeHandler(ch)
-        self.assertRegex(stream_capture.getvalue(), r'.*Target \[main\] is already compiled.*')
+        self.assertRegex(stream_capture.getvalue(), r'.*\[main\]: target is already compiled*')
         self.assertEqual(output, 'Hello!')
 
     def test_automatic_include_folders(self):
-        clang_build_try_except(['-d', 'test/mwe_with_default_folders', '-V', '-p'])
+        clang_build_try_except(['-d', 'test/mwe_with_default_folders', '-V'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/main'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -98,7 +95,7 @@ def test_automatic_include_folders(self):
         self.assertEqual(output, 'Calculated Magic: 30')
 
     def test_toml_mwe(self):
-        clang_build_try_except(['-d', 'test/toml_mwe', '-p'])
+        clang_build_try_except(['-d', 'test/toml_mwe'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/runHello'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -108,7 +105,7 @@ def test_toml_custom_folder(self):
-        clang_build_try_except(['-d', 'test/toml_with_custom_folder', '-p'])
+        clang_build_try_except(['-d', 'test/toml_with_custom_folder'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/runHello'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -118,7 +115,7 @@ def test_external_scripts(self):
-        clang_build_try_except(['-d', 'test/external_scripts', '-V', '-p'])
+        clang_build_try_except(['-d', 'test/external_scripts', '-V'])
 
         try:
             output = subprocess.check_output(['./build/default/bin/runHello'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -128,7 +125,7 @@ def test_subproject(self):
-        clang_build_try_except(['-d', 'test/subproject', '-V', '-p'])
+        clang_build_try_except(['-d', 'test/subproject', '-V'])
 
         try:
             output = subprocess.check_output(['./build/mainproject/default/bin/runLib'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -138,17 +135,37 @@ def test_subproject(self):
         self.assertEqual(output, 'Hello! mylib::triple(3) returned 9')
 
     def test_boost_filesystem(self):
-        clang_build_try_except(['-d', 'test/boost-filesystem', '-V', '-p'])
+        clang_build_try_except(['-d', 'test/boost-filesystem', '-V'])
 
         try:
-            output = subprocess.check_output(['./build/myexe/default/bin/myexe', 'build'], stderr=subprocess.STDOUT).decode('utf-8').strip()
+            output = subprocess.check_output(['./build/myproject/default/bin/myexe', 'build'], stderr=subprocess.STDOUT).decode('utf-8').strip()
         except subprocess.CalledProcessError:
             self.fail('Could not run compiled program')
 
         self.assertEqual(output, '"build" is a directory')
 
+    def test_c_library(self):
+        clang_build_try_except(['-d', 'test/c-library', '-V'])
+
+        try:
+            output = subprocess.check_output(['./build/mainproject/default/bin/myexe'], stderr=subprocess.STDOUT).decode('utf-8').strip()
+        except subprocess.CalledProcessError:
+            self.fail('Could not run compiled program')
+
+        self.assertEqual(output, '3 2 0'+os.linesep+'3 1 0')
+
+    def test_build_all(self):
+        clang_build_try_except(['-d', 'test/c-library', '-V', '-a'])
+
+        try:
+            output = subprocess.check_output(['./build/qhull/qhull-executable/default/bin/qhull', '-V'], stderr=subprocess.STDOUT).decode('utf-8').strip()
+        except subprocess.CalledProcessError:
+            self.fail('Could not run a target which should have been built')
+
+        self.assertEqual(output, 'qhull_r 7.2.0 (2015.2.r 2016/01/18)')
+
     # def test_openmp(self):
-    #     clang_build_try_except(['-d', 'test/openmp', '-V', '-p'])
+    #     clang_build_try_except(['-d', 'test/openmp', '-V'])
 
     # try:
     #     output = subprocess.check_output(['./build/default/bin/runHello'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -158,7 +175,7 @@ def test_boost_filesystem(self):
     #     self.assertEqual(output, 'Hello from thread 1, nthreads 8')
 
     # def test_mwe_two_targets(self):
-    #     clang_build_try_except(['-d', 'test/multi_target_external', '-V', '-p'])
+    #     clang_build_try_except(['-d', 'test/multi_target_external', '-V'])
 
     # try:
     #     output = subprocess.check_output(['./build/myexe/default/bin/runLib'], stderr=subprocess.STDOUT).decode('utf-8').strip()
@@ -167,6 +184,16 @@
     #     self.assertEqual(output, 'Hello!')
 
+    def setUp(self):
+        logger = logging.getLogger('clang_build')
+        logger.setLevel(logging.INFO)
+        ch = TqdmHandler()
+        formatter = logging.Formatter('%(message)s')
+        ch.setLevel(logging.INFO)
+        ch.setFormatter(formatter)
+        logger.handlers = []
+        logger.addHandler(ch)
+
     def tearDown(self):
         if _Path('build').exists():
             shutil.rmtree('build', onerror = on_rm_error)
diff --git a/test/toml_mwe/src/main.cpp b/test/toml_mwe/src/main.cpp
index 2f5f5c9..7b94ba1 100644
--- a/test/toml_mwe/src/main.cpp
+++ b/test/toml_mwe/src/main.cpp
@@ -1,7 +1,7 @@
 #include
 #include
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << main_print() << std::endl;
     return 0;
diff --git a/test/toml_with_custom_folder/clang-build.toml b/test/toml_with_custom_folder/clang-build.toml
index b01fd33..d7de943 100644
--- a/test/toml_with_custom_folder/clang-build.toml
+++ b/test/toml_with_custom_folder/clang-build.toml
@@ -1,5 +1,4 @@
 [myexe]
 output_name = "runHello"
-[myexe.sources]
 include_directories = ["include_custom"]
-source_directories = ["src_custom"]
\ No newline at end of file
+sources = ["src_custom/*"]
\ No newline at end of file
diff --git a/test/toml_with_custom_folder/src_custom/main.cpp b/test/toml_with_custom_folder/src_custom/main.cpp
index 2f5f5c9..7b94ba1 100644
--- a/test/toml_with_custom_folder/src_custom/main.cpp
+++ b/test/toml_with_custom_folder/src_custom/main.cpp
@@ -1,7 +1,7 @@
 #include
 #include
 
-int main(int argc, char ** argv)
+int main()
 {
     std::cerr << main_print() << std::endl;
     return 0;
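
For reference, the project-file convention exercised by the test configurations in this patch can be summarized in a small sketch. All names, paths and the repository URL below are purely illustrative; what is taken from the configurations above is only the shape: `sources`, `sources_exclude` and `include_directories` take lists of files or globbing patterns (not folders), subprojects are declared with a `[[subproject]]` table plus `[subproject.<target>]` tables, and a root target refers to a subproject's target as "<subproject>.<target>".

    name = "exampleproject"

    [myexe]
    target_type = "executable"
    # refer to a subproject target as "<subproject>.<target>"
    dependencies = ["mysub.mylib"]

    [[subproject]]
    name = "mysub"
    url = "/~https://github.com/example/mysub"   # illustrative URL
    version = "1.0"

    [subproject.mylib]
    target_type = "static library"
    include_directories = ["include"]
    sources = ["src/*.cpp"]                     # globbing patterns or files, not folders
    sources_exclude = ["src/platform_specific.cpp"]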