# Source code for e3_build_tools.process

"""Module for processes."""

import logging
import pathlib
import pprint
from copy import deepcopy
from typing import Dict, List, Set, Tuple, cast

from e3_build_tools import definition, utils
from e3_build_tools.builder import Builder
from e3_build_tools.exceptions import (
    DependencyResolutionError,
    FetchDataException,
    ModuleExistsException,
    NoModuleChangesException,
    ProcessException,
)
from e3_build_tools.fs.environment import InstalledEnvironment
from e3_build_tools.git.registry import GitLabCommitPayload, WrapperRegistry
from e3_build_tools.git.tag import Tag
from e3_build_tools.logging import set_up_logger, CustomLogger

# Install the project's CustomLogger subclass so that every logger created
# from here on (including this module's) has the extra `highlight` method.
logging.setLoggerClass(CustomLogger)
logger: CustomLogger = cast(CustomLogger, logging.getLogger(__name__))
class EnvironmentBuildProcess:
    """Class for building an e3 environment from a specification.

    Orchestrates the full pipeline: setup -> fetch -> curate -> resolve ->
    build. Each stage stores its result on the instance for the next stage.
    """

    def __init__(
        self,
        specification: pathlib.Path,
        build_dir: pathlib.Path,
        install_path: pathlib.Path,
        use_ssh: bool,
        token: str,
        verbose: bool,
        log_file: pathlib.Path,
        jobs: int,
    ) -> None:
        """Store configuration and create the registry and builder.

        Args:
            specification: Path to the environment specification file.
            build_dir: Directory in which modules are built.
            install_path: Target installation prefix.
            use_ssh: Whether the builder should clone over SSH.
            token: Private token for the GitLab wrapper registry.
            verbose: Enables verbose logging.
            log_file: Path to the log file.
            jobs: Number of parallel build jobs.
        """
        self.log_file = log_file
        self.verbose = verbose
        set_up_logger(self.verbose, self.log_file)
        self.specification = specification
        # From EPICS BASE configure/CONFIG_SITE the INSTALL_PATH must be an absolute path.
        self.install_path = install_path.resolve()
        logger.highlight("Retrieving data.")
        self.registry = WrapperRegistry(private_token=token)
        self.builder = Builder(build_dir, use_ssh=use_ssh, jobs=jobs)

    def setup(self) -> None:
        """Do an initial setup necessary in order to build an environment.

        Raises:
            ProcessException: If the specification cannot be processed.
        """
        logger.highlight("Initialising process.")
        try:
            self.environment = definition.BuildDefinition.from_specification(
                self.specification
            )
        except (OSError, ModuleExistsException, TypeError) as e:
            logger.error("Failure processing specification. %s", e)
            # Chain the original error so the root cause survives in tracebacks.
            raise ProcessException() from e
        logger.highlight(
            f"You will be processing a specification {self.environment.name} consisting of:"
        )
        logger.info(self.environment)

    def fetch(self) -> None:
        """Fetch data from remotes sources.

        Raises:
            ProcessException: If remote data for any module cannot be fetched.
        """
        logger.highlight("Retriving information from modules...")
        try:
            for module in self.environment.modules.values():
                module.fetch_remote_data(registry=self.registry)
        except FetchDataException as e:
            logger.error("Failure fetching data.")
            raise ProcessException() from e
        for module in self.environment.modules.values():
            for ref in module.versions:
                module.update_deps(ref=ref)
        # We need to do this after having fetched version information,
        # in part because base and require versions are not known earlier
        self.target_environment = InstalledEnvironment(
            self.install_path,
            self.environment.base_version,
            self.environment.require_version,
        )

    def curate(self) -> None:
        """Remove installed module versions from build definition."""
        logger.highlight("Beginning to curate definition...")
        # Iterate over a deep copy: versions are removed from self.environment
        # inside the loop, and mutating a mapping while iterating it is an error.
        for module in deepcopy(self.environment.modules).values():
            for ref in module.versions:
                version = module.versions[ref]["version_string"]
                if self.target_environment.has(module.name, version):
                    self.environment.modules[module.name].remove_version(ref)

    def resolve(self) -> None:
        """Resolve dependencies and construct the build order.

        Raises:
            ProcessException: If dependency resolution fails.
        """
        logger.highlight("Establishing build order...")
        try:
            self.build_order = definition.Resolver().get_sorted_order(self.environment)
        except DependencyResolutionError as e:
            logger.error("Failure resolving build. %s", e)
            raise ProcessException() from e

    def build(self) -> None:
        """Build and install each module from the definition.

        Raises:
            ProcessException: If any module fails to build.
        """
        logger.highlight("Beginning to build...")
        successes = []
        failures = []
        for name in self.build_order:
            new_successes, new_failures = self.builder.build(
                self.environment.modules[name], self.target_environment
            )
            successes.extend(new_successes)
            failures.extend(new_failures)
        # Results are always reported; a non-empty failure list aborts the run.
        self._print_build_results(successes, failures)
        if failures:
            raise ProcessException()

    def _print_build_results(
        self, successes: List[Tuple[str, str]], failures: List[Tuple[str, str]]
    ) -> None:
        """Print the results of a build run."""
        if not successes and not failures:
            logger.highlight("Nothing to install.")
            return
        if successes:
            logger.highlight("The following modules were installed:")
            for module, ref in successes:
                logger.info(f"Module {module}, {ref}")
        if not failures:
            logger.info("Build successful.")
            return
        logger.highlight("The following modules failed to build:")
        # Set comprehension removes duplicates (a module may fail for several refs).
        failed_module_names = {name for name, _ in failures}

        # Pretty print the failing modules in a tree style following the
        # dependencies. Under each failing module, other failing modules
        # that depend on it will be written with the previous indent + 1.
        def flatten_and_reverse_dependencies(
            modules: Set[str],
        ) -> Dict[str, List[str]]:
            # Map each failed module to the FAILED modules that depend on it.
            # Dependents are restricted to the failed set: any other dependent
            # would not be a key of the tree and would raise KeyError when the
            # tree is printed below.
            dependencies: Dict[str, List[str]] = {}
            for module in modules:
                dependencies[module] = [
                    dependent
                    for dependent in modules
                    if module in self.environment.modules[dependent].dependencies
                ]
            return dependencies

        failed_modules_tree = flatten_and_reverse_dependencies(failed_module_names)

        # NOTE(review): the tree is written with print() rather than the module
        # logger — confirm stdout is the intended destination for this report.
        def print_dependency_tree(
            node: str,
            indentation_level: int = 0,
            indentation_char: str = " ",
        ) -> None:
            # Assumes the dependency graph is acyclic (guaranteed by the
            # resolver having succeeded before any build was attempted).
            for module_name, version in (
                (name, ver) for name, ver in failures if name == node
            ):
                print(
                    f"{indentation_level * 4 * indentation_char}{module_name}: {version}"
                )
            for child in failed_modules_tree[node]:
                print_dependency_tree(child, indentation_level + 1)

        # Roots are failed modules that no other failed module depends on.
        roots = set(failed_modules_tree.keys()).difference(
            *failed_modules_tree.values()
        )
        for root in roots:
            print_dependency_tree(root)
class SpecificationGenerationProcess(EnvironmentBuildProcess):
    """Class for generating an e3 specification file.

    Reuses the build pipeline of :class:`EnvironmentBuildProcess` to test a
    formula, then commits the resulting version changes and writes the
    specification.
    """

    def __init__(
        self,
        formula: pathlib.Path,
        build_dir: pathlib.Path,
        install_path: pathlib.Path,
        use_ssh: bool,
        group_id: int,
        token: str,
        verbose: bool,
        log_file: pathlib.Path,
        branch: str,
    ) -> None:
        """Store configuration and create the registry and builder.

        Args:
            formula: Path to the formula file.
            build_dir: Directory in which modules are built.
            install_path: Target installation prefix (used for the test build).
            use_ssh: Whether the builder should clone over SSH.
            group_id: Optional GitLab top group id; 0/None uses the default.
            token: Private token for the GitLab wrapper registry.
            verbose: Enables verbose logging.
            log_file: Path to the log file.
            branch: Working branch to commit to; empty string means the
                default branch (and enables tagging).
        """
        self.formula = formula
        # From EPICS BASE configure/CONFIG_SITE the INSTALL_PATH must be an
        # absolute path.
        self.install_path = install_path.resolve()
        self.verbose = verbose
        self.log_file = log_file
        self.branch = branch
        if group_id:
            self.registry = WrapperRegistry(top_group=group_id, private_token=token)
        else:
            self.registry = WrapperRegistry(private_token=token)
        self.builder = Builder(build_dir, use_ssh=use_ssh)
        set_up_logger(self.verbose, self.log_file)

    def setup(self) -> None:
        """Do an initial setup in order to generate a specification file from a formula.

        Raises:
            ProcessException: If the formula cannot be processed, or if no
                branch was given and a module reference is not 'latest'.
        """
        try:
            self.environment: definition.SpecificationDefinition = (
                definition.SpecificationDefinition.from_formula(self.formula)
            )
            # Without an explicit working branch, every module must track
            # 'latest' so that versions can be bumped automatically.
            if not self.branch and any(
                starting_ref != "latest"
                for starting_ref in self.environment.fetch_starting_reference().values()
            ):
                logger.error("Must use 'latest' as reference in formula")
                raise ProcessException()
        except (
            OSError,
            ModuleExistsException,
            TypeError,
        ) as e:
            logger.exception("Failure processing formula %s", self.formula)
            raise ProcessException() from e
        logger.highlight("You will be attempting to process a formula consisting of:")
        logger.info(self.environment)

    def curate(self) -> None:
        """Rename 'latest' references in definition, and update the version substitutions."""
        logger.debug("Beginning to curate definition")
        # Iterate over a deep copy since the real definition is mutated below.
        for module in deepcopy(self.environment.modules).values():
            for ref in module.versions:
                if ref == "latest":
                    logger.debug(
                        f"Converting ref from {ref!r} to {module.project.default_branch!r}"
                    )
                    head = module.project.default_branch
                    # Re-key the version entry from 'latest' to the branch head.
                    self.environment.modules[module.name].versions[
                        head
                    ] = self.environment.modules[module.name].versions.pop(ref)
                    # Bump the build number read from the module's config file
                    # and propagate the new version through the definition.
                    new_version = utils.increment_build_number(
                        self.registry.read_version_from(
                            module.name,
                            ref=head,
                            config=module.version_config_file,
                            var=module.version_config_var,
                        )
                    )
                    self.environment.update_module_version(module.name, new_version)
                    self.environment.update_global_dependency_from_module(module.name)

    def build(self) -> None:
        """Build and install each module from the definition.

        Raises:
            ProcessException: If any module fails to build.
        """
        logger.highlight("Testing formula...")
        successes = []
        failures = []
        for name in self.build_order:
            # base and require are built as-is; only wrapper modules receive
            # the generated version substitutions.
            if name in ("base", "require"):
                substitutions = None
            else:
                substitutions = self._gather_project_changes(name)
            new_successes, new_failures = self.builder.build(
                self.environment.modules[name],
                self.target_environment,
                with_substitutions=substitutions,
            )
            successes.extend(new_successes)
            failures.extend(new_failures)
        self._print_build_results(successes, failures)
        if failures:
            raise ProcessException()

    def teardown(self) -> None:
        """Delete installed environment."""

        def recursive_remove(path: pathlib.Path) -> None:
            # Symlinks are unlinked rather than descended into, so targets
            # outside the installation are never touched.
            for child in path.iterdir():
                if child.is_file() or child.is_symlink():
                    child.unlink()
                else:
                    recursive_remove(child)
            path.rmdir()

        logger.highlight(
            f"Deleting installation at {self.target_environment.base_directory}..."
        )
        recursive_remove(self.target_environment.base_directory)

    def generate(self, dry_run: bool = False) -> None:
        """Generate the specification file.

        Args:
            dry_run: If True, only log the changes that would be made instead
                of committing them and writing the specification.

        Raises:
            ProcessException: If a module's EPICS_BASE version is invalid.
        """
        logger.highlight("Generating specification...")
        payloads: List[Tuple[str, GitLabCommitPayload]] = []
        for name in self.build_order:
            try:
                payloads.append((name, self._fetch_commit_payload(name, self.branch)))
            except NoModuleChangesException:
                # base/require and unchanged modules are skipped.
                continue
        if dry_run:
            logger.highlight("Dry-run would make following changes")
        for name, payload in payloads:
            submodule_changes = self._gather_project_submodule_changes(name)
            if not dry_run:
                try:
                    self._update_module(name, payload, submodule_changes)
                except ValueError as e:
                    logger.error("Invalid EPICS_BASE version in %s", name)
                    raise ProcessException() from e
            else:
                logger.info(pprint.pformat(payload))
                if submodule_changes:
                    logger.info(f"Submodule changes: {submodule_changes}")
        if not dry_run:
            self.environment.to_specification()

    def _gather_project_changes(self, name: str) -> Dict[str, Dict[str, str]]:
        """Return the combined file substitutions for the given module."""
        return self.environment.combine_substitutions(name)

    def _gather_project_submodule_changes(self, name: str) -> Dict[str, str]:
        """Return the pending submodule updates for the given module."""
        return self.environment.submodule_updates(name)

    def _fetch_commit_payload(
        self,
        name: str,
        branch: str,
    ) -> GitLabCommitPayload:
        """Fetch the commit payload to be used to update a module.

        Raises:
            NoModuleChangesException: If there are no changes to the module.
        """
        module = self.environment.modules[name]
        # base and require are never modified by specification generation.
        if module.name in ("base", "require"):
            raise NoModuleChangesException
        # since there only should be a single version of any module in the formula we only take the first of .versions
        starting_reference = next(iter(module.versions))
        substitutions = self._gather_project_changes(name)
        return self.registry.create_commit_payload(
            substitutions,
            starting_reference,
            module.name,
            branch if branch else module.project.default_branch,
        )

    def _update_module(
        self,
        name: str,
        payload: GitLabCommitPayload,
        submodule_changes: Dict[str, str],
    ) -> None:
        """Apply changes to the module's remote source."""
        # A tag is only created when committing to the default branch; with a
        # working branch the branch name becomes the version entry instead.
        create_tag = not self.branch
        module = self.environment.modules[name]
        commit = self.registry.commit_change(name=module.name, payload=payload)
        if submodule_changes:
            assert isinstance(payload["branch"], str)
            self.registry.update_submodule(
                name,
                submodule_changes["path"],
                submodule_changes["sha"],
                payload["branch"],
            )
        # Read back the just-committed configuration to determine the base,
        # require, and module versions for the tag.
        release_file = self.registry.get_config_from(
            module.name, ref=commit.id, file=module.install_config_file
        )
        release_defs = utils.read_makefile_definitions(release_file)
        base_ver = utils.extract_base_version(release_defs["EPICS_BASE"])
        require_ver = release_defs["E3_REQUIRE_VERSION"]
        module_ver = self.registry.read_version_from(
            module.name,
            ref=commit.id,
            config=module.version_config_file,
            var=module.version_config_var,
        )
        if create_tag:
            new_tag = Tag.from_components(
                base_ver=base_ver,
                require_ver=require_ver,
                module_ver=module_ver,
                commit_sha=commit.id,
            )
            self.registry.tag_project(
                module.name, new_tag.name, module.project.default_branch
            )
            entry = next(iter(module.versions))
            logger.debug(f"Renaming {entry} to {new_tag.name}")
            module.versions[new_tag.name] = module.versions.pop(entry)
        else:
            entry = next(iter(module.versions))
            module.versions[self.branch] = module.versions.pop(entry)