CLI

atopile.cli

build

CLI command definition for ato build.

logger module-attribute

logger = getLogger(__name__)

build

build(
    entry=None,
    build=[],
    target=[],
    option=[],
    frozen=None,
    keep_picked_parts=None,
    keep_net_names=None,
    standalone=False,
)

Build the specified --target(s) or the targets specified by the build config. Optionally specify a different entrypoint with the argument ENTRY. eg. ato build --target my_target path/to/source.ato:module.path

Source code in src/atopile/cli/build.py
def build(
    entry: Annotated[str | None, typer.Argument()] = None,
    build: Annotated[list[str], typer.Option("--build", "-b", envvar="ATO_BUILD")] = [],
    target: Annotated[
        list[str], typer.Option("--target", "-t", envvar="ATO_TARGET")
    ] = [],
    option: Annotated[
        list[str], typer.Option("--option", "-o", envvar="ATO_OPTION")
    ] = [],
    frozen: Annotated[
        bool | None,
        typer.Option(
            help="PCB must be rebuilt without changes. Useful in CI",
            envvar="ATO_FROZEN",
        ),
    ] = None,
    keep_picked_parts: bool | None = None,
    keep_net_names: bool | None = None,
    standalone: bool = False,
):
    """
    Build the specified --target(s) or the targets specified by the build config.
    Optionally specify a different entrypoint with the argument ENTRY.
    eg. `ato build --target my_target path/to/source.ato:module.path`
    """
    import json

    import atopile.config
    from atopile import buildutil
    from atopile.cli.common import create_build_contexts
    from atopile.config import BuildType
    from faebryk.library import _F as F
    from faebryk.libs.exceptions import accumulate, log_user_errors
    from faebryk.libs.picker import lcsc

    build_ctxs = create_build_contexts(entry, build, target, option, standalone)

    for build_ctx in build_ctxs:
        if keep_picked_parts is not None:
            build_ctx.keep_picked_parts = keep_picked_parts

        if keep_net_names is not None:
            build_ctx.keep_net_names = keep_net_names

        if frozen is not None:
            build_ctx.frozen = frozen
            if frozen:
                if keep_picked_parts is False:  # is, ignores None
                    raise errors.UserBadParameterError(
                        "`--keep-picked-parts` conflict with `--frozen`"
                    )

                build_ctx.keep_picked_parts = True

                if keep_net_names is False:  # is, ignores None
                    raise errors.UserBadParameterError(
                        "`--keep-net-names` conflict with `--frozen`"
                    )

                build_ctx.keep_net_names = True

    with accumulate() as accumulator:
        for build_ctx in build_ctxs:
            logger.info("Building '%s'", build_ctx.name)
            with accumulator.collect(), log_user_errors(logger):
                match build_ctx.build_type:
                    case BuildType.ATO:
                        app = _init_ato_app(build_ctx)
                    case BuildType.PYTHON:
                        app = _init_python_app(build_ctx)
                        app.add(F.is_app_root())
                    case _:
                        raise ValueError(f"Unknown build type: {build_ctx.build_type}")

                # TODO: these should be drawn from the buildcontext like everything else
                lcsc.BUILD_FOLDER = build_ctx.paths.build
                lcsc.LIB_FOLDER = build_ctx.paths.component_lib
                lcsc.LIB_FOLDER.mkdir(exist_ok=True, parents=True)
                lcsc.KICAD_PROJECT_PATH = build_ctx.paths.kicad_project.parent

                # TODO: add a mechanism to override the following with custom build machinery # noqa: E501  # pre-existing
                buildutil.build(build_ctx, app)

        with accumulator.collect():
            project_context = atopile.config.get_project_context()

            # FIXME: this should be done elsewhere, but there's no other "overview"
            # that can see all the builds simultaneously
            manifest = {}
            manifest["version"] = "2.0"
            for ctx in build_ctxs:
                if ctx.paths.layout:
                    by_layout_manifest = manifest.setdefault(
                        "by-layout", {}
                    ).setdefault(str(ctx.paths.layout), {})
                    by_layout_manifest["layouts"] = str(
                        ctx.paths.output_base.with_suffix(".layouts.json")
                    )

            manifest_path = project_context.project_path / "build" / "manifest.json"
            manifest_path.parent.mkdir(exist_ok=True, parents=True)
            with open(manifest_path, "w", encoding="utf-8") as f:
                json.dump(manifest, f)

    logger.info("Build successful! 🚀")

cli

app module-attribute

app = Typer(
    no_args_is_help=True,
    pretty_exceptions_enable=bool(FLOG_FMT),
)

python_interpreter_path

python_interpreter_path(ctx, value)

Print the current python interpreter path.

Source code in src/atopile/cli/cli.py
def python_interpreter_path(ctx: typer.Context, value: bool):
    """Print the current python interpreter path."""
    if not value or ctx.resilient_parsing:
        return
    typer.echo(sys.executable)
    raise typer.Exit()

atopile_src_path

atopile_src_path(ctx, value)

Print the atopile source path.

Source code in src/atopile/cli/cli.py
def atopile_src_path(ctx: typer.Context, value: bool):
    """Print the current python interpreter path."""
    if not value or ctx.resilient_parsing:
        return
    typer.echo(Path(__file__).parent.parent)
    raise typer.Exit()

version_callback

version_callback(ctx, value)
Source code in src/atopile/cli/cli.py
def version_callback(ctx: typer.Context, value: bool):
    if not value or ctx.resilient_parsing:
        return
    typer.echo(version("atopile"))
    raise typer.Exit()

cli

cli(
    ctx,
    non_interactive=False,
    debug=False,
    verbose=0,
    python_path=False,
    atopile_path=False,
    version=None,
)
Source code in src/atopile/cli/cli.py
@app.callback()
def cli(
    ctx: typer.Context,
    non_interactive: Annotated[
        bool, typer.Option("--non-interactive", envvar="ATO_NON_INTERACTIVE")
    ] = False,
    debug: Annotated[
        bool,
        typer.Option("--debug", help="Wait to attach debugger on start"),
    ] = False,
    verbose: Annotated[
        int,
        typer.Option("--verbose", "-v", count=True, help="Increase verbosity"),
    ] = 0,
    python_path: Annotated[
        bool, typer.Option(hidden=True, callback=python_interpreter_path)
    ] = False,
    atopile_path: Annotated[
        bool, typer.Option(hidden=True, callback=atopile_src_path)
    ] = False,
    version: Annotated[
        bool | None,
        typer.Option("--version", callback=version_callback, is_eager=True),
    ] = None,
):
    if debug:
        import debugpy  # pylint: disable=import-outside-toplevel

        debug_port = 5678
        debugpy.listen(("localhost", debug_port))
        logger.info("Starting debugpy on port %s", debug_port)
        debugpy.wait_for_client()

    # set the log level
    if verbose == 1:
        handler.hide_traceback_types = ()
        handler.tracebacks_show_locals = True
    elif verbose == 2:
        handler.tracebacks_suppress_map = {}  # Traceback through atopile infra
    elif verbose >= 3:
        logger.root.setLevel(logging.DEBUG)
        handler.traceback_level = logging.WARNING

    if ctx.invoked_subcommand:
        check_for_update()

        # Initialize telemetry
        telemetry.setup_telemetry_data(ctx.invoked_subcommand)

    if not non_interactive and ctx.invoked_subcommand != "configure":
        configure.do_configure_if_needed()

main

main()
Source code in src/atopile/cli/cli.py
def main():
    app()
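
A minimal sketch of exercising the app in-process (useful in tests), using Typer's standard test runner; the subcommand names are assumed to match the modules documented here:

from typer.testing import CliRunner

from atopile.cli.cli import app

runner = CliRunner()
# `--help` avoids needing a project on disk; a real run might use e.g. ["build", "-b", "default"]
result = runner.invoke(app, ["--help"])
assert result.exit_code == 0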

common

Common CLI writing utilities.

log module-attribute

log = getLogger(__name__)

get_entry_arg_file_path

get_entry_arg_file_path(entry)
Source code in src/atopile/cli/common.py
def get_entry_arg_file_path(entry: str | None) -> tuple[AddrStr | None, Path]:
    # parse the entry address if provided, otherwise leave it as None

    if entry is None:
        entry_arg_file_path = Path.cwd()
    else:
        entry = AddrStr(entry)

        if address.get_file(entry) is None:
            raise errors.UserBadParameterError(
                f"Invalid entry address {entry} - entry must specify a file.",
                title="Bad 'entry' parameter",
            )

        entry_arg_file_path = (
            Path(address.get_file(entry)).expanduser().resolve().absolute()
        )

    return entry, entry_arg_file_path
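
A short sketch of how entry addresses resolve here (the file path is illustrative):

from atopile.cli.common import get_entry_arg_file_path

# No entry given: fall back to the current working directory
entry, path = get_entry_arg_file_path(None)  # (None, Path.cwd())

# Entry given as file:node - the AddrStr is kept and its file part is resolved
entry, path = get_entry_arg_file_path("elec/src/main.ato:App")
# entry == AddrStr("elec/src/main.ato:App"), path == resolved absolute Path to main.ato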

get_project_config

get_project_config(entry_arg_file_path)
Source code in src/atopile/cli/common.py
def get_project_config(entry_arg_file_path: Path) -> atopile.config.ProjectConfig:
    try:
        project_config = atopile.config.get_project_config_from_addr(
            str(entry_arg_file_path)
        )
    except FileNotFoundError as ex:
        # FIXME: this raises an exception when the entry is not in a project
        raise errors.UserBadParameterError(
            f"Could not find project from path {str(entry_arg_file_path)}. "
            "Is this file path within a project?"
        ) from ex

    return project_config

check_entry_arg_file_path

check_entry_arg_file_path(entry, entry_arg_file_path)
Source code in src/atopile/cli/common.py
def check_entry_arg_file_path(
    entry: AddrStr | None, entry_arg_file_path: Path
) -> AddrStr | None:
    entry_addr_override = None

    if entry:
        if entry_arg_file_path.is_file():
            if entry_section := address.get_entry_section(entry):
                entry_addr_override = address.from_parts(
                    str(entry_arg_file_path.absolute()),
                    entry_section,
                )
            else:
                raise errors.UserBadParameterError(
                    "If an entry of a file is specified, you must specify"
                    " the node within it you want to build.",
                    title="Bad 'entry' parameter",
                )

        elif entry_arg_file_path.is_dir():
            pass

        elif not entry_arg_file_path.exists():
            raise errors.UserBadParameterError(
                "The entry you have specified does not exist.",
                title="Bad 'entry' parameter",
            )
        else:
            raise ValueError(
                f"Unexpected entry path type {entry_arg_file_path} - this should never happen!"  # noqa: E501  # pre-existing
            )

    return entry_addr_override

check_compiler_versions

check_compiler_versions(config)

Check that the compiler version is compatible with the version used to build the project.

Source code in src/atopile/cli/common.py
def check_compiler_versions(config: atopile.config.ProjectConfig):
    """
    Check that the compiler version is compatible with the version
    used to build the project.
    """
    assert config.location is not None
    dependency_cfgs = (
        faebryk.libs.exceptions.downgrade(FileNotFoundError)(
            atopile.config.get_project_config_from_path
        )(p)
        for p in config.location.glob(".ato/modules/**/ato.yaml")
    )

    for cltr, cfg in faebryk.libs.exceptions.iter_through_errors(
        itertools.chain([config], dependency_cfgs)
    ):
        if cfg is None:
            continue

        with cltr():
            semver_str = cfg.ato_version
            # FIXME: this is a hack for the moment to get around us breaking
            # the versioning scheme in the ato.yaml files
            for operator in version.OPERATORS:
                semver_str = semver_str.replace(operator, "")

            built_with_version = version.parse(semver_str)

            if not version.match_compiler_compatability(built_with_version):
                raise version.VersionMismatchError(
                    f"{cfg.location} ({cfg.ato_version}) can't be"
                    " built with this version of atopile "
                    f"({version.get_installed_atopile_version()})."
                )
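
The operator-stripping hack above reduces a spec such as "^0.9.0" to a bare version before parsing. A rough equivalent, with a hypothetical operator set standing in for version.OPERATORS:

# Hypothetical operator set - the real one lives in atopile's version module
OPERATORS = ("^", "~", ">=", "<=", "==", ">", "<")

semver_str = "^0.9.0"
for operator in OPERATORS:
    semver_str = semver_str.replace(operator, "")

assert semver_str == "0.9.0"  # this is what gets handed to version.parse()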

configure_project_context

configure_project_context(entry, standalone=False)
Source code in src/atopile/cli/common.py
def configure_project_context(
    entry: str | None, standalone: bool = False
) -> tuple[atopile.config.ProjectConfig, atopile.config.ProjectContext]:
    entry, entry_arg_file_path = get_entry_arg_file_path(entry)

    if standalone:
        if not entry:
            raise errors.UserBadParameterError(
                "You must specify an entry to build with the --standalone option"
            )
        if not entry_arg_file_path.exists():
            raise errors.UserBadParameterError(
                f"The file you have specified does not exist: {entry_arg_file_path}"
            )

        project_config = atopile.config.ProjectConfig(
            location=Path.cwd(),
            ato_version=f"^{version.get_installed_atopile_version()}",
            paths=atopile.config.ProjectPaths(
                layout=Path.cwd() / "standalone",
                src=Path.cwd(),
            ),
            builds={"default": atopile.config.ProjectBuildConfig(targets=[])},
        )
    else:
        project_config = get_project_config(entry_arg_file_path)

    # Make sure I and all my sub-configs have appropriate versions
    check_compiler_versions(project_config)

    log.info("Using project %s", project_config.location)

    # Configure project context
    project_ctx = atopile.config.ProjectContext.from_config(project_config)
    atopile.config.set_project_context(project_ctx)

    return project_config, project_ctx

create_build_contexts

create_build_contexts(
    entry, build, target, option, standalone
)
Source code in src/atopile/cli/common.py
def create_build_contexts(
    entry: str | None,
    build: Iterable[str],
    target: Iterable[str],
    option: Iterable[str],
    standalone: bool,
) -> list[atopile.config.BuildContext]:
    entry, entry_arg_file_path = get_entry_arg_file_path(entry)

    config, project_ctx = configure_project_context(entry, standalone)

    # These checks are only relevant if we're **building** standalone
    # TODO: Some of the contents should be moved out of the project context
    if standalone:
        if not entry_arg_file_path.is_file():
            raise errors.UserBadParameterError(
                "The path you're building with the --standalone"
                f" option must be a file {entry_arg_file_path}"
            )
        assert entry is not None  # Handled by configure_project_context
        if not address.get_entry_section(entry):
            raise errors.UserBadParameterError(
                "You must specify what to build within a file to build with the"
                " --standalone option"
            )

    # add custom config overrides
    if option:
        raise errors.UserNotImplementedError(
            "Custom config overrides have been removed in a refactor. "
            "It's planned to re-add them in a future release. "
            "If this is a blocker for you, please raise an issue. "
            "In the meantime, you can use the `ato.yaml` file to set these options."
        )

    # if we set an entry-point, we now need to deal with that
    entry_addr_override = check_entry_arg_file_path(entry, entry_arg_file_path)

    # Make build contexts
    if build_names := build or config.builds.keys():
        build_ctxs: list[atopile.config.BuildContext] = [
            atopile.config.BuildContext.from_config_name(config, build_name)
            for build_name in build_names
        ]
    else:
        build_ctxs = [
            atopile.config.BuildContext.from_config(
                "default", atopile.config.ProjectBuildConfig(), project_ctx
            )
        ]

    for build_ctx in build_ctxs:
        if entry_addr_override is not None:
            build_ctx.entry = entry_addr_override
        if target:
            build_ctx.targets = list(target)

    return build_ctxs

configure

Configure the user's system for atopile development.

yaml module-attribute

yaml = YAML()

CONFIGURED_FOR_PATH module-attribute

CONFIGURED_FOR_PATH = absolute()

logger module-attribute

logger = getLogger(__name__)

config module-attribute

config = Config()

Config

version class-attribute instance-attribute
version = None
install_kicad_plugin class-attribute instance-attribute
install_kicad_plugin = None

get_configured_for_version

get_configured_for_version()

Return the version of atopile that the user's system is configured for.

Source code in src/atopile/cli/configure.py
def get_configured_for_version() -> atopile.version.Version:
    """Return the version of atopile that the user's system is configured for."""
    return atopile.version.clean_version(atopile.version.Version.parse(config.version))

configure

configure()

Configure the user's system for atopile development.

Source code in src/atopile/cli/configure.py
def configure() -> None:
    """
    Configure the user's system for atopile development.
    """
    logger.setLevel(logging.INFO)
    _load_config()
    do_configure()

do_configure_if_needed

do_configure_if_needed()

Configure the user's system for atopile development if it's not already configured.

Source code in src/atopile/cli/configure.py
def do_configure_if_needed() -> None:
    """Configure the user's system for atopile development if it's not already configured."""  # noqa: E501  # pre-existing
    if not CONFIGURED_FOR_PATH.exists():
        rich.print(
            dedent(
                """
            Welcome! :partying_face:

            Looks like you're new to atopile, there's some initial setup we need to do.
            """
            )
        )

    _load_config()

    try:
        if config.version == atopile.version.get_installed_atopile_version():
            return
    except TypeError:
        # Semver appears to do a __req__ by converting the lhs to a type, which
        # doesn't work for None
        pass

    # Otherwise we're configured, but we might need to update
    logger.setLevel(logging.WARNING)  # Quieten output for typical runs
    do_configure()

do_configure

do_configure()

Perform system configuration required for atopile.

Source code in src/atopile/cli/configure.py
def do_configure() -> None:
    """Perform system configuration required for atopile."""
    if config.install_kicad_plugin is None:
        config.install_kicad_plugin = questionary.confirm(
            ":wrench: Install KiCAD plugin?", default=True
        ).ask()

    if config.install_kicad_plugin:
        # FIXME: no idea what's up with this - but seems to help on Windows
        install_kicad_plugin()

    # final steps
    config.version = str(
        atopile.version.clean_version(atopile.version.get_installed_atopile_version())
    )
    _save_config()

install_kicad_plugin

install_kicad_plugin()

Install the kicad plugin.

Source code in src/atopile/cli/configure.py
def install_kicad_plugin() -> None:
    """Install the kicad plugin."""
    # Find the path to kicad's plugin directory
    plugin_loader = f"""
        plugin_path = r"{Path(__file__).parent.parent}"
        import sys
        import importlib

        if plugin_path not in sys.path:
            sys.path.append(plugin_path)

        # if kicad_plugin is already in sys.modules, reload it
        for module in sys.modules:
            if "kicad_plugin" in module:
                importlib.reload(sys.modules[module])

        import kicad_plugin
        """

    def _write_plugin(path: Path):
        # Create the directory if it doesn't exist
        path.mkdir(parents=True, exist_ok=True)

        # Write the plugin loader
        plugin_loader_content = dedent(plugin_loader)
        plugin_loader_path = path / "atopile.py"

        logger.info("Writing plugin loader to %s", plugin_loader_path)
        with plugin_loader_path.open("w", encoding="utf-8") as f:
            f.write(plugin_loader_content)

    kicad_config_search_path = ["~/Documents/KiCad/", "~/.local/share/kicad/"]
    no_plugin_found = True
    for sp in kicad_config_search_path:
        config_path = Path(sp).expanduser().resolve()
        if config_path.exists():
            for p in config_path.glob("*/scripting/plugins"):
                try:
                    _write_plugin(p)
                except FileNotFoundError:
                    _write_plugin(p)
                no_plugin_found = False

    if no_plugin_found:
        logger.warning("KiCAD config path not found. Couldn't install plugin!")

console

console module-attribute

console = get_console()

create

log module-attribute

log = getLogger(__name__)

PROJECT_TEMPLATE module-attribute

PROJECT_TEMPLATE = (
    "https://github.com/atopile/project-template"
)

create_app module-attribute

create_app = Typer()

stuck_user_helper_generator module-attribute

stuck_user_helper_generator = _stuck_user_helper()

ComponentType

Bases: StrEnum

ato class-attribute instance-attribute
ato = auto()
fab class-attribute instance-attribute
fab = auto()

check_name

check_name(name)

Check if a name is valid.

Source code in src/atopile/cli/create.py
def check_name(name: str) -> bool:
    """
    Check if a name is valid.
    """
    if re.match(r"^[a-zA-Z][a-zA-Z0-9_-]*$", name):
        return True
    else:
        return False
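
A few examples of what the pattern above accepts and rejects:

check_name("my-project")     # True - starts with a letter, then letters/digits/_/-
check_name("motor_driver2")  # True
check_name("2fast")          # False - must start with a letter
check_name("bad name")       # False - spaces are not allowed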

help

help(text)

Print help text.

Source code in src/atopile/cli/create.py
def help(text: str) -> None:  # pylint: disable=redefined-builtin
    """Print help text."""
    rich.print("\n" + textwrap.dedent(text).strip() + "\n")

project

project(name=None, repo=None)

Create a new ato project.

Source code in src/atopile/cli/create.py
@create_app.command()
def project(
    name: Annotated[str | None, typer.Argument()] = None,
    repo: Annotated[str | None, typer.Option("--repo", "-r")] = None,
):  # pylint: disable=redefined-builtin
    """
    Create a new ato project.
    """

    # Get a project name
    kebab_name = None
    for _ in stuck_user_helper_generator:
        if not name:
            rich.print(":rocket: What's your project [cyan]name?[/]")
            name = questionary.text("").unsafe_ask()

        if name is None:
            continue

        kebab_name = caseconverter.kebabcase(name)
        if name != kebab_name:
            help(
                f"""
                We recommend using kebab-case ([cyan]{kebab_name}[/])
                for your project name. It makes it easier to use your project
                with other tools (like git) and it embeds nicely into URLs.
                """
            )

            rich.print(f"Do you want to use [cyan]{kebab_name}[/] instead?")
            if questionary.confirm("").unsafe_ask():
                name = kebab_name

        if check_name(name):
            break
        else:
            help(
                "[red]Project names must start with a letter and"
                " contain only letters, numbers, dashes and underscores.[/]"
            )
            name = None

    assert name is not None

    if (
        not repo
        and not questionary.confirm(
            "Would you like to create a new repo for this project?"
        ).unsafe_ask()
    ):
        repo = PROJECT_TEMPLATE

    # Get a repo
    repo_obj: git.Repo | None = None
    for _ in stuck_user_helper_generator:
        if not repo:
            make_repo_url = f"https://github.com/new?name={name}&template_owner=atopile&template_name=project-template"

            help(
                f"""
                We recommend you create a Github repo for your project.

                If you already have a repo, you can respond [yellow]n[/]
                to the next question and provide the URL to your repo.

                If you don't have one, you can respond yes to the next question
                or (Cmd/Ctrl +) click the link below to create one.

                Just select the template you want to use.

                {make_repo_url}
                """
            )

            rich.print(":rocket: Open browser to create Github repo?")
            if questionary.confirm("").unsafe_ask():
                webbrowser.open(make_repo_url)

            rich.print(":rocket: What's the [cyan]repo's URL?[/]")
            repo = questionary.text("").unsafe_ask()

        assert repo is not None

        # Try to download the repo from the user-provided URL
        if Path(name).exists():
            raise click.ClickException(
                f"Directory {name} already exists. Please put the repo elsewhere or"
                " choose a different name."
            )

        try:
            repo_obj = git.Repo.clone_from(repo, name, depth=1)
            break
        except git.GitCommandError as ex:
            help(
                f"""
                [red]Failed to clone repo from {repo}[/]

                {ex.stdout}
                {ex.stderr}
                """
            )
            repo = None

    assert repo_obj is not None
    assert repo_obj.working_tree_dir is not None

    # Configure the project
    do_configure(name, str(repo_obj.working_tree_dir), debug=False)

    # Commit the configured project
    # force the add, because we're potentially
    # modifying things in gitignored locations
    if repo_obj.is_dirty():
        repo_obj.git.add(A=True, f=True)
        repo_obj.git.commit(m="Configure project")
    else:
        rich.print(
            "[yellow]No changes to commit! Seems like the"
            " template you used mightn't be configurable?[/]"
        )

    # If this repo's remote is PROJECT_TEMPLATE, clean up the git history
    if repo_obj.remotes.origin.url == PROJECT_TEMPLATE:
        try:
            robustly_rm_dir(Path(repo_obj.git_dir))
        except (PermissionError, OSError) as ex:
            with downgrade():
                raise errors.UserException(
                    f"Failed to remove .git directory: {repr(ex)}"
                ) from ex

        if not _in_git_repo(Path(repo_obj.working_dir).parent):
            # If we've created this project OUTSIDE an existing git repo
            # then re-init the repo so it has a clean history
            clean_repo = git.Repo.init(repo_obj.working_tree_dir)
            clean_repo.git.add(A=True)
            clean_repo.git.commit(m="Initial commit")

    # Install dependencies listed in the ato.yaml, typically just generics
    do_install(
        to_install=None,
        jlcpcb=False,
        link=True,
        upgrade=True,
        path=repo_obj.working_tree_dir,
    )

    # Wew! New repo created!
    rich.print(f':sparkles: [green]Created new project "{name}"![/] :sparkles:')

build

build(name=None)

Create a new build configuration: adds an entry to ato.yaml and creates a new directory in layout.

Source code in src/atopile/cli/create.py
@create_app.command()
def build(
    name: Annotated[str | None, typer.Argument()] = None,
):
    """
    Create a new build configuration.
    - adds entry to ato.yaml
    - creates a new directory in layout
    """
    if not name:
        name = caseconverter.kebabcase(
            questionary.text("Enter the build name").unsafe_ask()
        )

    try:
        project_config = config.get_project_config_from_path(Path("."))
        project_context = config.ProjectContext.from_config(project_config)
        top_level_path = project_context.project_path
        layout_path = project_context.layout_path
        src_path = project_context.src_path
    except FileNotFoundError:
        raise errors.UserException(
            "Could not find the project directory, are you within an ato project?"
        )

    # Get user input for the entry file and module name
    rich.print("We will create a new ato file and add the entry to the ato.yaml")
    entry = questionary.text(
        "What would you like to call the entry file? (e.g., psuDebug)"
    ).unsafe_ask()

    target_layout_path = layout_path / name
    with tempfile.TemporaryDirectory() as tmpdirname:
        try:
            git.Repo.clone_from(PROJECT_TEMPLATE, tmpdirname)
        except git.GitCommandError as ex:
            raise errors.UserException(
                f"Failed to clone layout template from {PROJECT_TEMPLATE}: {repr(ex)}"
            )
        source_layout_path = Path(tmpdirname) / "elec" / "layout" / "default"
        if not source_layout_path.exists():
            raise errors.UserException(
                f"The specified layout path {source_layout_path} does not exist."
            )
        else:
            target_layout_path.mkdir(parents=True, exist_ok=True)
            shutil.copytree(source_layout_path, target_layout_path, dirs_exist_ok=True)
            # Configure the files in the directory using the do_configure function
            do_configure(name, str(target_layout_path), debug=False)

        # Add the build to the ato.yaml file
        ato_yaml_path = top_level_path / config.CONFIG_FILENAME
        # Check if ato.yaml exists
        if not ato_yaml_path.exists():
            print(
                f"ato.yaml not found in {top_level_path}. Please ensure the file"
                " exists before proceeding."
            )
        else:
            # Load the existing YAML configuration
            yaml = ruamel.yaml.YAML()
            with ato_yaml_path.open("r") as file:
                ato_config = yaml.load(file)

            entry_file = Path(caseconverter.kebabcase(entry)).with_suffix(".ato")
            entry_module = caseconverter.pascalcase(entry)

            # Update the ato_config with the new build information
            if "builds" not in ato_config:
                ato_config["builds"] = {}
            ato_config["builds"][name] = {
                "entry": f"elec/src/{entry_file}:{entry_module}"
            }

            # Write the updated configuration back to ato.yaml
            with ato_yaml_path.open("w") as file:
                yaml.dump(ato_config, file)

        # create a new ato file with the entry file and module
        ato_file = src_path / entry_file
        ato_file.write_text(f"module {entry_module}:\n \tsignal gnd\n")

        rich.print(
            f":sparkles: Successfully created a new build configuration for {name}!"
            " :sparkles:"
        )
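
After a successful run, the builds section written back to ato.yaml gains an entry of roughly this shape (the build name "debug" and entry input "psuDebug" below are illustrative):

builds:
  debug:
    entry: elec/src/psu-debug.ato:PsuDebug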

configure

configure(name, repo_path)

Command useful in developing templates.

Source code in src/atopile/cli/create.py
@create_app.command(hidden=True)
def configure(name: str, repo_path: str):
    """Command useful in developing templates."""
    do_configure(name, repo_path, debug=True)

do_configure

do_configure(name, _repo_path, debug)

Configure the project.

Source code in src/atopile/cli/create.py
def do_configure(name: str, _repo_path: str, debug: bool):
    """Configure the project."""
    repo_path = Path(_repo_path)
    try:
        author = git.Repo(repo_path).git.config("user.name")
    except (git.GitCommandError, git.InvalidGitRepositoryError):
        author = "Original Author"

    template_globals = {
        "name": name,
        "caseconverter": caseconverter,
        "repo_root": repo_path,
        "python_path": sys.executable,
        "author": author,
    }

    # Load templates
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(str(repo_path)))

    for template_path in repo_path.glob("**/*.j2"):
        # Figure out the target path and variables and what not
        target_path = template_path.parent / template_path.name.replace(
            ".j2", ""
        ).replace("__name__", caseconverter.kebabcase(name))

        template_globals["rel_path"] = target_path

        template = env.get_template(
            str(template_path.relative_to(repo_path).as_posix()),
            globals=template_globals,
        )

        # Make the noise!
        with target_path.open("w") as f:
            for chunk in template.generate():
                f.write(chunk)

        # Remove the template
        if not debug:
            template_path.unlink()

component

component(
    search_term=None, name=None, filename=None, type_=None
)

Create a new component.

Source code in src/atopile/cli/create.py
@create_app.command()
def component(
    search_term: Annotated[str | None, typer.Option("--search", "-s")] = None,
    name: Annotated[str | None, typer.Option("--name", "-n")] = None,
    filename: Annotated[str | None, typer.Option("--filename", "-f")] = None,
    type_: Annotated[ComponentType | None, typer.Option("--type", "-t")] = None,
):
    """Create a new component."""
    import faebryk.libs.picker.lcsc as lcsc_
    from faebryk.libs.picker.api.models import Component
    from faebryk.libs.picker.api.picker_lib import _extract_numeric_id, client
    from faebryk.libs.pycodegen import format_and_write, sanitize_name
    from faebryk.tools.libadd import Template

    try:
        project_config, project_ctx = configure_project_context(None)
    except errors.UserBadParameterError:
        project_config, project_ctx = configure_project_context(
            str(Path.cwd()), standalone=True
        )

    # FIXME: dedup path bullshit
    lcsc_.LIB_FOLDER = (project_config.location / "build" / "kicad" / "libs",)
    lcsc_.LIB_FOLDER = lcsc_.BUILD_FOLDER / "kicad" / "libs"
    lcsc_.MODEL_PATH = None

    # Find a component --------------------------------------------------------

    component: Component | None = None

    for _ in stuck_user_helper_generator:
        if not search_term:
            search_term = questionary.text(
                "Search for a component (Part Number or LCSC ID):"
            ).unsafe_ask()
            assert search_term is not None

        try:
            lcsc_id = _extract_numeric_id(search_term)
        except ValueError:
            lcsc_id = None

        try:
            if lcsc_id:
                components = client.fetch_part_by_lcsc(lcsc_id)
            else:
                # TODO: remove this once we have a fuzzy search
                mfr = questionary.text("Enter the manufacturer").unsafe_ask()
                components = client.fetch_part_by_mfr(mfr, search_term)
        except ApiHTTPError as e:
            if e.response.status_code == 404:
                components = []
            else:
                raise

        if len(components) == 0:
            rich.print(f'No components found for "{search_term}"')
            search_term = None
            continue

        component_table = Table()
        component_table.add_column("Part Number")
        component_table.add_column("Manufacturer")
        component_table.add_column("Description")

        for component in components:
            component_table.add_row(
                component.manufacturer_name,
                component.part_number,
                component.description,
            )

        rich.print(component_table)

        choices = [
            {
                "name": f"{component.manufacturer_name} {component.part_number}",
                "value": component,
            }
            for component in components
        ] + [{"name": "Search again...", "value": None}]

        component = questionary.select(
            "Select a component", choices=choices
        ).unsafe_ask()

        if component is not None:
            break

        # Reset the input terms to start over if we didn't find what we're looking for
        search_term = None

    # We have a component -----------------------------------------------------
    assert component is not None

    # TODO: templated ato components too
    if type_ is None:
        type_ = ComponentType.fab
    # if type_ is None:
    #     type_ = questionary.select(
    #         "Select the component type", choices=list(ComponentType)
    #     ).unsafe_ask()
    #     assert type_ is not None

    if name is None:
        name = questionary.text(
            "Enter the name of the component",
            default=caseconverter.pascalcase(
                sanitize_name(component.manufacturer_name + " " + component.part_number)
            ),
        ).unsafe_ask()

    sanitized_name = sanitize_name(name)
    if sanitized_name != name:
        rich.print(f"Sanitized name: {sanitized_name}")

    if type_ == ComponentType.ato:
        extension = ".ato"
    elif type_ == ComponentType.fab:
        extension = ".py"
    else:
        raise ValueError(f"Invalid component type: {type_}")

    out_path: Path | None = None
    for _ in stuck_user_helper_generator:
        if filename is None:
            filename = questionary.text(
                "Enter the filename of the component",
                default=caseconverter.snakecase(name) + extension,
            ).unsafe_ask()

        assert filename is not None

        filepath = Path(filename)
        if filepath.is_absolute():
            out_path = filepath.resolve()
        else:
            out_path = (project_ctx.src_path / filename).resolve()

        if out_path.exists():
            rich.print(f"File {out_path} already exists")
            filename = None
            continue

        if not out_path.parent.exists():
            rich.print(
                f"Directory {out_path.parent} does not exist. Creating it now..."
            )
            out_path.parent.mkdir(parents=True, exist_ok=True)

        break

    assert out_path is not None

    if type_ == ComponentType.ato:
        raise errors.UserNotImplementedError(
            "Creating ato components are not yet supported"
        )

    elif type_ == ComponentType.fab:
        template = Template(name=sanitized_name, base="Module")
        template.add_part(component)
        out = template.dumps()
        format_and_write(out, out_path)
        rich.print(f":sparkles: Created {out_path} !")

main

main(ctx)
Source code in src/atopile/cli/create.py
@create_app.callback(invoke_without_command=True)
def main(ctx: typer.Context):
    if ctx.resilient_parsing:
        return

    if not ctx.invoked_subcommand:
        commands = cast(dict, ctx.command.commands)  # type: ignore  # commands is an attribute of the context
        command_name = questionary.select(
            "What would you like to create?",
            choices=[n for n, c in commands.items() if not c.hidden],
        ).unsafe_ask()

        assert command_name in commands

        # Run the command
        ctx.invoke(commands[command_name].callback)

excepthook

handle_exception

handle_exception(exc_type, exc_value, exc_traceback)
Source code in src/atopile/cli/excepthook.py
def handle_exception(exc_type, exc_value, exc_traceback):
    try:
        _handle_exception(exc_type, exc_value, exc_traceback)
    except Exception as e:
        sys.__excepthook__(type(e), e, e.__traceback__)
    finally:
        with contextlib.suppress(Exception):
            telemetry.log_telemetry()

        rich.print(
            "\n\nUnfortunately errors ^^^ stopped the build. "
            "If you need a hand jump on [#9656ce]Discord[/]! [link=https://discord.gg/mjtxARsr9V]https://discord.gg/mjtxARsr9V[/] :wave:"  # noqa: E501  # pre-existing
        )
        sys.exit(1)
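
A sketch of how a handler like this is typically wired in; the assignment below is an assumption about the setup, not a quote from the module:

import sys

# Route uncaught exceptions through the handler above
sys.excepthook = handle_exception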

inspect

ato inspect

log module-attribute

log = getLogger(__name__)

odd_row module-attribute

odd_row = 'on grey11 cornflower_blue'

even_row module-attribute

even_row = 'on grey15 cornflower_blue'

odd_greyed_row module-attribute

odd_greyed_row = 'on grey11 grey0'

even_greyed_row module-attribute

even_greyed_row = 'on grey15 grey0'

DisplayEntry

DisplayEntry(net)

This class represents a net below the inspected module, the equivalent net below the context module, and the individual connections made to the inspected net and to the context net.

Source code in src/atopile/cli/inspect.py
def __init__(self, net: list[list[AddrStr]]):
    self.inspect_net: list[AddrStr] = net
    self.inspect_consumer: list[AddrStr] = []
    self.context_net: list[AddrStr] = []
    self.context_consumer: list[AddrStr] = []
inspect_net instance-attribute
inspect_net = net
inspect_consumer instance-attribute
inspect_consumer = []
context_net instance-attribute
context_net = []
context_consumer instance-attribute
context_consumer = []

inspect

inspect(
    entry=None,
    build=[],
    target=[],
    option=[],
    inspect=None,
    context=None,
    dump_csv=None,
)

Utility to inspect what is connected to a component. The context sets the boundary where something is considered connected. For example: --inspect rp2040_micro --context rp2040_micro_ki

Source code in src/atopile/cli/inspect.py
def inspect(
    entry: Annotated[str | None, typer.Argument()] = None,
    build: Annotated[list[str], typer.Option("--build", "-b", envvar="ATO_BUILD")] = [],
    target: Annotated[
        list[str], typer.Option("--target", "-t", envvar="ATO_TARGET")
    ] = [],
    option: Annotated[
        list[str], typer.Option("--option", "-o", envvar="ATO_OPTION")
    ] = [],
    inspect: str | None = None,
    context: Annotated[
        str | None,
        typer.Option(
            "--context", "-c", help="The context from which to inspect the module"
        ),
    ] = None,
    dump_csv: Annotated[
        str | None,
        typer.Option("--dump-csv", "-d", help="Output the inspection to a CSV file"),
    ] = None,
):
    """
    Utility to inspect what is connected to a component.
    The context sets the boundary where something is considered connected.
    For example: `--inspect rp2040_micro --context rp2040_micro_ki`
    """
    raise errors.UserNotImplementedError("Inspect is not yet implemented.")

install

This CLI command provides the ato install command to install dependencies and download JLCPCB footprints.

yaml module-attribute

yaml = YAML()

log module-attribute

log = getLogger(__name__)

install

install(
    to_install=None,
    jlcpcb=False,
    link=False,
    upgrade=False,
    path=None,
)

Install atopile packages or components from jlcpcb.com/parts

Source code in src/atopile/cli/install.py
def install(
    to_install: Annotated[str | None, typer.Argument()] = None,
    jlcpcb: Annotated[
        bool, typer.Option("--jlcpcb", "-j", help="JLCPCB component ID")
    ] = False,
    link: Annotated[
        bool,
        typer.Option("--link", "-l", help="Keep this dependency linked to the source"),
    ] = False,
    upgrade: Annotated[
        bool, typer.Option("--upgrade", "-u", help="Upgrade dependencies")
    ] = False,
    path: Annotated[Path | None, typer.Argument()] = None,
):
    """
    Install atopile packages or components from jlcpcb.com/parts
    """
    do_install(to_install, jlcpcb, link, upgrade, path)

do_install

do_install(to_install, jlcpcb, link, upgrade, path)

Actually do the installation of the dependencies. This is split in two so that it can be called from install and create

Source code in src/atopile/cli/install.py
def do_install(
    to_install: str | None, jlcpcb: bool, link: bool, upgrade: bool, path: Path | None
):
    """
    Actually do the installation of the dependencies.
    This is split in two so that it can be called from `install` and `create`
    """

    current_path = Path.cwd()
    config = atopile.config.get_project_config_from_path(Path(path or current_path))
    ctx = atopile.config.ProjectContext.from_config(config)
    top_level_path = config.location

    log.info(f"Installing {to_install + ' ' if to_install else ''}in {top_level_path}")

    if jlcpcb:
        if to_install is None:
            raise errors.UserBadParameterError("No component ID specified")
        # eg. "ato install --jlcpcb=C123"
        install_jlcpcb(to_install, top_level_path)
    elif to_install:
        # eg. "ato install some-atopile-module"
        install_single_dependency(to_install, link, upgrade, config, ctx)
    else:
        # eg. "ato install"
        install_project_dependencies(config, ctx, upgrade)

    log.info("[green]Done![/] :call_me_hand:", extra={"markup": True})

get_package_repo_from_registry

get_package_repo_from_registry(module_name)

Get the git repo for a package from the ato registry.

Source code in src/atopile/cli/install.py
def get_package_repo_from_registry(module_name: str) -> str:
    """
    Get the git repo for a package from the ato registry.
    """
    try:
        response = requests.post(
            "https://get-package-atsuhzfd5a-uc.a.run.app",
            json={"name": module_name},
            timeout=10,
        )
    except requests.exceptions.ReadTimeout as ex:
        raise errors.UserInfraError(
            f"Request to registry timed out for package '{module_name}'"
        ) from ex

    if response.status_code == 500:
        raise errors.UserException(
            f"Could not find package '{module_name}' in registry."
        )
    response.raise_for_status()
    return_data = response.json()
    try:
        return_url = return_data["data"]["repo_url"]
    except KeyError as ex:
        raise errors.UserException(
            f"No repo_url found for package '{module_name}'"
        ) from ex
    return return_url
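
The registry exchange above is a single POST; a sketch of the request and the response shape the function expects (the URL is taken from the source, the package name is illustrative):

import requests

response = requests.post(
    "https://get-package-atsuhzfd5a-uc.a.run.app",
    json={"name": "generics"},  # illustrative package name
    timeout=10,
)
# Expected success payload shape: {"data": {"repo_url": "https://github.com/..."}}
repo_url = response.json()["data"]["repo_url"]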

install_single_dependency

install_single_dependency(
    to_install, link, upgrade, config, ctx
)
Source code in src/atopile/cli/install.py
def install_single_dependency(
    to_install: str,
    link: bool,
    upgrade: bool,
    config: atopile.config.ProjectConfig,
    ctx: atopile.config.ProjectContext,
):
    dependency = atopile.config.Dependency.from_str(to_install)
    name = _name_and_clone_url_helper(dependency.name)[0]
    if link:
        dependency.link_broken = False
        abs_path = ctx.module_path / name
        dependency.path = abs_path.relative_to(ctx.project_path)
    else:
        abs_path = ctx.src_path / name
        dependency.path = abs_path.relative_to(ctx.project_path)
        dependency.link_broken = True

    try:
        installed_version = install_dependency(dependency, upgrade, abs_path)
    except GitCommandError as ex:
        if "already exists and is not an empty directory" in ex.stderr:
            # FIXME: shouldn't `--upgrade` do this already?
            raise errors.UserException(
                f"Directory {abs_path} already exists and is not empty. "
                "Please move or remove it before installing this new content."
            ) from ex
        raise
    # If the link's broken, remove the .git directory so git treats it as copy-pasted code # noqa: E501  # pre-existing
    if dependency.link_broken:
        try:
            robustly_rm_dir(abs_path / ".git")
        except (PermissionError, OSError, FileNotFoundError) as ex:
            errors.UserException(f"Failed to remove .git directory: {repr(ex)}").log(
                log, logging.WARNING
            )

    if dependency.version_spec is None and installed_version:
        # If the user didn't specify a version, we'll
        # use the one we just installed as a basis
        dependency.version_spec = f"@{installed_version}"

    names = {dep.name: i for i, dep in enumerate(config.dependencies)}
    if dependency.name in names:
        config.dependencies[names[dependency.name]] = dependency
    else:
        config.dependencies.append(dependency)
    config.save_changes()

install_project_dependencies

install_project_dependencies(config, ctx, upgrade)
Source code in src/atopile/cli/install.py
def install_project_dependencies(
    config: atopile.config.ProjectConfig,
    ctx: atopile.config.ProjectContext,
    upgrade: bool,
):
    for _ctx, dependency in faebryk.libs.exceptions.iter_through_errors(
        config.dependencies
    ):
        with _ctx():
            if not dependency.link_broken:
                # FIXME: these dependency objects are a little too entangled
                name = _name_and_clone_url_helper(dependency.name)[0]
                abs_path = ctx.module_path / name
                dependency.path = abs_path.relative_to(ctx.project_path)

                try:
                    install_dependency(dependency, upgrade, abs_path)
                except GitCommandError as ex:
                    if "already exists and is not an empty directory" in ex.stderr:
                        # FIXME: shouldn't `--upgrade` do this already?
                        raise errors.UserException(
                            f"Directory {abs_path} already exists and is not empty. "
                            "Please move or remove it before installing this new content."  # noqa: E501  # pre-existing
                        ) from ex
                    raise

install_dependency

install_dependency(dependency, upgrade, abs_path)

Install a dependency of the name "module_name"

Source code in src/atopile/cli/install.py
def install_dependency(
    dependency: atopile.config.Dependency, upgrade: bool, abs_path: Path
) -> Optional[str]:
    """
    Install a dependency of the name "module_name"
    """
    # Ensure the modules path exists
    abs_path.parent.mkdir(parents=True, exist_ok=True)

    # Figure out what we're trying to install here
    module_spec = dependency.version_spec or "*"
    module_name, clone_url = _name_and_clone_url_helper(dependency.name)

    try:
        # This will raise an exception if the directory does not exist
        repo = Repo(abs_path)
    except (InvalidGitRepositoryError, NoSuchPathError):
        # Directory does not contain a valid repo, clone into it
        log.info(f"Installing dependency {module_name}")
        repo = Repo.clone_from(clone_url, abs_path)
        repo.active_branch.tracking_branch().checkout()
    else:
        # In this case the directory exists and contains a valid repo
        if upgrade:
            log.info(f"Fetching latest changes for {module_name}")
            repo.remotes.origin.fetch()
        else:
            log.info(
                f"{module_name} already exists. If you wish to upgrade, use --upgrade"
            )
            # here we're done because we don't want to play with peoples' deps under them # noqa: E501  # pre-existing
            return

    # Figure out what version of this thing we need
    semver_to_tag = {}
    installed_semver = None
    for tag in repo.tags:
        try:
            semver_to_tag[version.parse(tag.name)] = tag
        except errors.UserException:
            log.debug(f"Tag {tag.name} is not a valid semver tag. Skipping.")

    if "@" in module_spec:
        # If there's an @ in the version, we're gonna check that thing out
        best_checkout = module_spec.strip(" @")
    elif semver_to_tag:
        # Otherwise we're gonna find the best tag meeting the semver spec
        valid_versions = [v for v in semver_to_tag if version.match(module_spec, v)]
        if not valid_versions:
            raise errors.UserException(
                f"No versions of {module_name} match spec {module_spec}.\n"
                f"Available versions: {', '.join(map(str, semver_to_tag))}"
            )
        installed_semver = max(valid_versions)
        best_checkout = semver_to_tag[installed_semver]
    else:
        log.warning(
            "No semver tags found for this module. Using latest default branch :hot_pepper:.",  # noqa: E501  # pre-existing
            extra={"markup": True},
        )
        return None

    # If the repo is dirty, throw an error
    if repo.is_dirty():
        raise errors.UserException(
            f"Module {module_name} has uncommitted changes. Aborting."
        )

    # Checkout the best thing we've found
    ref_before_checkout = repo.head.commit

    # If the repo best_checkout is a branch, we need to checkout the origin/branch
    if best_checkout in repo.heads:
        best_checkout = f"origin/{best_checkout}"

    repo.git.checkout(best_checkout)

    if repo.head.commit == ref_before_checkout:
        log.info(
            f"Already on the best option ([cyan bold]{best_checkout}[/]) for {module_name}",  # noqa: E501  # pre-existing
            extra={"markup": True},
        )
    else:
        log.info(
            f"Using :sparkles: [cyan bold]{best_checkout}[/] :sparkles: of {module_name}",  # noqa: E501  # pre-existing
            extra={"markup": True},
        )

    return repo.head.commit.hexsha
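
To summarise the resolution above: an explicit "@<ref>" in the spec wins, otherwise the highest semver tag matching the spec is checked out, otherwise the default branch is left untouched. A compressed sketch of that decision with hypothetical tags (plain string matching stands in for version.match):

module_spec = "^1.0.0"
semver_to_tag = {"1.0.0": "v1.0.0", "1.2.0": "v1.2.0", "2.0.0": "v2.0.0"}

if "@" in module_spec:
    best_checkout = module_spec.strip(" @")    # e.g. "@main" -> "main"
elif semver_to_tag:
    valid = [v for v in semver_to_tag if v.startswith("1.")]
    best_checkout = semver_to_tag[max(valid)]  # -> "v1.2.0"
else:
    best_checkout = None                       # stay on the default branch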

install_jlcpcb

install_jlcpcb(component_id, top_level_path)

Install a component from JLCPCB

Source code in src/atopile/cli/install.py
def install_jlcpcb(component_id: str, top_level_path: Path):
    """Install a component from JLCPCB"""
    component_id = component_id.upper()
    if not component_id.startswith("C") or not component_id[1:].isdigit():
        raise errors.UserException(f"Component id {component_id} is invalid. Aborting.")

    footprints_dir = (
        top_level_path
        / atopile.config.get_project_config_from_path(top_level_path).paths.footprints
    )
    footprints_dir.mkdir(parents=True, exist_ok=True)

    ato_src_dir = (
        top_level_path
        / atopile.config.get_project_config_from_path(top_level_path).paths.src
    )
    ato_src_dir.mkdir(parents=True, exist_ok=True)

    log.info(f"Footprints directory: {footprints_dir}")

    command = [
        sys.executable,
        "-m",
        "easyeda2kicad",
        "--full",
        f"--lcsc_id={component_id}",
        f"--output={footprints_dir}",
        "--overwrite",
        "--ato",
        f"--ato_file_path={ato_src_dir}",
    ]
    result = subprocess.run(command, capture_output=True, text=True, check=False)

    # The stdout and stderr are captured due to 'capture_output=True'
    print("STDOUT:", result.stdout)
    print("STDERR:", result.stderr)

    # Check the return code to see if the command was successful
    if result.returncode == 0:
        print("Command executed successfully")
    else:
        component_link = f"https://jlcpcb.com/partdetail/{component_id}"
        raise errors.UserException(
            "Oh no! Looks like this component doesnt have a model available. "
            f"More information about the component can be found here: {component_link}"
        )

logging

logger module-attribute

logger = getLogger(__name__)

handler module-attribute

handler = LogHandler(
    console=console,
    rich_tracebacks=True,
    show_path=False,
    tracebacks_suppress=["typer"],
    tracebacks_suppress_map={
        UserPythonModuleError: [atopile, faebryk]
    },
    tracebacks_unwrap=[UserPythonModuleError],
    hide_traceback_types=(_BaseBaseUserException),
    always_show_traceback_types=(UserPythonModuleError),
    traceback_level=ERROR,
)
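
A minimal sketch of attaching a handler like this to the standard logging machinery (the attachment point is an assumption; the real wiring lives elsewhere in the CLI):

import logging

root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.INFO)

logging.getLogger(__name__).info("hello from atopile")  # rendered through Rich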

LogHandler

LogHandler(
    *args,
    tracebacks_suppress_map=None,
    tracebacks_unwrap=None,
    hide_traceback_types=(),
    always_show_traceback_types=(),
    traceback_level=ERROR,
    **kwargs
)

Bases: RichHandler

A logging handler that renders output with Rich.

Suppresses frames from tracebacks conditionally depending on the exception type.

Source code in src/atopile/cli/logging.py
def __init__(
    self,
    *args,
    tracebacks_suppress_map: dict[type[BaseException], list[ModuleType]]
    | None = None,
    tracebacks_unwrap: list[type[BaseException]] | None = None,
    hide_traceback_types: tuple[type[BaseException], ...] = (),
    always_show_traceback_types: tuple[type[BaseException], ...] = (),
    traceback_level: int = logging.ERROR,
    **kwargs,
):
    super().__init__(*args, **kwargs)
    self.tracebacks_suppress_map = tracebacks_suppress_map or {}
    self.tracebacks_unwrap = tracebacks_unwrap or []
    self.hide_traceback_types = hide_traceback_types
    self.always_show_traceback_types = always_show_traceback_types
    self.traceback_level = traceback_level
tracebacks_suppress_map instance-attribute
tracebacks_suppress_map = tracebacks_suppress_map or {}
tracebacks_unwrap instance-attribute
tracebacks_unwrap = tracebacks_unwrap or []
hide_traceback_types instance-attribute
hide_traceback_types = hide_traceback_types
always_show_traceback_types instance-attribute
always_show_traceback_types = always_show_traceback_types
traceback_level instance-attribute
traceback_level = traceback_level
render_message
render_message(record, message)
Source code in src/atopile/cli/logging.py
def render_message(
    self, record: logging.LogRecord, message: str
) -> ConsoleRenderable:
    # special handling for exceptions only
    if record.exc_info is None:
        return self._render_message(record, message)

    _, exc, _ = record.exc_info

    if not isinstance(exc, ConsoleRenderable):
        return self._render_message(record, message)

    return exc
emit
emit(record)

Invoked by logging.

Source code in src/atopile/cli/logging.py
def emit(self, record: logging.LogRecord) -> None:
    """Invoked by logging."""
    hashable = self._get_hashable(record)

    if hashable and hashable in _logged_exceptions:
        # we've already logged this
        return

    traceback = self._get_traceback(record)

    if self.formatter:
        record.message = record.getMessage()
        formatter = self.formatter
        if hasattr(formatter, "usesTime") and formatter.usesTime():
            record.asctime = formatter.formatTime(record, formatter.datefmt)
        message = formatter.formatMessage(record)
    else:
        message = record.getMessage()

    message_renderable = self.render_message(record, message)

    log_renderable = self.render(
        record=record, traceback=traceback, message_renderable=message_renderable
    )
    if isinstance(self.console.file, NullFile):
        # Handles pythonw, where stdout/stderr are null, and we return NullFile
        # instance from Console.file. In this case, we still want to make a log record # noqa: E501  # pre-existing
        # even though we won't be writing anything to a file.
        self.handleError(record)
    else:
        try:
            self.console.print(log_renderable, highlight=True)
        except Exception:
            self.handleError(record)

    if hashable:
        _logged_exceptions.add(hashable)

view

ato view

log module-attribute

log = getLogger(__name__)

view

view(entry=None, build=[], target=[], option=[])

View a block diagram or schematic of your project.

Source code in src/atopile/cli/view.py
def view(
    entry: Annotated[str | None, typer.Argument()] = None,
    build: Annotated[list[str], typer.Option("--build", "-b", envvar="ATO_BUILD")] = [],
    target: Annotated[
        list[str], typer.Option("--target", "-t", envvar="ATO_TARGET")
    ] = [],
    option: Annotated[
        list[str], typer.Option("--option", "-o", envvar="ATO_OPTION")
    ] = [],
):
    """
    View a block diagram or schematic of your project.
    """
    raise errors.UserNotImplementedError("View is not yet implemented.")