fixed subscription table

2025-02-02 00:02:31 -05:00
parent a1ab31acfe
commit ef5f57e678
5389 changed files with 686710 additions and 28 deletions

View File

@@ -0,0 +1,35 @@
"""Traitlets Python configuration system"""
from __future__ import annotations
import typing as _t
from . import traitlets
from ._version import __version__, version_info
from .traitlets import *
from .utils.bunch import Bunch
from .utils.decorators import signature_has_traits
from .utils.importstring import import_item
from .utils.warnings import warn
__all__ = [
"traitlets",
"__version__",
"version_info",
"Bunch",
"signature_has_traits",
"import_item",
"Sentinel",
]
class Sentinel(traitlets.Sentinel): # type:ignore[name-defined, misc]
def __init__(self, *args: _t.Any, **kwargs: _t.Any) -> None:
super().__init__(*args, **kwargs)
warn(
"""
Sentinel is not a public part of the traitlets API.
It was published by mistake, and may be removed in the future.
""",
DeprecationWarning,
stacklevel=2,
)

View File

@@ -0,0 +1,19 @@
"""
handle the current version info of traitlets.
"""
from __future__ import annotations
import re
from typing import List
# Version string must appear intact for hatch versioning
__version__ = "5.14.3"
# Build up version_info tuple for backwards compatibility
pattern = r"(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)"
match = re.match(pattern, __version__)
assert match is not None
parts: List[object] = [int(match[part]) for part in ["major", "minor", "patch"]]
if match["rest"]:
parts.append(match["rest"])
version_info = tuple(parts)
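
A quick sanity check of the version parsing above, as a minimal sketch (assumes the package is installed; the expected output reflects the version string in this file):

# Illustrative sketch only.
from traitlets._version import __version__, version_info
print(__version__, version_info)   # -> 5.14.3 (5, 14, 3)
# A pre-release string such as "5.15.0a1" would instead yield (5, 15, 0, "a1"),
# because the non-numeric tail lands in the "rest" group.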

View File

@@ -0,0 +1,20 @@
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
from .application import *
from .configurable import *
from .loader import Config
__all__ = [ # noqa: F405
"Config",
"Application",
"ApplicationError",
"LevelFormatter",
"configurable",
"Configurable",
"ConfigurableError",
"MultipleInstanceError",
"LoggingConfigurable",
"SingletonConfigurable",
]

File diff suppressed because it is too large.

View File

@@ -0,0 +1,220 @@
"""Helper utilities for integrating argcomplete with traitlets"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
import argparse
import os
import typing as t
try:
import argcomplete
from argcomplete import CompletionFinder # type:ignore[attr-defined]
except ImportError:
# This module and its utility methods are written to not crash even
# if argcomplete is not installed.
class StubModule:
def __getattr__(self, attr: str) -> t.Any:
if not attr.startswith("__"):
raise ModuleNotFoundError("No module named 'argcomplete'")
raise AttributeError(f"argcomplete stub module has no attribute '{attr}'")
argcomplete = StubModule() # type:ignore[assignment]
CompletionFinder = object # type:ignore[assignment, misc]
def get_argcomplete_cwords() -> t.Optional[t.List[str]]:
"""Get current words prior to completion point
This is normally done in the `argcomplete.CompletionFinder` constructor,
but is exposed here to allow `traitlets` to follow dynamic code-paths such
as determining whether to evaluate a subcommand.
"""
if "_ARGCOMPLETE" not in os.environ:
return None
comp_line = os.environ["COMP_LINE"]
comp_point = int(os.environ["COMP_POINT"])
# argcomplete.debug("splitting COMP_LINE for:", comp_line, comp_point)
comp_words: t.List[str]
try:
(
cword_prequote,
cword_prefix,
cword_suffix,
comp_words,
last_wordbreak_pos,
) = argcomplete.split_line(comp_line, comp_point) # type:ignore[attr-defined,no-untyped-call]
except ModuleNotFoundError:
return None
# _ARGCOMPLETE is set by the shell script to tell us where comp_words
# should start, based on what we're completing.
# 1: <script> [args]
# 2: python <script> [args]
# 3: python -m <module> [args]
start = int(os.environ["_ARGCOMPLETE"]) - 1
comp_words = comp_words[start:]
# argcomplete.debug("prequote=", cword_prequote, "prefix=", cword_prefix, "suffix=", cword_suffix, "words=", comp_words, "last=", last_wordbreak_pos)
return comp_words # noqa: RET504
def increment_argcomplete_index() -> None:
"""Assumes ``$_ARGCOMPLETE`` is set and `argcomplete` is importable
Increment the index pointed to by ``$_ARGCOMPLETE``, which is used to
determine at which word `argcomplete` should start evaluating the command line.
This may be useful to "inform" `argcomplete` that we have already evaluated
the first word as a subcommand.
"""
try:
os.environ["_ARGCOMPLETE"] = str(int(os.environ["_ARGCOMPLETE"]) + 1)
except Exception:
try:
argcomplete.debug("Unable to increment $_ARGCOMPLETE", os.environ["_ARGCOMPLETE"]) # type:ignore[attr-defined,no-untyped-call]
except (KeyError, ModuleNotFoundError):
pass
class ExtendedCompletionFinder(CompletionFinder):
"""An extension of CompletionFinder which dynamically completes class-trait based options
This finder adds a few functionalities:
1. When completing options, it will add ``--Class.`` to the list of completions, for each
class in `Application.classes` that could complete the current option.
2. If it detects that we are currently trying to complete an option related to ``--Class.``,
it will add the corresponding config traits of Class to the `ArgumentParser` instance,
so that the traits' completers can be used.
3. If there are any subcommands, they are added as completions for the first word
Note that we are avoiding adding all config traits of all classes to the `ArgumentParser`,
which would be easier but would add more runtime overhead and would also make completions
appear more spammy.
These changes do require using the internals of `argcomplete.CompletionFinder`.
"""
_parser: argparse.ArgumentParser
config_classes: t.List[t.Any] = [] # Configurables
subcommands: t.List[str] = []
def match_class_completions(self, cword_prefix: str) -> t.List[t.Tuple[t.Any, str]]:
"""Match the word to be completed against our Configurable classes
Check if cword_prefix could potentially match against --{class}. for any class
in Application.classes.
"""
class_completions = [(cls, f"--{cls.__name__}.") for cls in self.config_classes]
matched_completions = class_completions
if "." in cword_prefix:
cword_prefix = cword_prefix[: cword_prefix.index(".") + 1]
matched_completions = [(cls, c) for (cls, c) in class_completions if c == cword_prefix]
elif len(cword_prefix) > 0:
matched_completions = [
(cls, c) for (cls, c) in class_completions if c.startswith(cword_prefix)
]
return matched_completions
def inject_class_to_parser(self, cls: t.Any) -> None:
"""Add dummy arguments to our ArgumentParser for the traits of this class
The argparse-based loader currently does not actually add any class traits to
the constructed ArgumentParser, only the flags & aliases. In order to work nicely
with argcomplete's completers functionality, this method adds dummy arguments
of the form --Class.trait to the ArgumentParser instance.
This method should be called selectively to reduce runtime overhead and to avoid
spamming options across all of Application.classes.
"""
try:
for traitname, trait in cls.class_traits(config=True).items():
completer = trait.metadata.get("argcompleter") or getattr(
trait, "argcompleter", None
)
multiplicity = trait.metadata.get("multiplicity")
self._parser.add_argument( # type: ignore[attr-defined]
f"--{cls.__name__}.{traitname}",
type=str,
help=trait.help,
nargs=multiplicity,
# metavar=traitname,
).completer = completer
# argcomplete.debug(f"added --{cls.__name__}.{traitname}")
except AttributeError:
pass
def _get_completions(
self, comp_words: t.List[str], cword_prefix: str, *args: t.Any
) -> t.List[str]:
"""Overridden to dynamically append --Class.trait arguments if appropriate
Warning:
This does not (currently) support completions of the form
--Class1.Class2.<...>.trait, although this is valid for traitlets.
Part of the reason is that we don't currently have a way to identify
which classes may be used with Class1 as a parent.
Warning:
This is an internal method in CompletionFinder and so the API might
be subject to drift.
"""
# Try to identify if we are completing something related to --Class. for
# a known Class, if we are then add the Class config traits to our ArgumentParser.
prefix_chars = self._parser.prefix_chars
is_option = len(cword_prefix) > 0 and cword_prefix[0] in prefix_chars
if is_option:
# If we are currently completing an option, check if it could
# match with any of the --Class. completions. If there's exactly
# one matched class, then expand out the --Class.trait options.
matched_completions = self.match_class_completions(cword_prefix)
if len(matched_completions) == 1:
matched_cls = matched_completions[0][0]
self.inject_class_to_parser(matched_cls)
elif len(comp_words) > 0 and "." in comp_words[-1] and not is_option:
# If not an option, perform a hacky check to see if we are completing
# an argument for an already present --Class.trait option. Search backwards
# for last option (based on last word starting with prefix_chars), and see
# if it is of the form --Class.trait. Note that if multiplicity="+", these
# arguments might conflict with positional arguments.
for prev_word in comp_words[::-1]:
if len(prev_word) > 0 and prev_word[0] in prefix_chars:
matched_completions = self.match_class_completions(prev_word)
if matched_completions:
matched_cls = matched_completions[0][0]
self.inject_class_to_parser(matched_cls)
break
completions: t.List[str]
completions = super()._get_completions(comp_words, cword_prefix, *args) # type:ignore[no-untyped-call]
# For subcommand-handling: it is difficult to get this to work
# using argparse subparsers, because the ArgumentParser accepts
# arbitrary extra_args, which ends up masking subparsers.
# Instead, check if comp_words only consists of the script,
# if so check if any subcommands start with cword_prefix.
if self.subcommands and len(comp_words) == 1:
argcomplete.debug("Adding subcommands for", cword_prefix) # type:ignore[attr-defined,no-untyped-call]
completions.extend(subc for subc in self.subcommands if subc.startswith(cword_prefix))
return completions
def _get_option_completions(
self, parser: argparse.ArgumentParser, cword_prefix: str
) -> t.List[str]:
"""Overridden to add --Class. completions when appropriate"""
completions: t.List[str]
completions = super()._get_option_completions(parser, cword_prefix) # type:ignore[no-untyped-call]
if cword_prefix.endswith("."):
return completions
matched_completions = self.match_class_completions(cword_prefix)
if len(matched_completions) > 1:
completions.extend(opt for cls, opt in matched_completions)
# If there is exactly one match, we would expect it to have already
# been handled by the options dynamically added in _get_completions().
# However, there may be edge cases missed here, for example if the
# matched class has no configurable traits.
return completions
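
A minimal sketch of how the completion entry point above receives state from the shell. The module path traitlets.config.argcomplete_config and the command line below are assumptions for illustration; the function itself returns None when argcomplete is missing:

import os
from traitlets.config.argcomplete_config import get_argcomplete_cwords

# Environment variables normally exported by the argcomplete shell hook
# (values here are made up for illustration).
os.environ["_ARGCOMPLETE"] = "1"              # completing "<script> [args]"
os.environ["COMP_LINE"] = "myapp --log-le"    # command line typed so far
os.environ["COMP_POINT"] = str(len(os.environ["COMP_LINE"]))  # cursor offset
# Returns the words before the one being completed, e.g. ['myapp'],
# or None if argcomplete is not installed.
print(get_argcomplete_cwords())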

View File

@@ -0,0 +1,600 @@
"""A base class for objects that are configurable."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
import logging
import typing as t
from copy import deepcopy
from textwrap import dedent
from traitlets.traitlets import (
Any,
Container,
Dict,
HasTraits,
Instance,
TraitType,
default,
observe,
observe_compat,
validate,
)
from traitlets.utils import warnings
from traitlets.utils.bunch import Bunch
from traitlets.utils.text import indent, wrap_paragraphs
from .loader import Config, DeferredConfig, LazyConfigValue, _is_section_key
# -----------------------------------------------------------------------------
# Helper classes for Configurables
# -----------------------------------------------------------------------------
if t.TYPE_CHECKING:
LoggerType = t.Union[logging.Logger, logging.LoggerAdapter[t.Any]]
else:
LoggerType = t.Any
class ConfigurableError(Exception):
pass
class MultipleInstanceError(ConfigurableError):
pass
# -----------------------------------------------------------------------------
# Configurable implementation
# -----------------------------------------------------------------------------
class Configurable(HasTraits):
config = Instance(Config, (), {})
parent = Instance("traitlets.config.configurable.Configurable", allow_none=True)
def __init__(self, **kwargs: t.Any) -> None:
"""Create a configurable given a config config.
Parameters
----------
config : Config
If this is empty, default values are used. If config is a
:class:`Config` instance, it will be used to configure the
instance.
parent : Configurable instance, optional
The parent Configurable instance of this object.
Notes
-----
Subclasses of Configurable must call the :meth:`__init__` method of
:class:`Configurable` *before* doing anything else and using
:func:`super`::
class MyConfigurable(Configurable):
def __init__(self, config=None):
super(MyConfigurable, self).__init__(config=config)
# Then any other code you need to finish initialization.
This ensures that instances will be configured properly.
"""
parent = kwargs.pop("parent", None)
if parent is not None:
# config is implied from parent
if kwargs.get("config", None) is None:
kwargs["config"] = parent.config
self.parent = parent
config = kwargs.pop("config", None)
# load kwarg traits, other than config
super().__init__(**kwargs)
# record traits set by config
config_override_names = set()
def notice_config_override(change: Bunch) -> None:
"""Record traits set by both config and kwargs.
They will need to be overridden again after loading config.
"""
if change.name in kwargs:
config_override_names.add(change.name)
self.observe(notice_config_override)
# load config
if config is not None:
# We used to deepcopy, but for now we are trying to just save
# by reference. This *could* have side effects as all components
# will share config. In fact, I did find such a side effect in
# _config_changed below. If a config attribute value was a mutable type
# all instances of a component were getting the same copy, effectively
# making that a class attribute.
# self.config = deepcopy(config)
self.config = config
else:
# allow _config_default to return something
self._load_config(self.config)
self.unobserve(notice_config_override)
for name in config_override_names:
setattr(self, name, kwargs[name])
# -------------------------------------------------------------------------
# Static trait notifications
# -------------------------------------------------------------------------
@classmethod
def section_names(cls) -> list[str]:
"""return section names as a list"""
return [
c.__name__
for c in reversed(cls.__mro__)
if issubclass(c, Configurable) and issubclass(cls, c)
]
def _find_my_config(self, cfg: Config) -> t.Any:
"""extract my config from a global Config object
will construct a Config object of only the config values that apply to me
based on my mro(), as well as those of my parent(s) if they exist.
If I am Bar and my parent is Foo, and their parent is Tim,
this will merge the following config sections, in this order::
[Bar, Foo.Bar, Tim.Foo.Bar]
with the last item having the highest priority.
"""
cfgs = [cfg]
if self.parent:
cfgs.append(self.parent._find_my_config(cfg))
my_config = Config()
for c in cfgs:
for sname in self.section_names():
# Don't do a blind getattr as that would cause the config to
# dynamically create the section with name Class.__name__.
if c._has_section(sname):
my_config.merge(c[sname])
return my_config
def _load_config(
self,
cfg: Config,
section_names: list[str] | None = None,
traits: dict[str, TraitType[t.Any, t.Any]] | None = None,
) -> None:
"""load traits from a Config object"""
if traits is None:
traits = self.traits(config=True)
if section_names is None:
section_names = self.section_names()
my_config = self._find_my_config(cfg)
# hold trait notifications until after all config has been loaded
with self.hold_trait_notifications():
for name, config_value in my_config.items():
if name in traits:
if isinstance(config_value, LazyConfigValue):
# ConfigValue is a wrapper for using append / update on containers
# without having to copy the initial value
initial = getattr(self, name)
config_value = config_value.get_value(initial)
elif isinstance(config_value, DeferredConfig):
# DeferredConfig tends to come from CLI/environment variables
config_value = config_value.get_value(traits[name])
# We have to do a deepcopy here if we don't deepcopy the entire
# config object. If we don't, a mutable config_value will be
# shared by all instances, effectively making it a class attribute.
setattr(self, name, deepcopy(config_value))
elif not _is_section_key(name) and not isinstance(config_value, Config):
from difflib import get_close_matches
if isinstance(self, LoggingConfigurable):
assert self.log is not None
warn = self.log.warning
else:
def warn(msg: t.Any) -> None:
return warnings.warn(msg, UserWarning, stacklevel=9)
matches = get_close_matches(name, traits)
msg = f"Config option `{name}` not recognized by `{self.__class__.__name__}`."
if len(matches) == 1:
msg += f" Did you mean `{matches[0]}`?"
elif len(matches) >= 1:
msg += " Did you mean one of: `{matches}`?".format(
matches=", ".join(sorted(matches))
)
warn(msg)
@observe("config")
@observe_compat
def _config_changed(self, change: Bunch) -> None:
"""Update all the class traits having ``config=True`` in metadata.
For any class trait with a ``config`` metadata attribute that is
``True``, we update the trait with the value of the corresponding
config entry.
"""
# Get all traits with a config metadata entry that is True
traits = self.traits(config=True)
# We auto-load config section for this class as well as any parent
# classes that are Configurable subclasses. This starts with Configurable
# and works down the mro loading the config for each section.
section_names = self.section_names()
self._load_config(change.new, traits=traits, section_names=section_names)
def update_config(self, config: Config) -> None:
"""Update config and load the new values"""
# traitlets prior to 4.2 created a copy of self.config in order to trigger change events.
# Some projects (IPython < 5) relied upon one side effect of this,
# that self.config prior to update_config was not modified in-place.
# For backward-compatibility, we must ensure that self.config
# is a new object and not modified in-place,
# but config consumers should not rely on this behavior.
self.config = deepcopy(self.config)
# load config
self._load_config(config)
# merge it into self.config
self.config.merge(config)
# TODO: trigger change event if/when dict-update change events take place
# DO NOT trigger full trait-change
@classmethod
def class_get_help(cls, inst: HasTraits | None = None) -> str:
"""Get the help string for this class in ReST format.
If `inst` is given, its current trait values will be used in place of
class defaults.
"""
assert inst is None or isinstance(inst, cls)
final_help = []
base_classes = ", ".join(p.__name__ for p in cls.__bases__)
final_help.append(f"{cls.__name__}({base_classes}) options")
final_help.append(len(final_help[0]) * "-")
for _, v in sorted(cls.class_traits(config=True).items()):
help = cls.class_get_trait_help(v, inst)
final_help.append(help)
return "\n".join(final_help)
@classmethod
def class_get_trait_help(
cls,
trait: TraitType[t.Any, t.Any],
inst: HasTraits | None = None,
helptext: str | None = None,
) -> str:
"""Get the helptext string for a single trait.
:param inst:
If given, its current trait values will be used in place of
the class default.
:param helptext:
If not given, uses the `help` attribute of the current trait.
"""
assert inst is None or isinstance(inst, cls)
lines = []
header = f"--{cls.__name__}.{trait.name}"
if isinstance(trait, (Container, Dict)):
multiplicity = trait.metadata.get("multiplicity", "append")
if isinstance(trait, Dict):
sample_value = "<key-1>=<value-1>"
else:
sample_value = "<%s-item-1>" % trait.__class__.__name__.lower()
if multiplicity == "append":
header = f"{header}={sample_value}..."
else:
header = f"{header} {sample_value}..."
else:
header = f"{header}=<{trait.__class__.__name__}>"
# header = "--%s.%s=<%s>" % (cls.__name__, trait.name, trait.__class__.__name__)
lines.append(header)
if helptext is None:
helptext = trait.help
if helptext != "":
helptext = "\n".join(wrap_paragraphs(helptext, 76))
lines.append(indent(helptext))
if "Enum" in trait.__class__.__name__:
# include Enum choices
lines.append(indent("Choices: %s" % trait.info()))
if inst is not None:
lines.append(indent(f"Current: {getattr(inst, trait.name or '')!r}"))
else:
try:
dvr = trait.default_value_repr()
except Exception:
dvr = None # ignore defaults we can't construct
if dvr is not None:
if len(dvr) > 64:
dvr = dvr[:61] + "..."
lines.append(indent("Default: %s" % dvr))
return "\n".join(lines)
@classmethod
def class_print_help(cls, inst: HasTraits | None = None) -> None:
"""Get the help string for a single trait and print it."""
print(cls.class_get_help(inst)) # noqa: T201
@classmethod
def _defining_class(
cls, trait: TraitType[t.Any, t.Any], classes: t.Sequence[type[HasTraits]]
) -> type[Configurable]:
"""Get the class that defines a trait
For reducing redundant help output in config files.
Returns the current class if:
- the trait is defined on this class, or
- the class where it is defined would not be in the config file
Parameters
----------
trait : Trait
The trait to look for
classes : list
The list of other classes to consider for redundancy.
Will return `cls` even if it is not defined on `cls`
if the defining class is not in `classes`.
"""
defining_cls = cls
assert trait.name is not None
for parent in cls.mro():
if (
issubclass(parent, Configurable)
and parent in classes
and parent.class_own_traits(config=True).get(trait.name, None) is trait
):
defining_cls = parent
return defining_cls
@classmethod
def class_config_section(cls, classes: t.Sequence[type[HasTraits]] | None = None) -> str:
"""Get the config section for this class.
Parameters
----------
classes : list, optional
The list of other classes in the config file.
Used to reduce redundant information.
"""
def c(s: str) -> str:
"""return a commented, wrapped block."""
s = "\n\n".join(wrap_paragraphs(s, 78))
return "## " + s.replace("\n", "\n# ")
# section header
breaker = "#" + "-" * 78
parent_classes = ", ".join(p.__name__ for p in cls.__bases__ if issubclass(p, Configurable))
s = f"# {cls.__name__}({parent_classes}) configuration"
lines = [breaker, s, breaker]
# get the description trait
desc = cls.class_traits().get("description")
if desc:
desc = desc.default_value
if not desc:
# no description from trait, use __doc__
desc = getattr(cls, "__doc__", "") # type:ignore[arg-type]
if desc:
lines.append(c(desc)) # type:ignore[arg-type]
lines.append("")
for name, trait in sorted(cls.class_traits(config=True).items()):
default_repr = trait.default_value_repr()
if classes:
defining_class = cls._defining_class(trait, classes)
else:
defining_class = cls
if defining_class is cls:
# cls owns the trait, show full help
if trait.help:
lines.append(c(trait.help))
if "Enum" in type(trait).__name__:
# include Enum choices
lines.append("# Choices: %s" % trait.info())
lines.append("# Default: %s" % default_repr)
else:
# Trait appears multiple times and isn't defined here.
# Truncate help to first line + "See also Original.trait"
if trait.help:
lines.append(c(trait.help.split("\n", 1)[0]))
lines.append(f"# See also: {defining_class.__name__}.{name}")
lines.append(f"# c.{cls.__name__}.{name} = {default_repr}")
lines.append("")
return "\n".join(lines)
@classmethod
def class_config_rst_doc(cls) -> str:
"""Generate rST documentation for this class' config options.
Excludes traits defined on parent classes.
"""
lines = []
classname = cls.__name__
for _, trait in sorted(cls.class_traits(config=True).items()):
ttype = trait.__class__.__name__
if not trait.name:
continue
termline = classname + "." + trait.name
# Choices or type
if "Enum" in ttype:
# include Enum choices
termline += " : " + trait.info_rst() # type:ignore[attr-defined]
else:
termline += " : " + ttype
lines.append(termline)
# Default value
try:
dvr = trait.default_value_repr()
except Exception:
dvr = None # ignore defaults we can't construct
if dvr is not None:
if len(dvr) > 64:
dvr = dvr[:61] + "..."
# Double up backslashes, so they get to the rendered docs
dvr = dvr.replace("\\n", "\\\\n")
lines.append(indent("Default: ``%s``" % dvr))
lines.append("")
help = trait.help or "No description"
lines.append(indent(dedent(help)))
# Blank line
lines.append("")
return "\n".join(lines)
class LoggingConfigurable(Configurable):
"""A parent class for Configurables that log.
Subclasses have a log trait, and the default behavior
is to get the logger from the currently running Application.
"""
log = Any(help="Logger or LoggerAdapter instance", allow_none=False)
@validate("log")
def _validate_log(self, proposal: Bunch) -> LoggerType:
if not isinstance(proposal.value, (logging.Logger, logging.LoggerAdapter)):
# warn about unsupported type, but be lenient to allow for duck typing
warnings.warn(
f"{self.__class__.__name__}.log should be a Logger or LoggerAdapter,"
f" got {proposal.value}.",
UserWarning,
stacklevel=2,
)
return t.cast(LoggerType, proposal.value)
@default("log")
def _log_default(self) -> LoggerType:
if isinstance(self.parent, LoggingConfigurable):
assert self.parent is not None
return t.cast(logging.Logger, self.parent.log)
from traitlets import log
return log.get_logger()
def _get_log_handler(self) -> logging.Handler | None:
"""Return the default Handler
Returns None if none can be found
Deprecated, this now returns the first log handler which may or may
not be the default one.
"""
if not self.log:
return None
logger: logging.Logger = (
self.log if isinstance(self.log, logging.Logger) else self.log.logger
)
if not getattr(logger, "handlers", None):
# no handlers attribute or empty handlers list
return None
return logger.handlers[0]
CT = t.TypeVar("CT", bound="SingletonConfigurable")
class SingletonConfigurable(LoggingConfigurable):
"""A configurable that only allows one instance.
This class is for classes that should only have one instance of itself
or *any* subclass. To create and retrieve such a class use the
:meth:`SingletonConfigurable.instance` method.
"""
_instance = None
@classmethod
def _walk_mro(cls) -> t.Generator[type[SingletonConfigurable], None, None]:
"""Walk the cls.mro() for parent classes that are also singletons
For use in instance()
"""
for subclass in cls.mro():
if (
issubclass(cls, subclass)
and issubclass(subclass, SingletonConfigurable)
and subclass != SingletonConfigurable
):
yield subclass
@classmethod
def clear_instance(cls) -> None:
"""unset _instance for this class and singleton parents."""
if not cls.initialized():
return
for subclass in cls._walk_mro():
if isinstance(subclass._instance, cls):
# only clear instances that are instances
# of the calling class
subclass._instance = None # type:ignore[unreachable]
@classmethod
def instance(cls: type[CT], *args: t.Any, **kwargs: t.Any) -> CT:
"""Returns a global instance of this class.
This method creates a new instance if none has previously been created
and returns the previously created instance if one already exists.
The arguments and keyword arguments passed to this method are passed
on to the :meth:`__init__` method of the class upon instantiation.
Examples
--------
Create a singleton class using instance, and retrieve it::
>>> from traitlets.config.configurable import SingletonConfigurable
>>> class Foo(SingletonConfigurable): pass
>>> foo = Foo.instance()
>>> foo == Foo.instance()
True
Create a subclass that is retrieved using the base class instance::
>>> class Bar(SingletonConfigurable): pass
>>> class Bam(Bar): pass
>>> bam = Bam.instance()
>>> bam == Bar.instance()
True
"""
# Create and save the instance
if cls._instance is None:
inst = cls(*args, **kwargs)
# Now make sure that the instance will also be returned by
# parent classes' _instance attribute.
for subclass in cls._walk_mro():
subclass._instance = inst
if isinstance(cls._instance, cls):
return cls._instance
else:
raise MultipleInstanceError(
f"An incompatible sibling of '{cls.__name__}' is already instantiated"
f" as singleton: {type(cls._instance).__name__}"
)
@classmethod
def initialized(cls) -> bool:
"""Has an instance been created?"""
return hasattr(cls, "_instance") and cls._instance is not None

File diff suppressed because it is too large.

View File

@@ -0,0 +1,84 @@
"""Manager to read and modify config data in JSON files.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
import errno
import json
import os
from typing import Any
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode
def recursive_update(target: dict[Any, Any], new: dict[Any, Any]) -> None:
"""Recursively update one dictionary using another.
None values will delete their keys.
"""
for k, v in new.items():
if isinstance(v, dict):
if k not in target:
target[k] = {}
recursive_update(target[k], v)
if not target[k]:
# Prune empty subdicts
del target[k]
elif v is None:
target.pop(k, None)
else:
target[k] = v
class BaseJSONConfigManager(LoggingConfigurable):
"""General JSON config manager
Deals with persisting/storing config in a json file
"""
config_dir = Unicode(".")
def ensure_config_dir_exists(self) -> None:
try:
os.makedirs(self.config_dir, 0o755)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def file_name(self, section_name: str) -> str:
return os.path.join(self.config_dir, section_name + ".json")
def get(self, section_name: str) -> Any:
"""Retrieve the config data for the specified section.
Returns the data as a dictionary, or an empty dictionary if the file
doesn't exist.
"""
filename = self.file_name(section_name)
if os.path.isfile(filename):
with open(filename, encoding="utf-8") as f:
return json.load(f)
else:
return {}
def set(self, section_name: str, data: Any) -> None:
"""Store the given config data."""
filename = self.file_name(section_name)
self.ensure_config_dir_exists()
with open(filename, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
def update(self, section_name: str, new_data: Any) -> Any:
"""Modify the config section by recursively updating it with new_data.
Returns the modified config data as a dictionary.
"""
data = self.get(section_name)
recursive_update(data, new_data)
self.set(section_name, data)
return data
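
A short sketch of the manager above, assuming it is importable as traitlets.config.manager (directory and section names are illustrative):

import tempfile
from traitlets.config.manager import BaseJSONConfigManager

m = BaseJSONConfigManager(config_dir=tempfile.mkdtemp())
m.set("notebook", {"theme": {"dark": True}, "width": 80})
# None values delete keys; empty sub-dicts are pruned by recursive_update().
m.update("notebook", {"theme": {"dark": None}, "height": 24})
print(m.get("notebook"))   # -> {'width': 80, 'height': 24}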

View File

@@ -0,0 +1,164 @@
"""Machinery for documenting traitlets config options with Sphinx.
This includes:
- A Sphinx extension defining directives and roles for config options.
- A function to generate an rst file given an Application instance.
To make this documentation, first set this module as an extension in Sphinx's
conf.py::
extensions = [
# ...
'traitlets.config.sphinxdoc',
]
Autogenerate the config documentation by running code like this before
Sphinx builds::
from traitlets.config.sphinxdoc import write_doc
from myapp import MyApplication
write_doc('config/options.rst', # File to write
'MyApp config options', # Title
MyApplication()
)
The generated rST syntax looks like this::
.. configtrait:: Application.log_datefmt
Description goes here.
Cross reference like this: :configtrait:`Application.log_datefmt`.
"""
from __future__ import annotations
import typing as t
from collections import defaultdict
from textwrap import dedent
from traitlets import HasTraits, Undefined
from traitlets.config.application import Application
from traitlets.utils.text import indent
def setup(app: t.Any) -> dict[str, t.Any]:
"""Registers the Sphinx extension.
You shouldn't need to call this directly; configure Sphinx to use this
module instead.
"""
app.add_object_type("configtrait", "configtrait", objname="Config option")
return {"parallel_read_safe": True, "parallel_write_safe": True}
def interesting_default_value(dv: t.Any) -> bool:
if (dv is None) or (dv is Undefined):
return False
if isinstance(dv, (str, list, tuple, dict, set)):
return bool(dv)
return True
def format_aliases(aliases: list[str]) -> str:
fmted = []
for a in aliases:
dashes = "-" if len(a) == 1 else "--"
fmted.append(f"``{dashes}{a}``")
return ", ".join(fmted)
def class_config_rst_doc(cls: type[HasTraits], trait_aliases: dict[str, t.Any]) -> str:
"""Generate rST documentation for this class' config options.
Excludes traits defined on parent classes.
"""
lines = []
classname = cls.__name__
for _, trait in sorted(cls.class_traits(config=True).items()):
ttype = trait.__class__.__name__
fullname = classname + "." + (trait.name or "")
lines += [".. configtrait:: " + fullname, ""]
help = trait.help.rstrip() or "No description"
lines.append(indent(dedent(help)) + "\n")
# Choices or type
if "Enum" in ttype:
# include Enum choices
lines.append(indent(":options: " + ", ".join("``%r``" % x for x in trait.values))) # type:ignore[attr-defined]
else:
lines.append(indent(":trait type: " + ttype))
# Default value
# Ignore boring default values like None, [] or ''
if interesting_default_value(trait.default_value):
try:
dvr = trait.default_value_repr()
except Exception:
dvr = None # ignore defaults we can't construct
if dvr is not None:
if len(dvr) > 64:
dvr = dvr[:61] + "..."
# Double up backslashes, so they get to the rendered docs
dvr = dvr.replace("\\n", "\\\\n")
lines.append(indent(":default: ``%s``" % dvr))
# Command line aliases
if trait_aliases[fullname]:
fmt_aliases = format_aliases(trait_aliases[fullname])
lines.append(indent(":CLI option: " + fmt_aliases))
# Blank line
lines.append("")
return "\n".join(lines)
def reverse_aliases(app: Application) -> dict[str, list[str]]:
"""Produce a mapping of trait names to lists of command line aliases."""
res = defaultdict(list)
for alias, trait in app.aliases.items():
res[trait].append(alias)
# Flags also often act as aliases for a boolean trait.
# Treat flags which set one trait to True as aliases.
for flag, (cfg, _) in app.flags.items():
if len(cfg) == 1:
classname = next(iter(cfg))
cls_cfg = cfg[classname]
if len(cls_cfg) == 1:
traitname = next(iter(cls_cfg))
if cls_cfg[traitname] is True:
res[classname + "." + traitname].append(flag)
return res
def write_doc(path: str, title: str, app: Application, preamble: str | None = None) -> None:
"""Write a rst file documenting config options for a traitlets application.
Parameters
----------
path : str
The file to be written
title : str
The human-readable title of the document
app : traitlets.config.Application
An instance of the application class to be documented
preamble : str
Extra text to add just after the title (optional)
"""
trait_aliases = reverse_aliases(app)
with open(path, "w") as f:
f.write(title + "\n")
f.write(("=" * len(title)) + "\n")
f.write("\n")
if preamble is not None:
f.write(preamble + "\n\n")
for c in app._classes_inc_parents():
f.write(class_config_rst_doc(c, trait_aliases))
f.write("\n")

View File

@@ -0,0 +1,31 @@
"""Grab the global logger instance."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
import logging
from typing import Any
_logger: logging.Logger | logging.LoggerAdapter[Any] | None = None
def get_logger() -> logging.Logger | logging.LoggerAdapter[Any]:
"""Grab the global logger instance.
If a global Application is instantiated, grab its logger.
Otherwise, grab the root logger.
"""
global _logger # noqa: PLW0603
if _logger is None:
from .config import Application
if Application.initialized():
_logger = Application.instance().log
else:
_logger = logging.getLogger("traitlets")
# Add a NullHandler to silence warnings about not being
# initialized, per best practice for libraries.
_logger.addHandler(logging.NullHandler())
return _logger
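
A tiny usage sketch (the module is imported as traitlets.log elsewhere in this commit, e.g. in configurable.py):

from traitlets import log

logger = log.get_logger()
# Silenced by the NullHandler unless a running Application configured logging.
logger.debug("traitlets debug message")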

View File

@@ -0,0 +1,59 @@
from __future__ import annotations
from typing import Any
from unittest import TestCase
from traitlets import TraitError
class TraitTestBase(TestCase):
"""A best testing class for basic trait types."""
def assign(self, value: Any) -> None:
self.obj.value = value # type:ignore[attr-defined]
def coerce(self, value: Any) -> Any:
return value
def test_good_values(self) -> None:
if hasattr(self, "_good_values"):
for value in self._good_values:
self.assign(value)
self.assertEqual(self.obj.value, self.coerce(value)) # type:ignore[attr-defined]
def test_bad_values(self) -> None:
if hasattr(self, "_bad_values"):
for value in self._bad_values:
try:
self.assertRaises(TraitError, self.assign, value)
except AssertionError:
raise AssertionError(value) from None
def test_default_value(self) -> None:
if hasattr(self, "_default_value"):
self.assertEqual(self._default_value, self.obj.value) # type:ignore[attr-defined]
def test_allow_none(self) -> None:
if (
hasattr(self, "_bad_values")
and hasattr(self, "_good_values")
and None in self._bad_values
):
trait = self.obj.traits()["value"] # type:ignore[attr-defined]
try:
trait.allow_none = True
self._bad_values.remove(None)
# skip coerce. Allow None casts None to None.
self.assign(None)
self.assertEqual(self.obj.value, None) # type:ignore[attr-defined]
self.test_good_values()
self.test_bad_values()
finally:
# tear down
trait.allow_none = False
self._bad_values.append(None)
def tearDown(self) -> None:
# restore default value after tests, if set
if hasattr(self, "_default_value"):
self.obj.value = self._default_value # type:ignore[attr-defined]
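
A sketch of how this helper is typically subclassed (the trait holder and test values are illustrative; TraitTestBase refers to the class defined above):

from traitlets import HasTraits, Int

class _IntHolder(HasTraits):
    value = Int(99)

class TestIntLike(TraitTestBase):
    obj = _IntHolder()
    _default_value = 99
    _good_values = [10, -10]
    _bad_values = ["ten", 1.5, None]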

View File

@@ -0,0 +1,42 @@
from __future__ import annotations
import sys
from subprocess import PIPE, Popen
from typing import Any, Sequence
def get_output_error_code(cmd: str | Sequence[str]) -> tuple[str, str, Any]:
"""Get stdout, stderr, and exit code from running a command"""
p = Popen(cmd, stdout=PIPE, stderr=PIPE) # noqa: S603
out, err = p.communicate()
out_str = out.decode("utf8", "replace")
err_str = err.decode("utf8", "replace")
return out_str, err_str, p.returncode
def check_help_output(pkg: str, subcommand: Sequence[str] | None = None) -> tuple[str, str]:
"""test that `python -m PKG [subcommand] -h` works"""
cmd = [sys.executable, "-m", pkg]
if subcommand:
cmd.extend(subcommand)
cmd.append("-h")
out, err, rc = get_output_error_code(cmd)
assert rc == 0, err
assert "Traceback" not in err
assert "Options" in out
assert "--help-all" in out
return out, err
def check_help_all_output(pkg: str, subcommand: Sequence[str] | None = None) -> tuple[str, str]:
"""test that `python -m PKG --help-all` works"""
cmd = [sys.executable, "-m", pkg]
if subcommand:
cmd.extend(subcommand)
cmd.append("--help-all")
out, err, rc = get_output_error_code(cmd)
assert rc == 0, err
assert "Traceback" not in err
assert "Options" in out
assert "Class options" in out
return out, err

File diff suppressed because it is too large.

View File

@@ -0,0 +1,91 @@
from __future__ import annotations
import os
import pathlib
from typing import Sequence
# vestigial things from IPython_genutils.
def cast_unicode(s: str | bytes, encoding: str = "utf-8") -> str:
if isinstance(s, bytes):
return s.decode(encoding, "replace")
return s
def filefind(filename: str, path_dirs: Sequence[str] | None = None) -> str:
"""Find a file by looking through a sequence of paths.
This iterates through a sequence of paths looking for a file and returns
the full, absolute path of the first occurrence of the file. If no set of
path dirs is given, the filename is tested as is, after running through
:func:`expandvars` and :func:`expanduser`. Thus a simple call::
filefind('myfile.txt')
will find the file in the current working dir, but::
filefind('~/myfile.txt')
will find the file in the user's home directory. This function does not
automatically try any paths, such as the cwd or the user's home directory.
Parameters
----------
filename : str
The filename to look for.
path_dirs : str, None or sequence of str
The sequence of paths to look for the file in. If None, the filename
needs to be absolute or be in the cwd. If a string, the string is
put into a sequence and then searched. If a sequence, walk through
each element and join with ``filename``, calling :func:`expandvars`
and :func:`expanduser` before testing for existence.
Returns
-------
Raises :exc:`IOError` or returns absolute path to file.
"""
# If paths are quoted, abspath gets confused, strip them...
filename = filename.strip('"').strip("'")
# If the input is an absolute path, just check it exists
if os.path.isabs(filename) and os.path.isfile(filename):
return filename
if path_dirs is None:
path_dirs = ("",)
elif isinstance(path_dirs, str):
path_dirs = (path_dirs,)
elif isinstance(path_dirs, pathlib.Path):
path_dirs = (str(path_dirs),)
for path in path_dirs:
if path == ".":
path = os.getcwd()
testname = expand_path(os.path.join(path, filename))
if os.path.isfile(testname):
return os.path.abspath(testname)
raise OSError(f"File {filename!r} does not exist in any of the search paths: {path_dirs!r}")
def expand_path(s: str) -> str:
"""Expand $VARS and ~names in a string, like a shell
:Examples:
In [2]: os.environ['FOO']='test'
In [3]: expand_path('variable FOO is $FOO')
Out[3]: 'variable FOO is test'
"""
# This is a pretty subtle hack. When expand user is given a UNC path
# on Windows (\\server\share$\%username%), os.path.expandvars, removes
# the $ to get (\\server\share\%username%). I think it considered $
alone an empty var. But we need the $ to remain there (it indicates
# a hidden share).
if os.name == "nt":
s = s.replace("$\\", "IPYTHON_TEMP")
s = os.path.expandvars(os.path.expanduser(s))
if os.name == "nt":
s = s.replace("IPYTHON_TEMP", "$\\")
return s
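
A minimal sketch of filefind() and its failure mode (assumes the helpers are importable from traitlets.utils; file names are illustrative):

import os, tempfile
from traitlets.utils import filefind

d = tempfile.mkdtemp()
open(os.path.join(d, "app_config.py"), "w").close()
print(filefind("app_config.py", [d]))   # absolute path inside d
try:
    filefind("missing.txt", [d])
except OSError as e:
    print(e)                            # names the file and the searched paths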

View File

@@ -0,0 +1,29 @@
"""Yet another implementation of bunch
attribute-access of items on a dict.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
from typing import Any
class Bunch(dict): # type:ignore[type-arg]
"""A dict with attribute-access"""
def __getattr__(self, key: str) -> Any:
try:
return self.__getitem__(key)
except KeyError as e:
raise AttributeError(key) from e
def __setattr__(self, key: str, value: Any) -> None:
self.__setitem__(key, value)
def __dir__(self) -> list[str]:
# py2-compat: can't use super because dict doesn't have __dir__
names = dir({})
names.extend(self.keys())
return names
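
A quick sketch of Bunch attribute access (keys are illustrative):

from traitlets.utils.bunch import Bunch

b = Bunch(name="x", value=3)
b.extra = True                        # same as b["extra"] = True
print(b.name, b["value"], b.extra)    # -> x 3 True
print("extra" in dir(b))              # keys appear in dir() for tab-completion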

View File

@@ -0,0 +1,86 @@
"""Useful decorators for Traitlets users."""
from __future__ import annotations
import copy
from inspect import Parameter, Signature, signature
from typing import Any, Type, TypeVar
from ..traitlets import HasTraits, Undefined
def _get_default(value: Any) -> Any:
"""Get default argument value, given the trait default value."""
return Parameter.empty if value == Undefined else value
T = TypeVar("T", bound=HasTraits)
def signature_has_traits(cls: Type[T]) -> Type[T]:
"""Return a decorated class with a constructor signature that contain Trait names as kwargs."""
traits = [
(name, _get_default(value.default_value))
for name, value in cls.class_traits().items()
if not name.startswith("_")
]
# Taking the __init__ signature, as the cls signature is not initialized yet
old_signature = signature(cls.__init__)
old_parameter_names = list(old_signature.parameters)
old_positional_parameters = []
old_var_positional_parameter = None # This won't be None if the old signature contains *args
old_keyword_only_parameters = []
old_var_keyword_parameter = None # This won't be None if the old signature contains **kwargs
for parameter_name in old_signature.parameters:
# Copy the parameter
parameter = copy.copy(old_signature.parameters[parameter_name])
if (
parameter.kind is Parameter.POSITIONAL_ONLY
or parameter.kind is Parameter.POSITIONAL_OR_KEYWORD
):
old_positional_parameters.append(parameter)
elif parameter.kind is Parameter.VAR_POSITIONAL:
old_var_positional_parameter = parameter
elif parameter.kind is Parameter.KEYWORD_ONLY:
old_keyword_only_parameters.append(parameter)
elif parameter.kind is Parameter.VAR_KEYWORD:
old_var_keyword_parameter = parameter
# Unfortunately, if the old signature does not contain **kwargs, we can't do anything,
# because it can't accept traits as keyword arguments
if old_var_keyword_parameter is None:
raise RuntimeError(
f"The {cls} constructor does not take **kwargs, which means that the signature can not be expanded with trait names"
)
new_parameters = []
# Append the old positional parameters (except `self` which is the first parameter)
new_parameters += old_positional_parameters[1:]
# Append *args if the old signature had it
if old_var_positional_parameter is not None:
new_parameters.append(old_var_positional_parameter)
# Append the old keyword only parameters
new_parameters += old_keyword_only_parameters
# Append trait names as keyword only parameters in the signature
new_parameters += [
Parameter(name, kind=Parameter.KEYWORD_ONLY, default=default)
for name, default in traits
if name not in old_parameter_names
]
# Append **kwargs
new_parameters.append(old_var_keyword_parameter)
cls.__signature__ = Signature(new_parameters) # type:ignore[attr-defined]
return cls
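
A sketch of the expanded signature produced by the decorator above (the Task class is hypothetical):

import inspect
from traitlets import HasTraits, Int, Unicode
from traitlets.utils.decorators import signature_has_traits

@signature_has_traits
class Task(HasTraits):
    retries = Int(3)
    label = Unicode("task")

# Roughly: (*args, retries=3, label='task', **kwargs)
print(inspect.signature(Task))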

View File

@@ -0,0 +1,182 @@
from __future__ import annotations
import inspect
import re
import types
from typing import Any
def describe(
article: str | None,
value: Any,
name: str | None = None,
verbose: bool = False,
capital: bool = False,
) -> str:
"""Return string that describes a value
Parameters
----------
article : str or None
A definite or indefinite article. If the article is
indefinite (i.e. "a" or "an") the appropriate one
will be inferred. Thus, the arguments of ``describe``
can themselves represent what the resulting string
will actually look like. If None, then no article
will be prepended to the result. For non-articled
description, values that are instances are treated
definitely, while classes are handled indefinitely.
value : any
The value which will be named.
name : str or None (default: None)
Only applies when ``article`` is "the" - this
``name`` is a definite reference to the value.
By default one will be inferred from the value's
type and repr methods.
verbose : bool (default: False)
Whether the name should be concise or verbose. When
possible, verbose names include the module, and/or
class name where an object was defined.
capital : bool (default: False)
Whether the first letter of the article should
be capitalized or not. By default it is not.
Examples
--------
Indefinite description:
>>> describe("a", object())
'an object'
>>> describe("a", object)
'an object'
>>> describe("a", type(object))
'a type'
Definite description:
>>> describe("the", object())
"the object at '...'"
>>> describe("the", object)
'the object object'
>>> describe("the", type(object))
'the type type'
Definitely named description:
>>> describe("the", object(), "I made")
'the object I made'
>>> describe("the", object, "I will use")
'the object I will use'
"""
if isinstance(article, str):
article = article.lower()
if not inspect.isclass(value):
typename = type(value).__name__
else:
typename = value.__name__
if verbose:
typename = _prefix(value) + typename
if article == "the" or (article is None and not inspect.isclass(value)):
if name is not None:
result = f"{typename} {name}"
if article is not None:
return add_article(result, True, capital)
else:
return result
else:
tick_wrap = False
if inspect.isclass(value):
name = value.__name__
elif isinstance(value, types.FunctionType):
name = value.__name__
tick_wrap = True
elif isinstance(value, types.MethodType):
name = value.__func__.__name__
tick_wrap = True
elif type(value).__repr__ in (
object.__repr__,
type.__repr__,
): # type:ignore[comparison-overlap]
name = "at '%s'" % hex(id(value))
verbose = False
else:
name = repr(value)
verbose = False
if verbose:
name = _prefix(value) + name
if tick_wrap:
name = name.join("''")
return describe(article, value, name=name, verbose=verbose, capital=capital)
elif article in ("a", "an") or article is None:
if article is None:
return typename
return add_article(typename, False, capital)
else:
raise ValueError(
"The 'article' argument should be 'the', 'a', 'an', or None not %r" % article
)
def _prefix(value: Any) -> str:
if isinstance(value, types.MethodType):
name = describe(None, value.__self__, verbose=True) + "."
else:
module = inspect.getmodule(value)
if module is not None and module.__name__ != "builtins":
name = module.__name__ + "."
else:
name = ""
return name
def class_of(value: Any) -> Any:
"""Returns a string of the value's type with an indefinite article.
For example 'an Image' or 'a PlotValue'.
"""
if inspect.isclass(value):
return add_article(value.__name__)
else:
return class_of(type(value))
def add_article(name: str, definite: bool = False, capital: bool = False) -> str:
"""Returns the string with a prepended article.
The input does not need to begin with a letter.
Parameters
----------
name : str
Name to which to prepend an article
definite : bool (default: False)
Whether the article is definite or not.
Indefinite articles being 'a' and 'an',
while 'the' is definite.
capital : bool (default: False)
Whether the added article should have
its first letter capitalized or not.
"""
if definite:
result = "the " + name
else:
first_letters = re.compile(r"[\W_]+").sub("", name)
if first_letters[:1].lower() in "aeiou":
result = "an " + name
else:
result = "a " + name
if capital:
return result[0].upper() + result[1:]
else:
return result
def repr_type(obj: Any) -> str:
"""Return a string representation of a value and its type for readable
error messages.
"""
the_type = type(obj)
return f"{obj!r} {the_type!r}"

View File

@@ -0,0 +1,51 @@
"""
getargspec excerpted from:
sphinx.util.inspect
~~~~~~~~~~~~~~~~~~~
Helpers for inspecting Python modules.
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import annotations
import inspect
from functools import partial
from typing import Any
# Unmodified from sphinx below this line
def getargspec(func: Any) -> inspect.FullArgSpec:
"""Like inspect.getargspec but supports functools.partial as well."""
if inspect.ismethod(func):
func = func.__func__
if type(func) is partial:
orig_func = func.func
argspec = getargspec(orig_func)
args = list(argspec[0])
defaults = list(argspec[3] or ())
kwoargs = list(argspec[4])
kwodefs = dict(argspec[5] or {})
if func.args:
args = args[len(func.args) :]
for arg in func.keywords or ():
try:
i = args.index(arg) - len(args)
del args[i]
try:
del defaults[i]
except IndexError:
pass
except ValueError: # must be a kwonly arg
i = kwoargs.index(arg)
del kwoargs[i]
del kwodefs[arg]
return inspect.FullArgSpec(
args, argspec[1], argspec[2], tuple(defaults), kwoargs, kwodefs, argspec[6]
)
while hasattr(func, "__wrapped__"):
func = func.__wrapped__
if not inspect.isfunction(func):
raise TypeError("%r is not a Python function" % func)
return inspect.getfullargspec(func)
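
A small sketch of getargspec() resolving a functools.partial (assumes the module path traitlets.utils.getargspec):

from functools import partial
from traitlets.utils.getargspec import getargspec

def f(a, b, c=1, *, d=2):
    return a + b + c + d

spec = getargspec(partial(f, 10))     # 'a' is consumed by the partial
print(spec.args, spec.defaults, spec.kwonlyargs)   # -> ['b', 'c'] (1,) ['d']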

View File

@@ -0,0 +1,41 @@
"""
A simple utility to import something by its string name.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
from typing import Any
def import_item(name: str) -> Any:
"""Import and return ``bar`` given the string ``foo.bar``.
Calling ``bar = import_item("foo.bar")`` is the functional equivalent of
executing the code ``from foo import bar``.
Parameters
----------
name : string
The fully qualified name of the module/package being imported.
Returns
-------
mod : module object
The module that was imported.
"""
if not isinstance(name, str):
raise TypeError("import_item accepts strings, not '%s'." % type(name))
parts = name.rsplit(".", 1)
if len(parts) == 2:
# called with 'foo.bar....'
package, obj = parts
module = __import__(package, fromlist=[obj])
try:
pak = getattr(module, obj)
except AttributeError as e:
raise ImportError("No module named %s" % obj) from e
return pak
else:
# called with un-dotted string
return __import__(parts[0])
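
Two quick examples of import_item() against the standard library:

from traitlets.utils.importstring import import_item

join = import_item("os.path.join")   # equivalent to: from os.path import join
os_mod = import_item("os")           # un-dotted name: a plain import
print(join("a", "b"), os_mod.sep)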

View File

@@ -0,0 +1,41 @@
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
from typing import Any, Dict
def nested_update(this: Dict[Any, Any], that: Dict[Any, Any]) -> Dict[Any, Any]:
"""Merge two nested dictionaries.
Effectively a recursive ``dict.update``.
Examples
--------
Merge two flat dictionaries:
>>> nested_update(
... {'a': 1, 'b': 2},
... {'b': 3, 'c': 4}
... )
{'a': 1, 'b': 3, 'c': 4}
Merge two nested dictionaries:
>>> nested_update(
... {'x': {'a': 1, 'b': 2}, 'y': 5, 'z': 6},
... {'x': {'b': 3, 'c': 4}, 'z': 7, '0': 8},
... )
{'x': {'a': 1, 'b': 3, 'c': 4}, 'y': 5, 'z': 7, '0': 8}
"""
for key, value in this.items():
if isinstance(value, dict):
if key in that and isinstance(that[key], dict):
nested_update(this[key], that[key])
elif key in that:
this[key] = that[key]
for key, value in that.items():
if key not in this:
this[key] = value
return this

View File

@@ -0,0 +1,24 @@
"""Sentinel class for constants with useful reprs"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations
import typing as t
class Sentinel:
def __init__(self, name: str, module: t.Any, docstring: str | None = None) -> None:
self.name = name
self.module = module
if docstring:
self.__doc__ = docstring
def __repr__(self) -> str:
return str(self.module) + "." + self.name
def __copy__(self) -> Sentinel:
return self
def __deepcopy__(self, memo: t.Any) -> Sentinel:
return self
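
A short sketch of Sentinel behaviour (the constant name and module string are illustrative):

import copy
from traitlets.utils.sentinel import Sentinel

MISSING = Sentinel("MISSING", "mypkg", "Marker for 'no value given'.")
print(repr(MISSING))                       # -> mypkg.MISSING
print(copy.deepcopy(MISSING) is MISSING)   # -> True; copies return the same object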

View File

@@ -0,0 +1,40 @@
"""
Utilities imported from ipython_genutils
"""
from __future__ import annotations
import re
import textwrap
from textwrap import dedent
from textwrap import indent as _indent
from typing import List
def indent(val: str) -> str:
return _indent(val, " ")
def wrap_paragraphs(text: str, ncols: int = 80) -> List[str]:
"""Wrap multiple paragraphs to fit a specified width.
This is equivalent to textwrap.wrap, but with support for multiple
paragraphs, as separated by empty lines.
Returns
-------
list of complete paragraphs, wrapped to fill `ncols` columns.
"""
paragraph_re = re.compile(r"\n(\s*\n)+", re.MULTILINE)
text = dedent(text).strip()
paragraphs = paragraph_re.split(text)[::2] # every other entry is space
out_ps = []
indent_re = re.compile(r"\n\s+", re.MULTILINE)
for p in paragraphs:
# presume indentation that survives dedent is meaningful formatting,
# so don't fill unless text is flush.
if indent_re.search(p) is None:
# wrap paragraph
p = textwrap.fill(p, ncols)
out_ps.append(p)
return out_ps
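
A brief sketch of wrap_paragraphs() and indent() on a two-paragraph help string:

from traitlets.utils.text import indent, wrap_paragraphs

help_text = (
    "First paragraph, long enough that it will be re-wrapped to the requested width.\n"
    "\n"
    "Second paragraph."
)
for p in wrap_paragraphs(help_text, 40):
    print(indent(p))
    print()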

View File

@@ -0,0 +1,64 @@
from __future__ import annotations
import inspect
import os
import typing as t
import warnings
def warn(msg: str, category: t.Any, *, stacklevel: int, source: t.Any = None) -> None:
"""Like warnings.warn(), but category and stacklevel are required.
You pretty much never want the default stacklevel of 1, so this helps
encourage setting it explicitly."""
warnings.warn(msg, category=category, stacklevel=stacklevel, source=source)
def deprecated_method(method: t.Any, cls: t.Any, method_name: str, msg: str) -> None:
"""Show deprecation warning about a magic method definition.
Uses warn_explicit to bind warning to method definition instead of triggering code,
which isn't relevant.
"""
warn_msg = f"{cls.__name__}.{method_name} is deprecated in traitlets 4.1: {msg}"
for parent in inspect.getmro(cls):
if method_name in parent.__dict__:
cls = parent
break
# limit deprecation messages to once per package
package_name = cls.__module__.split(".", 1)[0]
key = (package_name, msg)
if not should_warn(key):
return
try:
fname = inspect.getsourcefile(method) or "<unknown>"
lineno = inspect.getsourcelines(method)[1] or 0
except (OSError, TypeError) as e:
# Failed to inspect for some reason
warn(
warn_msg + ("\n(inspection failed) %s" % e),
DeprecationWarning,
stacklevel=2,
)
else:
warnings.warn_explicit(warn_msg, DeprecationWarning, fname, lineno)
_deprecations_shown = set()
def should_warn(key: t.Any) -> bool:
"""Add our own checks for too many deprecation warnings.
Limit to once per package.
"""
env_flag = os.environ.get("TRAITLETS_ALL_DEPRECATIONS")
if env_flag and env_flag != "0":
return True
if key not in _deprecations_shown:
_deprecations_shown.add(key)
return True
else:
return False
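
A sketch of the once-per-package gating implemented by should_warn() (package and message below are illustrative):

from traitlets.utils.warnings import should_warn

key = ("mypkg", "old_method is deprecated")
print(should_warn(key))   # -> True on first use for this package/message
print(should_warn(key))   # -> False afterwards, unless TRAITLETS_ALL_DEPRECATIONS is set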