diff --git a/.idea/.gitignore b/.idea/.gitignore
deleted file mode 100644
index 26d3352..0000000
--- a/.idea/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2d..0000000
--- a/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
deleted file mode 100644
index d460eee..0000000
--- a/.idea/misc.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index af20f60..0000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/testpks.iml b/.idea/testpks.iml
deleted file mode 100644
index 5c0fbc8..0000000
--- a/.idea/testpks.iml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 94a25f7..0000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.read_stats.py.swp b/.read_stats.py.swp
deleted file mode 100644
index d016989..0000000
Binary files a/.read_stats.py.swp and /dev/null differ
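Note: everything below this point deletes a committed Python virtual environment (.venv/) from version control, in addition to the JetBrains project files (.idea/) and the Vim swap file removed above. To keep these paths from being re-committed, the repository's .gitignore could gain entries along these lines (a minimal sketch; adjust to the project's actual layout):

    .idea/
    .venv/
    *.swp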
diff --git a/.venv/bin/Activate.ps1 b/.venv/bin/Activate.ps1
deleted file mode 100644
index b49d77b..0000000
--- a/.venv/bin/Activate.ps1
+++ /dev/null
@@ -1,247 +0,0 @@
-<#
-.Synopsis
-Activate a Python virtual environment for the current PowerShell session.
-
-.Description
-Pushes the python executable for a virtual environment to the front of the
-$Env:PATH environment variable and sets the prompt to signify that you are
-in a Python virtual environment. Makes use of the command line switches as
-well as the `pyvenv.cfg` file values present in the virtual environment.
-
-.Parameter VenvDir
-Path to the directory that contains the virtual environment to activate. The
-default value for this is the parent of the directory that the Activate.ps1
-script is located within.
-
-.Parameter Prompt
-The prompt prefix to display when this virtual environment is activated. By
-default, this prompt is the name of the virtual environment folder (VenvDir)
-surrounded by parentheses and followed by a single space (ie. '(.venv) ').
-
-.Example
-Activate.ps1
-Activates the Python virtual environment that contains the Activate.ps1 script.
-
-.Example
-Activate.ps1 -Verbose
-Activates the Python virtual environment that contains the Activate.ps1 script,
-and shows extra information about the activation as it executes.
-
-.Example
-Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
-Activates the Python virtual environment located in the specified location.
-
-.Example
-Activate.ps1 -Prompt "MyPython"
-Activates the Python virtual environment that contains the Activate.ps1 script,
-and prefixes the current prompt with the specified string (surrounded in
-parentheses) while the virtual environment is active.
-
-.Notes
-On Windows, it may be required to enable this Activate.ps1 script by setting the
-execution policy for the user. You can do this by issuing the following PowerShell
-command:
-
-PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
-
-For more information on Execution Policies:
-https://go.microsoft.com/fwlink/?LinkID=135170
-
-#>
-Param(
- [Parameter(Mandatory = $false)]
- [String]
- $VenvDir,
- [Parameter(Mandatory = $false)]
- [String]
- $Prompt
-)
-
-<# Function declarations --------------------------------------------------- #>
-
-<#
-.Synopsis
-Remove all shell session elements added by the Activate script, including the
-addition of the virtual environment's Python executable from the beginning of
-the PATH variable.
-
-.Parameter NonDestructive
-If present, do not remove this function from the global namespace for the
-session.
-
-#>
-function global:deactivate ([switch]$NonDestructive) {
- # Revert to original values
-
- # The prior prompt:
- if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
- Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
- Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
- }
-
- # The prior PYTHONHOME:
- if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
- Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
- Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
- }
-
- # The prior PATH:
- if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
- Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
- Remove-Item -Path Env:_OLD_VIRTUAL_PATH
- }
-
- # Just remove the VIRTUAL_ENV altogether:
- if (Test-Path -Path Env:VIRTUAL_ENV) {
- Remove-Item -Path env:VIRTUAL_ENV
- }
-
- # Just remove VIRTUAL_ENV_PROMPT altogether.
- if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
- Remove-Item -Path env:VIRTUAL_ENV_PROMPT
- }
-
- # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
- if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
- Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
- }
-
- # Leave deactivate function in the global namespace if requested:
- if (-not $NonDestructive) {
- Remove-Item -Path function:deactivate
- }
-}
-
-<#
-.Description
-Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
-given folder, and returns them in a map.
-
-For each line in the pyvenv.cfg file, if that line can be parsed into exactly
-two strings separated by `=` (with any amount of whitespace surrounding the =)
-then it is considered a `key = value` line. The left hand string is the key,
-the right hand is the value.
-
-If the value starts with a `'` or a `"` then the first and last character is
-stripped from the value before being captured.
-
-.Parameter ConfigDir
-Path to the directory that contains the `pyvenv.cfg` file.
-#>
-function Get-PyVenvConfig(
- [String]
- $ConfigDir
-) {
- Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
-
- # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
- $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
-
- # An empty map will be returned if no config file is found.
- $pyvenvConfig = @{ }
-
- if ($pyvenvConfigPath) {
-
- Write-Verbose "File exists, parse `key = value` lines"
- $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
-
- $pyvenvConfigContent | ForEach-Object {
- $keyval = $PSItem -split "\s*=\s*", 2
- if ($keyval[0] -and $keyval[1]) {
- $val = $keyval[1]
-
- # Remove extraneous quotations around a string value.
- if ("'""".Contains($val.Substring(0, 1))) {
- $val = $val.Substring(1, $val.Length - 2)
- }
-
- $pyvenvConfig[$keyval[0]] = $val
- Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
- }
- }
- }
- return $pyvenvConfig
-}
-
-
-<# Begin Activate script --------------------------------------------------- #>
-
-# Determine the containing directory of this script
-$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
-$VenvExecDir = Get-Item -Path $VenvExecPath
-
-Write-Verbose "Activation script is located in path: '$VenvExecPath'"
-Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
-Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
-
-# Set values required in priority: CmdLine, ConfigFile, Default
-# First, get the location of the virtual environment, it might not be
-# VenvExecDir if specified on the command line.
-if ($VenvDir) {
- Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
-}
-else {
- Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
- $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
- Write-Verbose "VenvDir=$VenvDir"
-}
-
-# Next, read the `pyvenv.cfg` file to determine any required value such
-# as `prompt`.
-$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
-
-# Next, set the prompt from the command line, or the config file, or
-# just use the name of the virtual environment folder.
-if ($Prompt) {
- Write-Verbose "Prompt specified as argument, using '$Prompt'"
-}
-else {
- Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
- if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
- Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
- $Prompt = $pyvenvCfg['prompt'];
- }
- else {
- Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
- Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
- $Prompt = Split-Path -Path $venvDir -Leaf
- }
-}
-
-Write-Verbose "Prompt = '$Prompt'"
-Write-Verbose "VenvDir='$VenvDir'"
-
-# Deactivate any currently active virtual environment, but leave the
-# deactivate function in place.
-deactivate -nondestructive
-
-# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
-# that there is an activated venv.
-$env:VIRTUAL_ENV = $VenvDir
-
-if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
-
- Write-Verbose "Setting prompt to '$Prompt'"
-
- # Set the prompt to include the env name
- # Make sure _OLD_VIRTUAL_PROMPT is global
- function global:_OLD_VIRTUAL_PROMPT { "" }
- Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
- New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
-
- function global:prompt {
- Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
- _OLD_VIRTUAL_PROMPT
- }
- $env:VIRTUAL_ENV_PROMPT = $Prompt
-}
-
-# Clear PYTHONHOME
-if (Test-Path -Path Env:PYTHONHOME) {
- Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
- Remove-Item -Path Env:PYTHONHOME
-}
-
-# Add the venv to the PATH
-Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
-$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
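Note: the Activate.ps1 script deleted above parses the environment's pyvenv.cfg as plain "key = value" lines and only consults the "prompt" key when building the shell prompt. For reference, such a file typically looks like the sketch below; the keys and values are illustrative and not copied from this repository:

    home = /usr/bin
    include-system-site-packages = false
    version = 3.12.0
    prompt = .venv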
diff --git a/.venv/bin/activate b/.venv/bin/activate
deleted file mode 100644
index 285e663..0000000
--- a/.venv/bin/activate
+++ /dev/null
@@ -1,70 +0,0 @@
-# This file must be used with "source bin/activate" *from bash*
-# You cannot run it directly
-
-deactivate () {
- # reset old environment variables
- if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
- PATH="${_OLD_VIRTUAL_PATH:-}"
- export PATH
- unset _OLD_VIRTUAL_PATH
- fi
- if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
- PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
- export PYTHONHOME
- unset _OLD_VIRTUAL_PYTHONHOME
- fi
-
- # Call hash to forget past commands. Without forgetting
- # past commands the $PATH changes we made may not be respected
- hash -r 2> /dev/null
-
- if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
- PS1="${_OLD_VIRTUAL_PS1:-}"
- export PS1
- unset _OLD_VIRTUAL_PS1
- fi
-
- unset VIRTUAL_ENV
- unset VIRTUAL_ENV_PROMPT
- if [ ! "${1:-}" = "nondestructive" ] ; then
- # Self destruct!
- unset -f deactivate
- fi
-}
-
-# unset irrelevant variables
-deactivate nondestructive
-
-# on Windows, a path can contain colons and backslashes and has to be converted:
-if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
- # transform D:\path\to\venv to /d/path/to/venv on MSYS
- # and to /cygdrive/d/path/to/venv on Cygwin
- export VIRTUAL_ENV=$(cygpath /home/jvved/dev/testpks/.venv)
-else
- # use the path as-is
- export VIRTUAL_ENV=/home/jvved/dev/testpks/.venv
-fi
-
-_OLD_VIRTUAL_PATH="$PATH"
-PATH="$VIRTUAL_ENV/"bin":$PATH"
-export PATH
-
-# unset PYTHONHOME if set
-# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
-# could use `if (set -u; : $PYTHONHOME) ;` in bash
-if [ -n "${PYTHONHOME:-}" ] ; then
- _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
- unset PYTHONHOME
-fi
-
-if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
- _OLD_VIRTUAL_PS1="${PS1:-}"
- PS1='(.venv) '"${PS1:-}"
- export PS1
- VIRTUAL_ENV_PROMPT='(.venv) '
- export VIRTUAL_ENV_PROMPT
-fi
-
-# Call hash to forget past commands. Without forgetting
-# past commands the $PATH changes we made may not be respected
-hash -r 2> /dev/null
diff --git a/.venv/bin/activate.csh b/.venv/bin/activate.csh
deleted file mode 100644
index aed6eca..0000000
--- a/.venv/bin/activate.csh
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file must be used with "source bin/activate.csh" *from csh*.
-# You cannot run it directly.
-
-# Created by Davide Di Blasi <davidedb@gmail.com>.
-# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
-
-alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
-
-# Unset irrelevant variables.
-deactivate nondestructive
-
-setenv VIRTUAL_ENV /home/jvved/dev/testpks/.venv
-
-set _OLD_VIRTUAL_PATH="$PATH"
-setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
-
-
-set _OLD_VIRTUAL_PROMPT="$prompt"
-
-if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
- set prompt = '(.venv) '"$prompt"
- setenv VIRTUAL_ENV_PROMPT '(.venv) '
-endif
-
-alias pydoc python -m pydoc
-
-rehash
diff --git a/.venv/bin/activate.fish b/.venv/bin/activate.fish
deleted file mode 100644
index 5666715..0000000
--- a/.venv/bin/activate.fish
+++ /dev/null
@@ -1,69 +0,0 @@
-# This file must be used with "source /bin/activate.fish" *from fish*
-# (https://fishshell.com/). You cannot run it directly.
-
-function deactivate -d "Exit virtual environment and return to normal shell environment"
- # reset old environment variables
- if test -n "$_OLD_VIRTUAL_PATH"
- set -gx PATH $_OLD_VIRTUAL_PATH
- set -e _OLD_VIRTUAL_PATH
- end
- if test -n "$_OLD_VIRTUAL_PYTHONHOME"
- set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
- set -e _OLD_VIRTUAL_PYTHONHOME
- end
-
- if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
- set -e _OLD_FISH_PROMPT_OVERRIDE
- # prevents error when using nested fish instances (Issue #93858)
- if functions -q _old_fish_prompt
- functions -e fish_prompt
- functions -c _old_fish_prompt fish_prompt
- functions -e _old_fish_prompt
- end
- end
-
- set -e VIRTUAL_ENV
- set -e VIRTUAL_ENV_PROMPT
- if test "$argv[1]" != "nondestructive"
- # Self-destruct!
- functions -e deactivate
- end
-end
-
-# Unset irrelevant variables.
-deactivate nondestructive
-
-set -gx VIRTUAL_ENV /home/jvved/dev/testpks/.venv
-
-set -gx _OLD_VIRTUAL_PATH $PATH
-set -gx PATH "$VIRTUAL_ENV/"bin $PATH
-
-# Unset PYTHONHOME if set.
-if set -q PYTHONHOME
- set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
- set -e PYTHONHOME
-end
-
-if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
- # fish uses a function instead of an env var to generate the prompt.
-
- # Save the current fish_prompt function as the function _old_fish_prompt.
- functions -c fish_prompt _old_fish_prompt
-
- # With the original prompt function renamed, we can override with our own.
- function fish_prompt
- # Save the return status of the last command.
- set -l old_status $status
-
- # Output the venv prompt; color taken from the blue of the Python logo.
- printf "%s%s%s" (set_color 4B8BBE) '(.venv) ' (set_color normal)
-
- # Restore the return status of the previous command.
- echo "exit $old_status" | .
- # Output the original/"old" prompt.
- _old_fish_prompt
- end
-
- set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
- set -gx VIRTUAL_ENV_PROMPT '(.venv) '
-end
diff --git a/.venv/bin/ipython b/.venv/bin/ipython
deleted file mode 100755
index 0f53eb3..0000000
--- a/.venv/bin/ipython
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from IPython import start_ipython
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(start_ipython())
diff --git a/.venv/bin/ipython3 b/.venv/bin/ipython3
deleted file mode 100755
index 0f53eb3..0000000
--- a/.venv/bin/ipython3
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from IPython import start_ipython
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(start_ipython())
diff --git a/.venv/bin/markdown-it b/.venv/bin/markdown-it
deleted file mode 100755
index 0fa5488..0000000
--- a/.venv/bin/markdown-it
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from markdown_it.cli.parse import main
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(main())
diff --git a/.venv/bin/pip b/.venv/bin/pip
deleted file mode 100755
index 063c294..0000000
--- a/.venv/bin/pip
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from pip._internal.cli.main import main
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(main())
diff --git a/.venv/bin/pip3 b/.venv/bin/pip3
deleted file mode 100755
index 063c294..0000000
--- a/.venv/bin/pip3
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from pip._internal.cli.main import main
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(main())
diff --git a/.venv/bin/pip3.12 b/.venv/bin/pip3.12
deleted file mode 100755
index 063c294..0000000
--- a/.venv/bin/pip3.12
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from pip._internal.cli.main import main
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(main())
diff --git a/.venv/bin/pygmentize b/.venv/bin/pygmentize
deleted file mode 100755
index 860e239..0000000
--- a/.venv/bin/pygmentize
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/home/jvved/dev/testpks/.venv/bin/python3
-# -*- coding: utf-8 -*-
-import re
-import sys
-from pygments.cmdline import main
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
- sys.exit(main())
diff --git a/.venv/bin/python b/.venv/bin/python
deleted file mode 120000
index b8a0adb..0000000
--- a/.venv/bin/python
+++ /dev/null
@@ -1 +0,0 @@
-python3
\ No newline at end of file
diff --git a/.venv/bin/python3 b/.venv/bin/python3
deleted file mode 120000
index ae65fda..0000000
--- a/.venv/bin/python3
+++ /dev/null
@@ -1 +0,0 @@
-/usr/bin/python3
\ No newline at end of file
diff --git a/.venv/bin/python3.12 b/.venv/bin/python3.12
deleted file mode 120000
index b8a0adb..0000000
--- a/.venv/bin/python3.12
+++ /dev/null
@@ -1 +0,0 @@
-python3
\ No newline at end of file
diff --git a/.venv/bin/uwsgi b/.venv/bin/uwsgi
deleted file mode 100755
index 64f1994..0000000
Binary files a/.venv/bin/uwsgi and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__init__.py b/.venv/lib/python3.12/site-packages/IPython/__init__.py
deleted file mode 100644
index b723548..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/__init__.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# PYTHON_ARGCOMPLETE_OK
-"""
-IPython: tools for interactive and parallel computing in Python.
-
-https://ipython.org
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2008-2011, IPython Development Team.
-# Copyright (c) 2001-2007, Fernando Perez <fperez@colorado.edu>
-# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
-# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-
-#-----------------------------------------------------------------------------
-# Setup everything
-#-----------------------------------------------------------------------------
-
-# Don't forget to also update setup.py when this changes!
-if sys.version_info < (3, 10):
- raise ImportError(
- """
-IPython 8.19+ supports Python 3.10 and above, following SPEC0.
-IPython 8.13+ supports Python 3.9 and above, following NEP 29.
-IPython 8.0-8.12 supports Python 3.8 and above, following NEP 29.
-When using Python 2.7, please install IPython 5.x LTS Long Term Support version.
-Python 3.3 and 3.4 were supported up to IPython 6.x.
-Python 3.5 was supported with IPython 7.0 to 7.9.
-Python 3.6 was supported with IPython up to 7.16.
-Python 3.7 was still supported with the 7.x branch.
-
-See IPython `README.rst` file for more information:
-
- https://github.com/ipython/ipython/blob/main/README.rst
-
-"""
- )
-
-#-----------------------------------------------------------------------------
-# Setup the top level names
-#-----------------------------------------------------------------------------
-
-from .core.getipython import get_ipython
-from .core import release
-from .core.application import Application
-from .terminal.embed import embed
-
-from .core.interactiveshell import InteractiveShell
-from .utils.sysinfo import sys_info
-from .utils.frame import extract_module_locals
-
-__all__ = ["start_ipython", "embed", "start_kernel", "embed_kernel"]
-
-# Release data
-__author__ = '%s <%s>' % (release.author, release.author_email)
-__license__ = release.license
-__version__ = release.version
-version_info = release.version_info
-# list of CVEs that should have been patched in this release.
-# this is informational and should not be relied upon.
-__patched_cves__ = {"CVE-2022-21699", "CVE-2023-24816"}
-
-
-def embed_kernel(module=None, local_ns=None, **kwargs):
- """Embed and start an IPython kernel in a given scope.
-
- If you don't want the kernel to initialize the namespace
- from the scope of the surrounding function,
- and/or you want to load full IPython configuration,
- you probably want `IPython.start_kernel()` instead.
-
- Parameters
- ----------
- module : types.ModuleType, optional
- The module to load into IPython globals (default: caller)
- local_ns : dict, optional
- The namespace to load into IPython user namespace (default: caller)
- **kwargs : various, optional
- Further keyword args are relayed to the IPKernelApp constructor,
- such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
- allowing configuration of the kernel (see :ref:`kernel_options`). Will only have an effect
- on the first embed_kernel call for a given process.
- """
-
- (caller_module, caller_locals) = extract_module_locals(1)
- if module is None:
- module = caller_module
- if local_ns is None:
- local_ns = caller_locals
-
- # Only import .zmq when we really need it
- from ipykernel.embed import embed_kernel as real_embed_kernel
- real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
-
-def start_ipython(argv=None, **kwargs):
- """Launch a normal IPython instance (as opposed to embedded)
-
- `IPython.embed()` puts a shell in a particular calling scope,
- such as a function or method for debugging purposes,
- which is often not desirable.
-
- `start_ipython()` does full, regular IPython initialization,
- including loading startup files, configuration, etc.
- much of which is skipped by `embed()`.
-
- This is a public API method, and will survive implementation changes.
-
- Parameters
- ----------
- argv : list or None, optional
- If unspecified or None, IPython will parse command-line options from sys.argv.
- To prevent any command-line parsing, pass an empty list: `argv=[]`.
- user_ns : dict, optional
- specify this dictionary to initialize the IPython user namespace with particular values.
- **kwargs : various, optional
- Any other kwargs will be passed to the Application constructor,
- such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
- allowing configuration of the instance (see :ref:`terminal_options`).
- """
- from IPython.terminal.ipapp import launch_new_instance
- return launch_new_instance(argv=argv, **kwargs)
-
-def start_kernel(argv=None, **kwargs):
- """Launch a normal IPython kernel instance (as opposed to embedded)
-
- `IPython.embed_kernel()` puts a shell in a particular calling scope,
- such as a function or method for debugging purposes,
- which is often not desirable.
-
- `start_kernel()` does full, regular IPython initialization,
- including loading startup files, configuration, etc.
- much of which is skipped by `embed_kernel()`.
-
- Parameters
- ----------
- argv : list or None, optional
- If unspecified or None, IPython will parse command-line options from sys.argv.
- To prevent any command-line parsing, pass an empty list: `argv=[]`.
- user_ns : dict, optional
- specify this dictionary to initialize the IPython user namespace with particular values.
- **kwargs : various, optional
- Any other kwargs will be passed to the Application constructor,
- such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
- allowing configuration of the kernel (see :ref:`kernel_options`).
- """
- import warnings
-
- warnings.warn(
- "start_kernel is deprecated since IPython 8.0, use from `ipykernel.kernelapp.launch_new_instance`",
- DeprecationWarning,
- stacklevel=2,
- )
- from ipykernel.kernelapp import launch_new_instance
- return launch_new_instance(argv=argv, **kwargs)
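Note: the IPython/__init__.py deleted above documents start_ipython() and embed() as its public entry points. A minimal usage sketch based on those docstrings, not on any code in this repository:

    import IPython

    # Full, regular IPython start-up; argv=[] prevents parsing of sys.argv (see docstring above).
    IPython.start_ipython(argv=[])

    # Or, while debugging, open a shell in the current scope instead:
    # IPython.embed()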
diff --git a/.venv/lib/python3.12/site-packages/IPython/__main__.py b/.venv/lib/python3.12/site-packages/IPython/__main__.py
deleted file mode 100644
index 3b46056..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/__main__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# PYTHON_ARGCOMPLETE_OK
-# encoding: utf-8
-"""Terminal-based IPython entry point.
-"""
-# -----------------------------------------------------------------------------
-# Copyright (c) 2012, IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-# -----------------------------------------------------------------------------
-
-from IPython import start_ipython
-
-start_ipython()
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
index 4d0925b..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/__main__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/__main__.cpython-312.pyc
deleted file mode 100644
index 6e662e7..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/__main__.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/conftest.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/conftest.cpython-312.pyc
deleted file mode 100644
index 3863884..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/conftest.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/consoleapp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/consoleapp.cpython-312.pyc
deleted file mode 100644
index 01c1063..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/consoleapp.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/display.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/display.cpython-312.pyc
deleted file mode 100644
index bda6164..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/display.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/__pycache__/paths.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/__pycache__/paths.cpython-312.pyc
deleted file mode 100644
index db4be62..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/__pycache__/paths.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/conftest.py b/.venv/lib/python3.12/site-packages/IPython/conftest.py
deleted file mode 100644
index abf6131..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/conftest.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import builtins
-import inspect
-import os
-import pathlib
-import shutil
-import sys
-import types
-
-import pytest
-
-# Must register before it gets imported
-pytest.register_assert_rewrite("IPython.testing.tools")
-
-from .testing import tools
-
-
-def pytest_collection_modifyitems(items):
- """This function is automatically run by pytest passing all collected test
- functions.
-
- We use it to add asyncio marker to all async tests and assert we don't use
- test functions that are async generators which wouldn't make sense.
- """
- for item in items:
- if inspect.iscoroutinefunction(item.obj):
- item.add_marker("asyncio")
- assert not inspect.isasyncgenfunction(item.obj)
-
-
-def get_ipython():
- from .terminal.interactiveshell import TerminalInteractiveShell
- if TerminalInteractiveShell._instance:
- return TerminalInteractiveShell.instance()
-
- config = tools.default_config()
- config.TerminalInteractiveShell.simple_prompt = True
-
- # Create and initialize our test-friendly IPython instance.
- shell = TerminalInteractiveShell.instance(config=config)
- return shell
-
-
-@pytest.fixture(scope='session', autouse=True)
-def work_path():
- path = pathlib.Path("./tmp-ipython-pytest-profiledir")
- os.environ["IPYTHONDIR"] = str(path.absolute())
- if path.exists():
- raise ValueError('IPython dir temporary path already exists ! Did previous test run exit successfully ?')
- path.mkdir()
- yield
- shutil.rmtree(str(path.resolve()))
-
-
-def nopage(strng, start=0, screen_lines=0, pager_cmd=None):
- if isinstance(strng, dict):
- strng = strng.get("text/plain", "")
- print(strng)
-
-
-def xsys(self, cmd):
- """Replace the default system call with a capturing one for doctest.
- """
- # We use getoutput, but we need to strip it because pexpect captures
- # the trailing newline differently from commands.getoutput
- print(self.getoutput(cmd, split=False, depth=1).rstrip(), end="", file=sys.stdout)
- sys.stdout.flush()
-
-
-# for things to work correctly we would need this as a session fixture;
-# unfortunately this will fail on some test that get executed as _collection_
-# time (before the fixture run), in particular parametrized test that contain
-# yields. so for now execute at import time.
-#@pytest.fixture(autouse=True, scope='session')
-def inject():
-
- builtins.get_ipython = get_ipython
- builtins._ip = get_ipython()
- builtins.ip = get_ipython()
- builtins.ip.system = types.MethodType(xsys, ip)
- builtins.ip.builtin_trap.activate()
- from .core import page
-
- page.pager_page = nopage
- # yield
-
-
-inject()
diff --git a/.venv/lib/python3.12/site-packages/IPython/consoleapp.py b/.venv/lib/python3.12/site-packages/IPython/consoleapp.py
deleted file mode 100644
index c2bbe18..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/consoleapp.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""
-Shim to maintain backwards compatibility with old IPython.consoleapp imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from warnings import warn
-
-warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
- "You should import from jupyter_client.consoleapp instead.", stacklevel=2)
-
-from jupyter_client.consoleapp import *
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__init__.py b/.venv/lib/python3.12/site-packages/IPython/core/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
index c98e21c..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/alias.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/alias.cpython-312.pyc
deleted file mode 100644
index 307e267..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/alias.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/application.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/application.cpython-312.pyc
deleted file mode 100644
index 1306fcc..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/application.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/async_helpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/async_helpers.cpython-312.pyc
deleted file mode 100644
index 996e299..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/async_helpers.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/autocall.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/autocall.cpython-312.pyc
deleted file mode 100644
index 4e8bea4..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/autocall.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/builtin_trap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/builtin_trap.cpython-312.pyc
deleted file mode 100644
index 5818aa0..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/builtin_trap.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/compilerop.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/compilerop.cpython-312.pyc
deleted file mode 100644
index d2707d3..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/compilerop.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completer.cpython-312.pyc
deleted file mode 100644
index 57fec08..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completer.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completerlib.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completerlib.cpython-312.pyc
deleted file mode 100644
index 548ed2d..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/completerlib.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/crashhandler.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/crashhandler.cpython-312.pyc
deleted file mode 100644
index 9f3e8d8..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/crashhandler.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/debugger.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/debugger.cpython-312.pyc
deleted file mode 100644
index 070f704..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/debugger.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display.cpython-312.pyc
deleted file mode 100644
index bbebd04..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_functions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_functions.cpython-312.pyc
deleted file mode 100644
index c19e4e5..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_functions.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_trap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_trap.cpython-312.pyc
deleted file mode 100644
index 3a63582..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/display_trap.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displayhook.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displayhook.cpython-312.pyc
deleted file mode 100644
index b35f5ef..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displayhook.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displaypub.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displaypub.cpython-312.pyc
deleted file mode 100644
index d494d70..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/displaypub.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/error.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/error.cpython-312.pyc
deleted file mode 100644
index 30b6103..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/error.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/events.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/events.cpython-312.pyc
deleted file mode 100644
index 8a8e239..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/events.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/excolors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/excolors.cpython-312.pyc
deleted file mode 100644
index c207aaf..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/excolors.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/extensions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/extensions.cpython-312.pyc
deleted file mode 100644
index 62a4bf9..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/extensions.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/formatters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/formatters.cpython-312.pyc
deleted file mode 100644
index c1c56db..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/formatters.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/getipython.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/getipython.cpython-312.pyc
deleted file mode 100644
index 2dae445..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/getipython.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/guarded_eval.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/guarded_eval.cpython-312.pyc
deleted file mode 100644
index 65aee41..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/guarded_eval.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/history.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/history.cpython-312.pyc
deleted file mode 100644
index 8cf0ca7..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/history.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/historyapp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/historyapp.cpython-312.pyc
deleted file mode 100644
index cd59e16..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/historyapp.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/hooks.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/hooks.cpython-312.pyc
deleted file mode 100644
index f7b533d..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/hooks.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputsplitter.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputsplitter.cpython-312.pyc
deleted file mode 100644
index 54cbddd..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputsplitter.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer.cpython-312.pyc
deleted file mode 100644
index fe30503..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-312.pyc
deleted file mode 100644
index b556a35..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/interactiveshell.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/interactiveshell.cpython-312.pyc
deleted file mode 100644
index 67ca9b0..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/interactiveshell.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/latex_symbols.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/latex_symbols.cpython-312.pyc
deleted file mode 100644
index c02fd4f..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/latex_symbols.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/logger.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/logger.cpython-312.pyc
deleted file mode 100644
index 695e840..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/logger.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/macro.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/macro.cpython-312.pyc
deleted file mode 100644
index 2d8a288..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/macro.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic.cpython-312.pyc
deleted file mode 100644
index 8306380..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic_arguments.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic_arguments.cpython-312.pyc
deleted file mode 100644
index d0d690c..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/magic_arguments.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/oinspect.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/oinspect.cpython-312.pyc
deleted file mode 100644
index 352d26c..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/oinspect.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/page.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/page.cpython-312.pyc
deleted file mode 100644
index 58a3cda..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/page.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payload.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payload.cpython-312.pyc
deleted file mode 100644
index 049723e..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payload.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payloadpage.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payloadpage.cpython-312.pyc
deleted file mode 100644
index 7e55801..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/payloadpage.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prefilter.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prefilter.cpython-312.pyc
deleted file mode 100644
index e4403a1..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prefilter.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profileapp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profileapp.cpython-312.pyc
deleted file mode 100644
index c9c1331..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profileapp.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profiledir.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profiledir.cpython-312.pyc
deleted file mode 100644
index 19eaa78..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/profiledir.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prompts.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prompts.cpython-312.pyc
deleted file mode 100644
index d7e7eef..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/prompts.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/pylabtools.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/pylabtools.cpython-312.pyc
deleted file mode 100644
index 71a46fd..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/pylabtools.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/release.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/release.cpython-312.pyc
deleted file mode 100644
index c33494f..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/release.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/shellapp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/shellapp.cpython-312.pyc
deleted file mode 100644
index dca3055..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/shellapp.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/splitinput.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/splitinput.cpython-312.pyc
deleted file mode 100644
index aac67db..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/splitinput.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/ultratb.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/ultratb.cpython-312.pyc
deleted file mode 100644
index 829fa56..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/ultratb.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/usage.cpython-312.pyc b/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/usage.cpython-312.pyc
deleted file mode 100644
index 72829a9..0000000
Binary files a/.venv/lib/python3.12/site-packages/IPython/core/__pycache__/usage.cpython-312.pyc and /dev/null differ
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/alias.py b/.venv/lib/python3.12/site-packages/IPython/core/alias.py
deleted file mode 100644
index 845e6b7..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/alias.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# encoding: utf-8
-"""
-System command aliases.
-
-Authors:
-
-* Fernando Perez
-* Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import re
-import sys
-
-from traitlets.config.configurable import Configurable
-from .error import UsageError
-
-from traitlets import List, Instance
-from logging import error
-
-import typing as t
-
-
-#-----------------------------------------------------------------------------
-# Utilities
-#-----------------------------------------------------------------------------
-
-# This is used as the pattern for calls to split_user_input.
-shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
-
-def default_aliases() -> t.List[t.Tuple[str, str]]:
- """Return list of shell aliases to auto-define.
- """
- # Note: the aliases defined here should be safe to use on a kernel
- # regardless of what frontend it is attached to. Frontends that use a
- # kernel in-process can define additional aliases that will only work in
- # their case. For example, things like 'less' or 'clear' that manipulate
- # the terminal should NOT be declared here, as they will only work if the
- # kernel is running inside a true terminal, and not over the network.
-
- if os.name == 'posix':
- default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
- ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
- ('cat', 'cat'),
- ]
- # Useful set of ls aliases. The GNU and BSD options are a little
- # different, so we make aliases that provide as similar as possible
- # behavior in ipython, by passing the right flags for each platform
- if sys.platform.startswith('linux'):
- ls_aliases = [('ls', 'ls -F --color'),
- # long ls
- ('ll', 'ls -F -o --color'),
- # ls normal files only
- ('lf', 'ls -F -o --color %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -o --color %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -o --color %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -o --color %l | grep ^-..x'),
- ]
- elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
- # OpenBSD, NetBSD. The ls implementation on these platforms do not support
- # the -G switch and lack the ability to use colorized output.
- ls_aliases = [('ls', 'ls -F'),
- # long ls
- ('ll', 'ls -F -l'),
- # ls normal files only
- ('lf', 'ls -F -l %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -l %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -l %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -l %l | grep ^-..x'),
- ]
- else:
- # BSD, OSX, etc.
- ls_aliases = [('ls', 'ls -F -G'),
- # long ls
- ('ll', 'ls -F -l -G'),
- # ls normal files only
- ('lf', 'ls -F -l -G %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -l -G %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -G -l %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -l -G %l | grep ^-..x'),
- ]
- default_aliases = default_aliases + ls_aliases
- elif os.name in ['nt', 'dos']:
- default_aliases = [('ls', 'dir /on'),
- ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
- ('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
- ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
- ]
- else:
- default_aliases = []
-
- return default_aliases
-
-
-class AliasError(Exception):
- pass
-
-
-class InvalidAliasError(AliasError):
- pass
-
-class Alias(object):
- """Callable object storing the details of one alias.
-
- Instances are registered as magic functions to allow use of aliases.
- """
-
- # Prepare blacklist
- blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
-
- def __init__(self, shell, name, cmd):
- self.shell = shell
- self.name = name
- self.cmd = cmd
- self.__doc__ = "Alias for `!{}`".format(cmd)
- self.nargs = self.validate()
-
- def validate(self):
- """Validate the alias, and return the number of arguments."""
- if self.name in self.blacklist:
- raise InvalidAliasError("The name %s can't be aliased "
- "because it is a keyword or builtin." % self.name)
- try:
- caller = self.shell.magics_manager.magics['line'][self.name]
- except KeyError:
- pass
- else:
- if not isinstance(caller, Alias):
- raise InvalidAliasError("The name %s can't be aliased "
- "because it is another magic command." % self.name)
-
- if not (isinstance(self.cmd, str)):
- raise InvalidAliasError("An alias command must be a string, "
- "got: %r" % self.cmd)
-
- nargs = self.cmd.count('%s') - self.cmd.count('%%s')
-
- if (nargs > 0) and (self.cmd.find('%l') >= 0):
- raise InvalidAliasError('The %s and %l specifiers are mutually '
- 'exclusive in alias definitions.')
-
- return nargs
-
- def __repr__(self):
- return "<alias {} for {!r}>".format(self.name, self.cmd)
-
- def __call__(self, rest=''):
- cmd = self.cmd
- nargs = self.nargs
- # Expand the %l special to be the user's input line
- if cmd.find('%l') >= 0:
- cmd = cmd.replace('%l', rest)
- rest = ''
-
- if nargs==0:
- if cmd.find('%%s') >= 1:
- cmd = cmd.replace('%%s', '%s')
- # Simple, argument-less aliases
- cmd = '%s %s' % (cmd, rest)
- else:
- # Handle aliases with positional arguments
- args = rest.split(None, nargs)
- if len(args) < nargs:
- raise UsageError('Alias <%s> requires %s arguments, %s given.' %
- (self.name, nargs, len(args)))
- cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
-
- self.shell.system(cmd)
-
-#-----------------------------------------------------------------------------
-# Main AliasManager class
-#-----------------------------------------------------------------------------
-
-class AliasManager(Configurable):
- default_aliases: List = List(default_aliases()).tag(config=True)
- user_aliases: List = List(default_value=[]).tag(config=True)
- shell = Instance(
- "IPython.core.interactiveshell.InteractiveShellABC", allow_none=True
- )
-
- def __init__(self, shell=None, **kwargs):
- super(AliasManager, self).__init__(shell=shell, **kwargs)
- # For convenient access
- if self.shell is not None:
- self.linemagics = self.shell.magics_manager.magics["line"]
- self.init_aliases()
-
- def init_aliases(self):
- # Load default & user aliases
- for name, cmd in self.default_aliases + self.user_aliases:
- if (
- cmd.startswith("ls ")
- and self.shell is not None
- and self.shell.colors == "NoColor"
- ):
- cmd = cmd.replace(" --color", "")
- self.soft_define_alias(name, cmd)
-
- @property
- def aliases(self):
- return [(n, func.cmd) for (n, func) in self.linemagics.items()
- if isinstance(func, Alias)]
-
- def soft_define_alias(self, name, cmd):
- """Define an alias, but don't raise on an AliasError."""
- try:
- self.define_alias(name, cmd)
- except AliasError as e:
- error("Invalid alias: %s" % e)
-
- def define_alias(self, name, cmd):
- """Define a new alias after validating it.
-
- This will raise an :exc:`AliasError` if there are validation
- problems.
- """
- caller = Alias(shell=self.shell, name=name, cmd=cmd)
- self.shell.magics_manager.register_function(caller, magic_kind='line',
- magic_name=name)
-
- def get_alias(self, name):
- """Return an alias, or None if no alias by that name exists."""
- aname = self.linemagics.get(name, None)
- return aname if isinstance(aname, Alias) else None
-
- def is_alias(self, name):
- """Return whether or not a given name has been defined as an alias"""
- return self.get_alias(name) is not None
-
- def undefine_alias(self, name):
- if self.is_alias(name):
- del self.linemagics[name]
- else:
- raise ValueError('%s is not an alias' % name)
-
- def clear_aliases(self):
- for name, _ in self.aliases:
- self.undefine_alias(name)
-
- def retrieve_alias(self, name):
- """Retrieve the command to which an alias expands."""
- caller = self.get_alias(name)
- if caller:
- return caller.cmd
- else:
- raise ValueError('%s is not an alias' % name)
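
For reference, the `%s`/`%l` expansion rules implemented by `Alias.__call__` above
boil down to the following sketch (`expand_alias` is an illustrative helper, not
IPython API):

    def expand_alias(cmd, rest):
        """Expand an alias command string against the user's input `rest`."""
        if '%l' in cmd:
            # %l swallows the whole input line; it is mutually exclusive with %s
            return cmd.replace('%l', rest)
        nargs = cmd.count('%s') - cmd.count('%%s')
        if nargs == 0:
            # argument-less alias: append whatever the user typed
            return '%s %s' % (cmd.replace('%%s', '%s'), rest)
        args = rest.split(None, nargs)
        if len(args) < nargs:
            raise ValueError('alias requires %d arguments, %d given' % (nargs, len(args)))
        # fill the positional slots, pass any remainder through untouched
        return '%s %s' % (cmd % tuple(args[:nargs]), ' '.join(args[nargs:]))

    print(expand_alias('ls -F -G %l | grep ^d', 'src'))   # -> 'ls -F -G src | grep ^d'
    print(expand_alias('show %s then %s', 'a b c'))       # -> 'show a then b c'
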
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/application.py b/.venv/lib/python3.12/site-packages/IPython/core/application.py
deleted file mode 100644
index 841e867..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/application.py
+++ /dev/null
@@ -1,492 +0,0 @@
-# encoding: utf-8
-"""
-An application for IPython.
-
-All top-level applications should use the classes in this module for
-handling configuration and creating configurables.
-
-The job of an :class:`Application` is to create the master configuration
-object and then create the configurable objects, passing the config to them.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import atexit
-from copy import deepcopy
-import logging
-import os
-import shutil
-import sys
-
-from pathlib import Path
-
-from traitlets.config.application import Application, catch_config_error
-from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader
-from IPython.core import release, crashhandler
-from IPython.core.profiledir import ProfileDir, ProfileDirError
-from IPython.paths import get_ipython_dir, get_ipython_package_dir
-from IPython.utils.path import ensure_dir_exists
-from traitlets import (
- List, Unicode, Type, Bool, Set, Instance, Undefined,
- default, observe,
-)
-
-if os.name == "nt":
- programdata = os.environ.get("PROGRAMDATA", None)
- if programdata is not None:
- SYSTEM_CONFIG_DIRS = [str(Path(programdata) / "ipython")]
- else: # PROGRAMDATA is not defined by default on XP.
- SYSTEM_CONFIG_DIRS = []
-else:
- SYSTEM_CONFIG_DIRS = [
- "/usr/local/etc/ipython",
- "/etc/ipython",
- ]
-
-
-ENV_CONFIG_DIRS = []
-_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
-if _env_config_dir not in SYSTEM_CONFIG_DIRS:
- # only add ENV_CONFIG if sys.prefix is not already included
- ENV_CONFIG_DIRS.append(_env_config_dir)
-
-
-_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')
-if _envvar in {None, ''}:
- IPYTHON_SUPPRESS_CONFIG_ERRORS = None
-else:
- if _envvar.lower() in {'1','true'}:
- IPYTHON_SUPPRESS_CONFIG_ERRORS = True
- elif _envvar.lower() in {'0','false'} :
- IPYTHON_SUPPRESS_CONFIG_ERRORS = False
- else:
- sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar )
-
-# aliases and flags
-
-base_aliases = {}
-if isinstance(Application.aliases, dict):
- # traitlets 5
- base_aliases.update(Application.aliases)
-base_aliases.update(
- {
- "profile-dir": "ProfileDir.location",
- "profile": "BaseIPythonApplication.profile",
- "ipython-dir": "BaseIPythonApplication.ipython_dir",
- "log-level": "Application.log_level",
- "config": "BaseIPythonApplication.extra_config_file",
- }
-)
-
-base_flags = dict()
-if isinstance(Application.flags, dict):
- # traitlets 5
- base_flags.update(Application.flags)
-base_flags.update(
- dict(
- debug=(
- {"Application": {"log_level": logging.DEBUG}},
- "set log level to logging.DEBUG (maximize logging output)",
- ),
- quiet=(
- {"Application": {"log_level": logging.CRITICAL}},
- "set log level to logging.CRITICAL (minimize logging output)",
- ),
- init=(
- {
- "BaseIPythonApplication": {
- "copy_config_files": True,
- "auto_create": True,
- }
- },
- """Initialize profile with default config files. This is equivalent
- to running `ipython profile create <profile>` prior to startup.
- """,
- ),
- )
-)
-
-
-class ProfileAwareConfigLoader(PyFileConfigLoader):
- """A Python file config loader that is aware of IPython profiles."""
- def load_subconfig(self, fname, path=None, profile=None):
- if profile is not None:
- try:
- profile_dir = ProfileDir.find_profile_dir_by_name(
- get_ipython_dir(),
- profile,
- )
- except ProfileDirError:
- return
- path = profile_dir.location
- return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path)
-
-class BaseIPythonApplication(Application):
- name = "ipython"
- description = "IPython: an enhanced interactive Python shell."
- version = Unicode(release.version)
-
- aliases = base_aliases
- flags = base_flags
- classes = List([ProfileDir])
-
- # enable `load_subconfig('cfg.py', profile='name')`
- python_config_loader_class = ProfileAwareConfigLoader
-
- # Track whether the config_file has changed,
- # because some logic happens only if we aren't using the default.
- config_file_specified = Set()
-
- config_file_name = Unicode()
- @default('config_file_name')
- def _config_file_name_default(self):
- return self.name.replace('-','_') + u'_config.py'
- @observe('config_file_name')
- def _config_file_name_changed(self, change):
- if change['new'] != change['old']:
- self.config_file_specified.add(change['new'])
-
- # The directory that contains IPython's builtin profiles.
- builtin_profile_dir = Unicode(
- os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default')
- )
-
- config_file_paths = List(Unicode())
- @default('config_file_paths')
- def _config_file_paths_default(self):
- return []
-
- extra_config_file = Unicode(
- help="""Path to an extra config file to load.
-
- If specified, load this config file in addition to any other IPython config.
- """).tag(config=True)
- @observe('extra_config_file')
- def _extra_config_file_changed(self, change):
- old = change['old']
- new = change['new']
- try:
- self.config_files.remove(old)
- except ValueError:
- pass
- self.config_file_specified.add(new)
- self.config_files.append(new)
-
- profile = Unicode(u'default',
- help="""The IPython profile to use."""
- ).tag(config=True)
-
- @observe('profile')
- def _profile_changed(self, change):
- self.builtin_profile_dir = os.path.join(
- get_ipython_package_dir(), u'config', u'profile', change['new']
- )
-
- add_ipython_dir_to_sys_path = Bool(
- False,
- """Should the IPython profile directory be added to sys path ?
-
- This option was non-existing before IPython 8.0, and ipython_dir was added to
- sys path to allow import of extensions present there. This was historical
- baggage from when pip did not exist. This now default to false,
- but can be set to true for legacy reasons.
- """,
- ).tag(config=True)
-
- ipython_dir = Unicode(
- help="""
- The name of the IPython directory. This directory is used for logging
- configuration (through profiles), history storage, etc. The default
- is usually $HOME/.ipython. This option can also be specified through
- the environment variable IPYTHONDIR.
- """
- ).tag(config=True)
- @default('ipython_dir')
- def _ipython_dir_default(self):
- d = get_ipython_dir()
- self._ipython_dir_changed({
- 'name': 'ipython_dir',
- 'old': d,
- 'new': d,
- })
- return d
-
- _in_init_profile_dir = False
-
- profile_dir = Instance(ProfileDir, allow_none=True)
-
- @default('profile_dir')
- def _profile_dir_default(self):
- # avoid recursion
- if self._in_init_profile_dir:
- return
- # profile_dir requested early, force initialization
- self.init_profile_dir()
- return self.profile_dir
-
- overwrite = Bool(False,
- help="""Whether to overwrite existing config files when copying"""
- ).tag(config=True)
-
- auto_create = Bool(False,
- help="""Whether to create profile dir if it doesn't exist"""
- ).tag(config=True)
-
- config_files = List(Unicode())
-
- @default('config_files')
- def _config_files_default(self):
- return [self.config_file_name]
-
- copy_config_files = Bool(False,
- help="""Whether to install the default config files into the profile dir.
- If a new profile is being created, and IPython contains config files for that
- profile, then they will be staged into the new directory. Otherwise,
- default config files will be automatically generated.
- """).tag(config=True)
-
- verbose_crash = Bool(False,
- help="""Create a massive crash report when IPython encounters what may be an
- internal error. The default is to append a short message to the
- usual traceback""").tag(config=True)
-
- # The class to use as the crash handler.
- crash_handler_class = Type(crashhandler.CrashHandler)
-
- @catch_config_error
- def __init__(self, **kwargs):
- super(BaseIPythonApplication, self).__init__(**kwargs)
- # ensure current working directory exists
- try:
- os.getcwd()
- except:
- # exit if cwd doesn't exist
- self.log.error("Current working directory doesn't exist.")
- self.exit(1)
-
- #-------------------------------------------------------------------------
- # Various stages of Application creation
- #-------------------------------------------------------------------------
-
- def init_crash_handler(self):
- """Create a crash handler, typically setting sys.excepthook to it."""
- self.crash_handler = self.crash_handler_class(self)
- sys.excepthook = self.excepthook
- def unset_crashhandler():
- sys.excepthook = sys.__excepthook__
- atexit.register(unset_crashhandler)
-
- def excepthook(self, etype, evalue, tb):
- """this is sys.excepthook after init_crashhandler
-
- set self.verbose_crash=True to use our full crashhandler, instead of
- a regular traceback with a short message (crash_handler_lite)
- """
-
- if self.verbose_crash:
- return self.crash_handler(etype, evalue, tb)
- else:
- return crashhandler.crash_handler_lite(etype, evalue, tb)
-
- @observe('ipython_dir')
- def _ipython_dir_changed(self, change):
- old = change['old']
- new = change['new']
- if old is not Undefined:
- str_old = os.path.abspath(old)
- if str_old in sys.path:
- sys.path.remove(str_old)
- if self.add_ipython_dir_to_sys_path:
- str_path = os.path.abspath(new)
- sys.path.append(str_path)
- ensure_dir_exists(new)
- readme = os.path.join(new, "README")
- readme_src = os.path.join(
- get_ipython_package_dir(), "config", "profile", "README"
- )
- if not os.path.exists(readme) and os.path.exists(readme_src):
- shutil.copy(readme_src, readme)
- for d in ("extensions", "nbextensions"):
- path = os.path.join(new, d)
- try:
- ensure_dir_exists(path)
- except OSError as e:
- # this will not be EEXIST
- self.log.error("couldn't create path %s: %s", path, e)
- self.log.debug("IPYTHONDIR set to: %s", new)
-
- def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
- """Load the config file.
-
- By default, errors in loading config are handled, and a warning
- printed on screen. For testing, the suppress_errors option is set
- to False, so errors will make tests fail.
-
- `suppress_errors` default value is to be `None` in which case the
- behavior default to the one of `traitlets.Application`.
-
- The default value can be set :
- - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive).
- - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive).
- - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset.
-
- Any other value are invalid, and will make IPython exit with a non-zero return code.
- """
-
-
- self.log.debug("Searching path %s for config files", self.config_file_paths)
- base_config = 'ipython_config.py'
- self.log.debug("Attempting to load config file: %s" %
- base_config)
- try:
- if suppress_errors is not None:
- old_value = Application.raise_config_file_errors
- Application.raise_config_file_errors = not suppress_errors;
- Application.load_config_file(
- self,
- base_config,
- path=self.config_file_paths
- )
- except ConfigFileNotFound:
- # ignore errors loading parent
- self.log.debug("Config file %s not found", base_config)
- pass
- if suppress_errors is not None:
- Application.raise_config_file_errors = old_value
-
- for config_file_name in self.config_files:
- if not config_file_name or config_file_name == base_config:
- continue
- self.log.debug("Attempting to load config file: %s" %
- self.config_file_name)
- try:
- Application.load_config_file(
- self,
- config_file_name,
- path=self.config_file_paths
- )
- except ConfigFileNotFound:
- # Only warn if the default config file was NOT being used.
- if config_file_name in self.config_file_specified:
- msg = self.log.warning
- else:
- msg = self.log.debug
- msg("Config file not found, skipping: %s", config_file_name)
- except Exception:
- # For testing purposes.
- if not suppress_errors:
- raise
- self.log.warning("Error loading config file: %s" %
- self.config_file_name, exc_info=True)
-
- def init_profile_dir(self):
- """initialize the profile dir"""
- self._in_init_profile_dir = True
- if self.profile_dir is not None:
- # already ran
- return
- if 'ProfileDir.location' not in self.config:
- # location not specified, find by profile name
- try:
- p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
- except ProfileDirError:
- # not found, maybe create it (always create default profile)
- if self.auto_create or self.profile == 'default':
- try:
- p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
- except ProfileDirError:
- self.log.fatal("Could not create profile: %r"%self.profile)
- self.exit(1)
- else:
- self.log.info("Created profile dir: %r"%p.location)
- else:
- self.log.fatal("Profile %r not found."%self.profile)
- self.exit(1)
- else:
- self.log.debug("Using existing profile dir: %r", p.location)
- else:
- location = self.config.ProfileDir.location
- # location is fully specified
- try:
- p = ProfileDir.find_profile_dir(location, self.config)
- except ProfileDirError:
- # not found, maybe create it
- if self.auto_create:
- try:
- p = ProfileDir.create_profile_dir(location, self.config)
- except ProfileDirError:
- self.log.fatal("Could not create profile directory: %r"%location)
- self.exit(1)
- else:
- self.log.debug("Creating new profile dir: %r"%location)
- else:
- self.log.fatal("Profile directory %r not found."%location)
- self.exit(1)
- else:
- self.log.debug("Using existing profile dir: %r", p.location)
- # if profile_dir is specified explicitly, set profile name
- dir_name = os.path.basename(p.location)
- if dir_name.startswith('profile_'):
- self.profile = dir_name[8:]
-
- self.profile_dir = p
- self.config_file_paths.append(p.location)
- self._in_init_profile_dir = False
-
- def init_config_files(self):
- """[optionally] copy default config files into profile dir."""
- self.config_file_paths.extend(ENV_CONFIG_DIRS)
- self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
- # copy config files
- path = Path(self.builtin_profile_dir)
- if self.copy_config_files:
- src = self.profile
-
- cfg = self.config_file_name
- if path and (path / cfg).exists():
- self.log.warning(
- "Staging %r from %s into %r [overwrite=%s]"
- % (cfg, src, self.profile_dir.location, self.overwrite)
- )
- self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite)
- else:
- self.stage_default_config_file()
- else:
- # Still stage *bundled* config files, but not generated ones
- # This is necessary for `ipython profile=sympy` to load the profile
- # on the first go
- files = path.glob("*.py")
- for fullpath in files:
- cfg = fullpath.name
- if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False):
- # file was copied
- self.log.warning("Staging bundled %s from %s into %r"%(
- cfg, self.profile, self.profile_dir.location)
- )
-
-
- def stage_default_config_file(self):
- """auto generate default config file, and stage it into the profile."""
- s = self.generate_config_file()
- config_file = Path(self.profile_dir.location) / self.config_file_name
- if self.overwrite or not config_file.exists():
- self.log.warning("Generating default config file: %r", (config_file))
- config_file.write_text(s, encoding="utf-8")
-
- @catch_config_error
- def initialize(self, argv=None):
- # don't hook up crash handler before parsing command-line
- self.parse_command_line(argv)
- self.init_crash_handler()
- if self.subapp is not None:
- # stop here if subapp is taking over
- return
- # save a copy of CLI config to re-load after config files
- # so that it has highest priority
- cl_config = deepcopy(self.config)
- self.init_profile_dir()
- self.init_config_files()
- self.load_config_file()
- # enforce cl-opts override configfile opts:
- self.update_config(cl_config)
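
For reference, a sketch of how the tri-state IPYTHON_SUPPRESS_CONFIG_ERRORS value
described above is interpreted (the helper name is illustrative, not IPython API):

    import os

    def parse_suppress_config_errors(value):
        """Map the environment variable value to True / False / None."""
        if value in (None, ''):
            return None          # defer to the traitlets.Application behaviour
        if value.lower() in ('1', 'true'):
            return True          # warn about broken config files instead of raising
        if value.lower() in ('0', 'false'):
            return False         # raise, so a broken config file makes tests fail
        raise SystemExit("Unsupported value: %r" % value)

    print(parse_suppress_config_errors(os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')))
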
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/async_helpers.py b/.venv/lib/python3.12/site-packages/IPython/core/async_helpers.py
deleted file mode 100644
index 4dfac54..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/async_helpers.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""
-Async helper function that are invalid syntax on Python 3.5 and below.
-
-This code is best effort, and may have edge cases not behaving as expected. In
-particular it contain a number of heuristics to detect whether code is
-effectively async and need to run in an event loop or not.
-
-Some constructs (like top-level `return`, or `yield`) are taken care of
-explicitly to actually raise a SyntaxError and stay as close as possible to
-Python semantics.
-"""
-
-import ast
-import asyncio
-import inspect
-from functools import wraps
-
-_asyncio_event_loop = None
-
-
-def get_asyncio_loop():
- """asyncio has deprecated get_event_loop
-
- Replicate it here, with our desired semantics:
-
- - always returns a valid, not-closed loop
- - not thread-local like asyncio's,
- because we only want one loop for IPython
- - if called from inside a coroutine (e.g. in ipykernel),
- return the running loop
-
- .. versionadded:: 8.0
- """
- try:
- return asyncio.get_running_loop()
- except RuntimeError:
- # not inside a coroutine,
- # track our own global
- pass
-
- # not thread-local like asyncio's,
- # because we only track one event loop to run for IPython itself,
- # always in the main thread.
- global _asyncio_event_loop
- if _asyncio_event_loop is None or _asyncio_event_loop.is_closed():
- _asyncio_event_loop = asyncio.new_event_loop()
- return _asyncio_event_loop
-
-
-class _AsyncIORunner:
- def __call__(self, coro):
- """
- Handler for asyncio autoawait
- """
- return get_asyncio_loop().run_until_complete(coro)
-
- def __str__(self):
- return "asyncio"
-
-
-_asyncio_runner = _AsyncIORunner()
-
-
-class _AsyncIOProxy:
- """Proxy-object for an asyncio
-
- Any coroutine methods will be wrapped in event_loop.run_
- """
-
- def __init__(self, obj, event_loop):
- self._obj = obj
- self._event_loop = event_loop
-
- def __repr__(self):
- return f"<_AsyncIOProxy({self._obj!r})>"
-
- def __getattr__(self, key):
- attr = getattr(self._obj, key)
- if inspect.iscoroutinefunction(attr):
- # if it's a coroutine method,
- # return a threadsafe wrapper onto the _current_ asyncio loop
- @wraps(attr)
- def _wrapped(*args, **kwargs):
- concurrent_future = asyncio.run_coroutine_threadsafe(
- attr(*args, **kwargs), self._event_loop
- )
- return asyncio.wrap_future(concurrent_future)
-
- return _wrapped
- else:
- return attr
-
- def __dir__(self):
- return dir(self._obj)
-
-
-def _curio_runner(coroutine):
- """
- handler for curio autoawait
- """
- import curio
-
- return curio.run(coroutine)
-
-
-def _trio_runner(async_fn):
- import trio
-
- async def loc(coro):
- """
- We need the dummy no-op async def to protect from
- trio's internal. See https://github.com/python-trio/trio/issues/89
- """
- return await coro
-
- return trio.run(loc, async_fn)
-
-
-def _pseudo_sync_runner(coro):
- """
- A runner that does not really allow async execution, and just advance the coroutine.
-
- See discussion in https://github.com/python-trio/trio/issues/608,
-
- Credit to Nathaniel Smith
- """
- try:
- coro.send(None)
- except StopIteration as exc:
- return exc.value
- else:
- # TODO: do not raise but return an execution result with the right info.
- raise RuntimeError(
- "{coro_name!r} needs a real async loop".format(coro_name=coro.__name__)
- )
-
-
-def _should_be_async(cell: str) -> bool:
- """Detect if a block of code need to be wrapped in an `async def`
-
- Attempt to parse the block of code; if it compiles, we're fine.
- Otherwise we wrap it and try to compile again.
-
- If it works, assume it should be async. Otherwise Return False.
-
- Not handled yet: If the block of code has a return statement as the top
- level, it will be seen as async. This is a known limitation.
- """
- try:
- code = compile(
- cell, "<>", "exec", flags=getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0)
- )
- return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE
- except (SyntaxError, MemoryError):
- return False
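
For reference, the heuristic used by `_should_be_async` above condenses to this
sketch (the function name and the "<cell>" filename are illustrative):

    import ast
    import inspect

    def cell_is_async(cell):
        """Return True if the cell contains top-level await (needs an event loop)."""
        try:
            code = compile(cell, "<cell>", "exec",
                           flags=getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0))
        except (SyntaxError, MemoryError):
            return False
        return bool(code.co_flags & inspect.CO_COROUTINE)

    print(cell_is_async("x = 1"))                                   # False
    print(cell_is_async("import asyncio\nawait asyncio.sleep(0)"))  # True on Python >= 3.8
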
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/autocall.py b/.venv/lib/python3.12/site-packages/IPython/core/autocall.py
deleted file mode 100644
index 54beec3..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/autocall.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# encoding: utf-8
-"""
-Autocall capabilities for IPython.core.
-
-Authors:
-
-* Brian Granger
-* Fernando Perez
-* Thomas Kluyver
-
-Notes
------
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-class IPyAutocall(object):
- """ Instances of this class are always autocalled
-
- This happens regardless of 'autocall' variable state. Use this to
- develop macro-like mechanisms.
- """
- _ip = None
- rewrite = True
- def __init__(self, ip=None):
- self._ip = ip
-
- def set_ip(self, ip):
- """Will be used to set _ip point to current ipython instance b/f call
-
- Override this method if you don't want this to happen.
-
- """
- self._ip = ip
-
-
-class ExitAutocall(IPyAutocall):
- """An autocallable object which will be added to the user namespace so that
- exit, exit(), quit or quit() are all valid ways to close the shell."""
- rewrite = False
-
- def __call__(self):
- self._ip.ask_exit()
-
-class ZMQExitAutocall(ExitAutocall):
- """Exit IPython. Autocallable, so it needn't be explicitly called.
-
- Parameters
- ----------
- keep_kernel : bool
- If True, leave the kernel alive. Otherwise, tell the kernel to exit too
- (default).
- """
- def __call__(self, keep_kernel=False):
- self._ip.keepkernel_on_exit = keep_kernel
- self._ip.ask_exit()
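
For reference, a minimal sketch of the "always autocalled" pattern described above;
the `Greet` class is illustrative, and the registration step assumes a running
IPython session:

    from IPython.core.autocall import IPyAutocall

    class Greet(IPyAutocall):
        """Typing just `greet` (no parentheses) in IPython calls this object."""
        def __call__(self):
            print("hello from", self._ip)

    # Inside an IPython session:
    #     ip = get_ipython()
    #     ip.user_ns['greet'] = Greet(ip)
    #     greet        # runs Greet.__call__ without parentheses
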
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/builtin_trap.py b/.venv/lib/python3.12/site-packages/IPython/core/builtin_trap.py
deleted file mode 100644
index a8ea4ab..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/builtin_trap.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""
-A context manager for managing things injected into :mod:`builtins`.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-import builtins as builtin_mod
-
-from traitlets.config.configurable import Configurable
-
-from traitlets import Instance
-
-
-class __BuiltinUndefined(object): pass
-BuiltinUndefined = __BuiltinUndefined()
-
-class __HideBuiltin(object): pass
-HideBuiltin = __HideBuiltin()
-
-
-class BuiltinTrap(Configurable):
-
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
- allow_none=True)
-
- def __init__(self, shell=None):
- super(BuiltinTrap, self).__init__(shell=shell, config=None)
- self._orig_builtins = {}
- # We define this to track if a single BuiltinTrap is nested.
- # Only turn off the trap when the outermost call to __exit__ is made.
- self._nested_level = 0
- self.shell = shell
- # builtins we always add - if set to HideBuiltin, they will just
- # be removed instead of being replaced by something else
- self.auto_builtins = {'exit': HideBuiltin,
- 'quit': HideBuiltin,
- 'get_ipython': self.shell.get_ipython,
- }
-
- def __enter__(self):
- if self._nested_level == 0:
- self.activate()
- self._nested_level += 1
- # I return self, so callers can use add_builtin in a with clause.
- return self
-
- def __exit__(self, type, value, traceback):
- if self._nested_level == 1:
- self.deactivate()
- self._nested_level -= 1
- # Returning False will cause exceptions to propagate
- return False
-
- def add_builtin(self, key, value):
- """Add a builtin and save the original."""
- bdict = builtin_mod.__dict__
- orig = bdict.get(key, BuiltinUndefined)
- if value is HideBuiltin:
- if orig is not BuiltinUndefined: #same as 'key in bdict'
- self._orig_builtins[key] = orig
- del bdict[key]
- else:
- self._orig_builtins[key] = orig
- bdict[key] = value
-
- def remove_builtin(self, key, orig):
- """Remove an added builtin and re-set the original."""
- if orig is BuiltinUndefined:
- del builtin_mod.__dict__[key]
- else:
- builtin_mod.__dict__[key] = orig
-
- def activate(self):
- """Store ipython references in the __builtin__ namespace."""
-
- add_builtin = self.add_builtin
- for name, func in self.auto_builtins.items():
- add_builtin(name, func)
-
- def deactivate(self):
- """Remove any builtins which might have been added by add_builtins, or
- restore overwritten ones to their previous values."""
- remove_builtin = self.remove_builtin
- for key, val in self._orig_builtins.items():
- remove_builtin(key, val)
- self._orig_builtins.clear()
- self._builtins_added = False
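
For reference, a self-contained sketch of the builtin-injection pattern that
`BuiltinTrap` implements above (the `builtin_trap` context manager here is
illustrative, not IPython API):

    import builtins
    from contextlib import contextmanager

    _UNSET = object()

    @contextmanager
    def builtin_trap(**injected):
        """Temporarily install the given names into builtins, restoring them on exit."""
        originals = {}
        for name, value in injected.items():
            originals[name] = getattr(builtins, name, _UNSET)
            setattr(builtins, name, value)
        try:
            yield
        finally:
            for name, original in originals.items():
                if original is _UNSET:
                    delattr(builtins, name)    # name did not exist before
                else:
                    setattr(builtins, name, original)

    with builtin_trap(get_ipython=lambda: "fake shell"):
        print(get_ipython())   # name resolves through builtins inside the block
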
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/compilerop.py b/.venv/lib/python3.12/site-packages/IPython/core/compilerop.py
deleted file mode 100644
index 7799a4f..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/compilerop.py
+++ /dev/null
@@ -1,214 +0,0 @@
-"""Compiler tools with improved interactive support.
-
-Provides compilation machinery similar to codeop, but with caching support so
-we can provide interactive tracebacks.
-
-Authors
--------
-* Robert Kern
-* Fernando Perez
-* Thomas Kluyver
-"""
-
-# Note: though it might be more natural to name this module 'compiler', that
-# name is in the stdlib and name collisions with the stdlib tend to produce
-# weird problems (often with third-party tools).
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team.
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib imports
-import __future__
-from ast import PyCF_ONLY_AST
-import codeop
-import functools
-import hashlib
-import linecache
-import operator
-import time
-from contextlib import contextmanager
-
-#-----------------------------------------------------------------------------
-# Constants
-#-----------------------------------------------------------------------------
-
-# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
-# this is used as a bitmask to extract future-related code flags.
-PyCF_MASK = functools.reduce(operator.or_,
- (getattr(__future__, fname).compiler_flag
- for fname in __future__.all_feature_names))
-
-#-----------------------------------------------------------------------------
-# Local utilities
-#-----------------------------------------------------------------------------
-
-def code_name(code, number=0):
- """ Compute a (probably) unique name for code for caching.
-
- This now expects code to be unicode.
- """
- hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
- # Include the number and 12 characters of the hash in the name. It's
- # pretty much impossible that in a single session we'll have collisions
- # even with truncated hashes, and the full one makes tracebacks too long
- return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-class CachingCompiler(codeop.Compile):
- """A compiler that caches code compiled from interactive statements.
- """
-
- def __init__(self):
- codeop.Compile.__init__(self)
-
- # Caching a dictionary { filename: execution_count } for nicely
- # rendered tracebacks. The filename corresponds to the filename
- # argument used for the builtins.compile function.
- self._filename_map = {}
-
- def ast_parse(self, source, filename='<unknown>', symbol='exec'):
- """Parse code to an AST with the current compiler flags active.
-
- Arguments are exactly the same as ast.parse (in the standard library),
- and are passed to the built-in compile function."""
- return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
-
- def reset_compiler_flags(self):
- """Reset compiler flags to default state."""
- # This value is copied from codeop.Compile.__init__, so if that ever
- # changes, it will need to be updated.
- self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
-
- @property
- def compiler_flags(self):
- """Flags currently active in the compilation process.
- """
- return self.flags
-
- def get_code_name(self, raw_code, transformed_code, number):
- """Compute filename given the code, and the cell number.
-
- Parameters
- ----------
- raw_code : str
- The raw cell code.
- transformed_code : str
- The executable Python source code to cache and compile.
- number : int
- A number which forms part of the code's name. Used for the execution
- counter.
-
- Returns
- -------
- The computed filename.
- """
- return code_name(transformed_code, number)
-
- def format_code_name(self, name):
- """Return a user-friendly label and name for a code block.
-
- Parameters
- ----------
- name : str
- The name for the code block returned from get_code_name
-
- Returns
- -------
- A (label, name) pair that can be used in tracebacks, or None if the default formatting should be used.
- """
- if name in self._filename_map:
- return "Cell", "In[%s]" % self._filename_map[name]
-
- def cache(self, transformed_code, number=0, raw_code=None):
- """Make a name for a block of code, and cache the code.
-
- Parameters
- ----------
- transformed_code : str
- The executable Python source code to cache and compile.
- number : int
- A number which forms part of the code's name. Used for the execution
- counter.
- raw_code : str
- The raw code before transformation, if None, set to `transformed_code`.
-
- Returns
- -------
- The name of the cached code (as a string). Pass this as the filename
- argument to compilation, so that tracebacks are correctly hooked up.
- """
- if raw_code is None:
- raw_code = transformed_code
-
- name = self.get_code_name(raw_code, transformed_code, number)
-
- # Save the execution count
- self._filename_map[name] = number
-
- # Since Python 2.5, setting mtime to `None` means the lines will
- # never be removed by `linecache.checkcache`. This means all the
- # monkeypatching has *never* been necessary, since this code was
- # only added in 2010, at which point IPython had already stopped
- # supporting Python 2.4.
- #
- # Note that `linecache.clearcache` and `linecache.updatecache` may
- # still remove our code from the cache, but those show explicit
- # intent, and we should not try to interfere. Normally the former
- # is never called except when out of memory, and the latter is only
- # called for lines *not* in the cache.
- entry = (
- len(transformed_code),
- None,
- [line + "\n" for line in transformed_code.splitlines()],
- name,
- )
- linecache.cache[name] = entry
- return name
-
- @contextmanager
- def extra_flags(self, flags):
- ## bits that we'll set to 1
- turn_on_bits = ~self.flags & flags
-
-
- self.flags = self.flags | flags
- try:
- yield
- finally:
- # turn off only the bits we turned on so that something like
- # __future__ that set flags stays.
- self.flags &= ~turn_on_bits
-
-
-def check_linecache_ipython(*args):
- """Deprecated since IPython 8.6. Call linecache.checkcache() directly.
-
- It was already not necessary to call this function directly. If no
- CachingCompiler had been created, this function would fail badly. If
- an instance had been created, this function would've been monkeypatched
- into place.
-
- As of IPython 8.6, the monkeypatching has gone away entirely. But there
- were still internal callers of this function, so maybe external callers
- also existed?
- """
- import warnings
-
- warnings.warn(
- "Deprecated Since IPython 8.6, Just call linecache.checkcache() directly.",
- DeprecationWarning,
- stacklevel=2,
- )
- linecache.checkcache()
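
For reference, a minimal sketch of the linecache registration performed by
`CachingCompiler.cache` above, which is what lets tracebacks show interactive
source (the `cache_cell` helper is illustrative):

    import hashlib
    import linecache
    import traceback

    def cache_cell(source, number):
        """Register interactive source under a synthetic filename."""
        name = "<ipython-input-{0}-{1}>".format(
            number, hashlib.sha1(source.encode("utf-8")).hexdigest()[:12])
        # mtime=None keeps the entry from being evicted by linecache.checkcache()
        linecache.cache[name] = (len(source), None,
                                 [line + "\n" for line in source.splitlines()], name)
        return name

    src = "def boom():\n    return 1 / 0\n"
    fname = cache_cell(src, number=3)
    ns = {}
    exec(compile(src, fname, "exec"), ns)
    try:
        ns["boom"]()
    except ZeroDivisionError:
        traceback.print_exc()   # the frame for `boom` shows its cached source line
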
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/completer.py b/.venv/lib/python3.12/site-packages/IPython/core/completer.py
deleted file mode 100644
index b39a922..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/completer.py
+++ /dev/null
@@ -1,3421 +0,0 @@
-"""Completion for IPython.
-
-This module started as fork of the rlcompleter module in the Python standard
-library. The original enhancements made to rlcompleter have been sent
-upstream and were accepted as of Python 2.3,
-
-This module now supports a wide variety of completion mechanisms, both for
-normal classic Python code and for
-IPython-specific syntax such as magics.
-
-Latex and Unicode completion
-============================
-
-IPython and compatible frontends not only can complete your code, but can help
-you to input a wide range of characters. In particular we allow you to insert
-a unicode character using the tab completion mechanism.
-
-Forward latex/unicode completion
---------------------------------
-
-Forward completion allows you to easily type a unicode character using its latex
-name, or unicode long description. To do so type a backslash follow by the
-relevant name and press tab:
-
-
-Using latex completion:
-
-.. code::
-
- \\alpha<tab>
- α
-
-or using unicode completion:
-
-
-.. code::
-
- \\GREEK SMALL LETTER ALPHA<tab>
- α
-
-
-Only valid Python identifiers will complete. Combining characters (like arrow or
-dots) are also available, unlike latex they need to be put after the their
-counterpart that is to say, ``F\\\\vec`` is correct, not ``\\\\vecF``.
-
-Some browsers are known to display combining characters incorrectly.
-
-Backward latex completion
--------------------------
-
-It is sometimes challenging to know how to type a character; if you are using
-IPython, or any compatible frontend you can prepend backslash to the character
-and press :kbd:`Tab` to expand it to its latex form.
-
-.. code::
-
- \\α<tab>
- \\alpha
-
-
-Both forward and backward completions can be deactivated by setting the
-:std:configtrait:`Completer.backslash_combining_completions` option to
-``False``.
-
-
-Experimental
-============
-
-Starting with IPython 6.0, this module can make use of the Jedi library to
-generate completions both using static analysis of the code, and dynamically
-inspecting multiple namespaces. Jedi is an autocompletion and static analysis
-for Python. The APIs attached to this new mechanism is unstable and will
-raise unless use in an :any:`provisionalcompleter` context manager.
-
-You will find that the following are experimental:
-
- - :any:`provisionalcompleter`
- - :any:`IPCompleter.completions`
- - :any:`Completion`
- - :any:`rectify_completions`
-
-.. note::
-
- better name for :any:`rectify_completions` ?
-
-We welcome any feedback on these new API, and we also encourage you to try this
-module in debug mode (start IPython with ``--Completer.debug=True``) in order
-to have extra logging information if :any:`jedi` is crashing, or if current
-IPython completer pending deprecations are returning results not yet handled
-by :any:`jedi`
-
-Using Jedi for tab completion allows snippets like the following to work without
-having to execute any code:
-
- >>> myvar = ['hello', 42]
- ... myvar[1].bi<tab>
-
-Tab completion will be able to infer that ``myvar[1]`` is a real number without
-executing almost any code unlike the deprecated :any:`IPCompleter.greedy`
-option.
-
-Be sure to update :any:`jedi` to the latest stable version or to try the
-current development version to get better completions.
-
-Matchers
-========
-
-All completions routines are implemented using unified *Matchers* API.
-The matchers API is provisional and subject to change without notice.
-
-The built-in matchers include:
-
-- :any:`IPCompleter.dict_key_matcher`: dictionary key completions,
-- :any:`IPCompleter.magic_matcher`: completions for magics,
-- :any:`IPCompleter.unicode_name_matcher`,
- :any:`IPCompleter.fwd_unicode_matcher`
- and :any:`IPCompleter.latex_name_matcher`: see `Forward latex/unicode completion`_,
-- :any:`back_unicode_name_matcher` and :any:`back_latex_name_matcher`: see `Backward latex completion`_,
-- :any:`IPCompleter.file_matcher`: paths to files and directories,
-- :any:`IPCompleter.python_func_kw_matcher` - function keywords,
-- :any:`IPCompleter.python_matches` - globals and attributes (v1 API),
-- ``IPCompleter.jedi_matcher`` - static analysis with Jedi,
-- :any:`IPCompleter.custom_completer_matcher` - pluggable completer with a default
- implementation in :any:`InteractiveShell` which uses IPython hooks system
- (`complete_command`) with string dispatch (including regular expressions).
- Differently to other matchers, ``custom_completer_matcher`` will not suppress
- Jedi results to match behaviour in earlier IPython versions.
-
-Custom matchers can be added by appending to ``IPCompleter.custom_matchers`` list.
-
-Matcher API
------------
-
-Simplifying some details, the ``Matcher`` interface can be described as
-
-.. code-block::
-
- MatcherAPIv1 = Callable[[str], list[str]]
- MatcherAPIv2 = Callable[[CompletionContext], SimpleMatcherResult]
-
- Matcher = MatcherAPIv1 | MatcherAPIv2
-
-The ``MatcherAPIv1`` reflects the matcher API as available prior to IPython 8.6.0
-and remains supported as a simplest way for generating completions. This is also
-currently the only API supported by the IPython hooks system `complete_command`.
-
-To distinguish between matcher versions ``matcher_api_version`` attribute is used.
-More precisely, the API allows to omit ``matcher_api_version`` for v1 Matchers,
-and requires a literal ``2`` for v2 Matchers.
-
-Once the API stabilises future versions may relax the requirement for specifying
-``matcher_api_version`` by switching to :any:`functools.singledispatch`, therefore
-please do not rely on the presence of ``matcher_api_version`` for any purposes.
-
-Suppression of competing matchers
----------------------------------
-
-By default results from all matchers are combined, in the order determined by
-their priority. Matchers can request to suppress results from subsequent
-matchers by setting ``suppress`` to ``True`` in the ``MatcherResult``.
-
-When multiple matchers simultaneously request suppression, the results from
-the matcher with higher priority will be returned.
-
-Sometimes it is desirable to suppress most but not all other matchers;
-this can be achieved by adding a set of identifiers of matchers which
-should not be suppressed to ``MatcherResult`` under ``do_not_suppress`` key.
-
-The suppression behaviour is user-configurable via
-:std:configtrait:`IPCompleter.suppress_competing_matchers`.
-"""
-
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-#
-# Some of this code originated from rlcompleter in the Python standard library
-# Copyright (C) 2001 Python Software Foundation, www.python.org
-
-from __future__ import annotations
-import builtins as builtin_mod
-import enum
-import glob
-import inspect
-import itertools
-import keyword
-import ast
-import os
-import re
-import string
-import sys
-import tokenize
-import time
-import unicodedata
-import uuid
-import warnings
-from ast import literal_eval
-from collections import defaultdict
-from contextlib import contextmanager
-from dataclasses import dataclass
-from functools import cached_property, partial
-from types import SimpleNamespace
-from typing import (
- Iterable,
- Iterator,
- List,
- Tuple,
- Union,
- Any,
- Sequence,
- Dict,
- Optional,
- TYPE_CHECKING,
- Set,
- Sized,
- TypeVar,
- Literal,
-)
-
-from IPython.core.guarded_eval import guarded_eval, EvaluationContext
-from IPython.core.error import TryNext
-from IPython.core.inputtransformer2 import ESC_MAGIC
-from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol
-from IPython.core.oinspect import InspectColors
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils import generics
-from IPython.utils.decorators import sphinx_options
-from IPython.utils.dir2 import dir2, get_real_method
-from IPython.utils.docs import GENERATING_DOCUMENTATION
-from IPython.utils.path import ensure_dir_exists
-from IPython.utils.process import arg_split
-from traitlets import (
- Bool,
- Enum,
- Int,
- List as ListTrait,
- Unicode,
- Dict as DictTrait,
- Union as UnionTrait,
- observe,
-)
-from traitlets.config.configurable import Configurable
-
-import __main__
-
-# skip module docstests
-__skip_doctest__ = True
-
-
-try:
- import jedi
- jedi.settings.case_insensitive_completion = False
- import jedi.api.helpers
- import jedi.api.classes
- JEDI_INSTALLED = True
-except ImportError:
- JEDI_INSTALLED = False
-
-
-if TYPE_CHECKING or GENERATING_DOCUMENTATION and sys.version_info >= (3, 11):
- from typing import cast
- from typing_extensions import TypedDict, NotRequired, Protocol, TypeAlias, TypeGuard
-else:
- from typing import Generic
-
- def cast(type_, obj):
- """Workaround for `TypeError: MatcherAPIv2() takes no arguments`"""
- return obj
-
- # do not require on runtime
- NotRequired = Tuple # requires Python >=3.11
- TypedDict = Dict # by extension of `NotRequired` requires 3.11 too
- Protocol = object # requires Python >=3.8
- TypeAlias = Any # requires Python >=3.10
- TypeGuard = Generic # requires Python >=3.10
-if GENERATING_DOCUMENTATION:
- from typing import TypedDict
-
-# -----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-
-# ranges where we have most of the valid unicode names. We could be finer
-# grained, but is it worth it for performance? While unicode has characters in
-# the range 0, 0x110000, we seem to have names for about 10% of those (131808 as
-# I write this). With the range below we cover them all, with a density of ~67%;
-# the biggest next gap we could consider only adds about 1% density and there
-# are 600 gaps that would need hard coding.
-_UNICODE_RANGES = [(32, 0x323B0), (0xE0001, 0xE01F0)]
-
-# Public API
-__all__ = ["Completer", "IPCompleter"]
-
-if sys.platform == 'win32':
- PROTECTABLES = ' '
-else:
- PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&'
-
-# Protect against returning an enormous number of completions which the frontend
-# may have trouble processing.
-MATCHES_LIMIT = 500
-
-# Completion type reported when no type can be inferred.
-_UNKNOWN_TYPE = ""
-
-# sentinel value to signal lack of a match
-not_found = object()
-
-class ProvisionalCompleterWarning(FutureWarning):
- """
- Exception raise by an experimental feature in this module.
-
- Wrap code in :any:`provisionalcompleter` context manager if you
- are certain you want to use an unstable feature.
- """
- pass
-
-warnings.filterwarnings('error', category=ProvisionalCompleterWarning)
-
-
-@skip_doctest
-@contextmanager
-def provisionalcompleter(action='ignore'):
- """
- This context manager has to be used in any place where unstable completer
- behavior and API may be called.
-
- >>> with provisionalcompleter():
- ... completer.do_experimental_things() # works
-
- >>> completer.do_experimental_things() # raises.
-
- .. note::
-
- Unstable
-
- By using this context manager you agree that the API in use may change
- without warning, and that you won't complain if they do so.
-
- You also understand that, if the API is not to your liking, you should report
- a bug to explain your use case upstream.
-
- We'll be happy to get your feedback, feature requests, and improvements on
- any of the unstable APIs!
- """
- with warnings.catch_warnings():
- warnings.filterwarnings(action, category=ProvisionalCompleterWarning)
- yield
-
-
-def has_open_quotes(s: str) -> Union[str, bool]:
- """Return whether a string has open quotes.
-
- This simply counts whether the number of quote characters of either type in
- the string is odd.
-
- Returns
- -------
- If there is an open quote, the quote character is returned. Else, return
- False.
- """
- # We check " first, then ', so complex cases with nested quotes will get
- # the " to take precedence.
- if s.count('"') % 2:
- return '"'
- elif s.count("'") % 2:
- return "'"
- else:
- return False
-
-
-def protect_filename(s: str, protectables: str = PROTECTABLES) -> str:
- """Escape a string to protect certain characters."""
- if set(s) & set(protectables):
- if sys.platform == "win32":
- return '"' + s + '"'
- else:
- return "".join(("\\" + c if c in protectables else c) for c in s)
- else:
- return s
-
-
-def expand_user(path:str) -> Tuple[str, bool, str]:
- """Expand ``~``-style usernames in strings.
-
- This is similar to :func:`os.path.expanduser`, but it computes and returns
- extra information that will be useful if the input was being used in
- computing completions, and you wish to return the completions with the
- original '~' instead of its expanded value.
-
- Parameters
- ----------
- path : str
- String to be expanded. If no ~ is present, the output is the same as the
- input.
-
- Returns
- -------
- newpath : str
- Result of ~ expansion in the input path.
- tilde_expand : bool
- Whether any expansion was performed or not.
- tilde_val : str
- The value that ~ was replaced with.
- """
- # Default values
- tilde_expand = False
- tilde_val = ''
- newpath = path
-
- if path.startswith('~'):
- tilde_expand = True
- rest = len(path)-1
- newpath = os.path.expanduser(path)
- if rest:
- tilde_val = newpath[:-rest]
- else:
- tilde_val = newpath
-
- return newpath, tilde_expand, tilde_val
-
-
-def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str:
- """Does the opposite of expand_user, with its outputs.
- """
- if tilde_expand:
- return path.replace(tilde_val, '~')
- else:
- return path
-
-
-def completions_sorting_key(word):
- """key for sorting completions
-
- This does several things:
-
- - Demote any completions starting with underscores to the end
- - Insert any %magic and %%cellmagic completions in the alphabetical order
- by their name
- """
- prio1, prio2 = 0, 0
-
- if word.startswith('__'):
- prio1 = 2
- elif word.startswith('_'):
- prio1 = 1
-
- if word.endswith('='):
- prio1 = -1
-
- if word.startswith('%%'):
- # If there's another % in there, this is something else, so leave it alone
- if "%" not in word[2:]:
- word = word[2:]
- prio2 = 2
- elif word.startswith('%'):
- if "%" not in word[1:]:
- word = word[1:]
- prio2 = 1
-
- return prio1, word, prio2
-
-
-class _FakeJediCompletion:
- """
- This is a workaround to communicate to the UI that Jedi has crashed and to
- report a bug. Will be used only if :any:`IPCompleter.debug` is set to true.
-
- Added in IPython 6.0 so should likely be removed for 7.0
-
- """
-
- def __init__(self, name):
-
- self.name = name
- self.complete = name
- self.type = 'crashed'
- self.name_with_symbols = name
- self.signature = ""
- self._origin = "fake"
- self.text = "crashed"
-
- def __repr__(self):
- return '<Fake completion object jedi has crashed>'
-
-
-_JediCompletionLike = Union["jedi.api.Completion", _FakeJediCompletion]
-
-
-class Completion:
- """
- Completion object used and returned by IPython completers.
-
- .. warning::
-
- Unstable
-
- This function is unstable, API may change without warning.
- It will also raise unless used in the proper context manager.
-
- This act as a middle ground :any:`Completion` object between the
- :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion
- object. While Jedi need a lot of information about evaluator and how the
- code should be ran/inspected, PromptToolkit (and other frontend) mostly
- need user facing information.
-
- - Which range should be replaced by what.
- - Some metadata (like completion type), or meta information to be displayed to
- the user.
-
- For debugging purpose we can also store the origin of the completion (``jedi``,
- ``IPython.python_matches``, ``IPython.magics_matches``...).
- """
-
- __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin']
-
- def __init__(
- self,
- start: int,
- end: int,
- text: str,
- *,
- type: Optional[str] = None,
- _origin="",
- signature="",
- ) -> None:
- warnings.warn(
- "``Completion`` is a provisional API (as of IPython 6.0). "
- "It may change without warnings. "
- "Use in corresponding context manager.",
- category=ProvisionalCompleterWarning,
- stacklevel=2,
- )
-
- self.start = start
- self.end = end
- self.text = text
- self.type = type
- self.signature = signature
- self._origin = _origin
-
- def __repr__(self):
- return '<Completion start=%s end=%s text=%r type=%r, signature=%r,>' % \
- (self.start, self.end, self.text, self.type or '?', self.signature or '?')
-
- def __eq__(self, other) -> bool:
- """
- Equality and hash do not hash the type (as some completer may not be
- able to infer the type), but are use to (partially) de-duplicate
- completion.
-
- Completely de-duplicating completions is a bit trickier than just
- comparing as it depends on surrounding text, which Completions are not
- aware of.
- """
- return self.start == other.start and \
- self.end == other.end and \
- self.text == other.text
-
- def __hash__(self):
- return hash((self.start, self.end, self.text))
-
-
-class SimpleCompletion:
- """Completion item to be included in the dictionary returned by new-style Matcher (API v2).
-
- .. warning::
-
- Provisional
-
- This class is used to describe the currently supported attributes of
- simple completion items, and any additional implementation details
- should not be relied on. Additional attributes may be included in
- future versions, and meaning of text disambiguated from the current
- dual meaning of "text to insert" and "text to used as a label".
- """
-
- __slots__ = ["text", "type"]
-
- def __init__(self, text: str, *, type: Optional[str] = None):
- self.text = text
- self.type = type
-
- def __repr__(self):
- return f""
-
-
-class _MatcherResultBase(TypedDict):
- """Definition of dictionary to be returned by new-style Matcher (API v2)."""
-
- #: Suffix of the provided ``CompletionContext.token``, if not given defaults to full token.
- matched_fragment: NotRequired[str]
-
- #: Whether to suppress results from all other matchers (True), some
- #: matchers (set of identifiers) or none (False); default is False.
- suppress: NotRequired[Union[bool, Set[str]]]
-
- #: Identifiers of matchers which should NOT be suppressed when this matcher
- #: requests to suppress all other matchers; defaults to an empty set.
- do_not_suppress: NotRequired[Set[str]]
-
- #: Are completions already ordered and should be left as-is? default is False.
- ordered: NotRequired[bool]
-
-
-@sphinx_options(show_inherited_members=True, exclude_inherited_from=["dict"])
-class SimpleMatcherResult(_MatcherResultBase, TypedDict):
- """Result of new-style completion matcher."""
-
- # note: TypedDict is added again to the inheritance chain
- # in order to get __orig_bases__ for documentation
-
- #: List of candidate completions
- completions: Sequence[SimpleCompletion] | Iterator[SimpleCompletion]
-
-
-class _JediMatcherResult(_MatcherResultBase):
- """Matching result returned by Jedi (will be processed differently)"""
-
- #: list of candidate completions
- completions: Iterator[_JediCompletionLike]
-
-
-AnyMatcherCompletion = Union[_JediCompletionLike, SimpleCompletion]
-AnyCompletion = TypeVar("AnyCompletion", AnyMatcherCompletion, Completion)
-
-
-@dataclass
-class CompletionContext:
- """Completion context provided as an argument to matchers in the Matcher API v2."""
-
- # rationale: many legacy matchers relied on completer state (`self.text_until_cursor`)
- # which was not explicitly visible as an argument of the matcher, making any refactor
- # prone to errors; by explicitly passing `cursor_position` we can decouple the matchers
- # from the completer, and make substituting them in sub-classes easier.
-
- #: Relevant fragment of code directly preceding the cursor.
- #: The extraction of token is implemented via splitter heuristic
- #: (following readline behaviour for legacy reasons), which is user configurable
- #: (by switching the greedy mode).
- token: str
-
- #: The full available content of the editor or buffer
- full_text: str
-
- #: Cursor position in the line (the same for ``full_text`` and ``text``).
- cursor_position: int
-
- #: Cursor line in ``full_text``.
- cursor_line: int
-
- #: The maximum number of completions that will be used downstream.
- #: Matchers can use this information to abort early.
- #: The built-in Jedi matcher is currently excepted from this limit.
- # If not given, return all possible completions.
- limit: Optional[int]
-
- @cached_property
- def text_until_cursor(self) -> str:
- return self.line_with_cursor[: self.cursor_position]
-
- @cached_property
- def line_with_cursor(self) -> str:
- return self.full_text.split("\n")[self.cursor_line]
-
-
-#: Matcher results for API v2.
-MatcherResult = Union[SimpleMatcherResult, _JediMatcherResult]
-
-
-class _MatcherAPIv1Base(Protocol):
- def __call__(self, text: str) -> List[str]:
- """Call signature."""
- ...
-
- #: Used to construct the default matcher identifier
- __qualname__: str
-
-
-class _MatcherAPIv1Total(_MatcherAPIv1Base, Protocol):
- #: API version
- matcher_api_version: Optional[Literal[1]]
-
- def __call__(self, text: str) -> List[str]:
- """Call signature."""
- ...
-
-
-#: Protocol describing Matcher API v1.
-MatcherAPIv1: TypeAlias = Union[_MatcherAPIv1Base, _MatcherAPIv1Total]
-
-
-class MatcherAPIv2(Protocol):
- """Protocol describing Matcher API v2."""
-
- #: API version
- matcher_api_version: Literal[2] = 2
-
- def __call__(self, context: CompletionContext) -> MatcherResult:
- """Call signature."""
- ...
-
- #: Used to construct the default matcher identifier
- __qualname__: str
-
-
-Matcher: TypeAlias = Union[MatcherAPIv1, MatcherAPIv2]
-
-
-def _is_matcher_v1(matcher: Matcher) -> TypeGuard[MatcherAPIv1]:
- api_version = _get_matcher_api_version(matcher)
- return api_version == 1
-
-
-def _is_matcher_v2(matcher: Matcher) -> TypeGuard[MatcherAPIv2]:
- api_version = _get_matcher_api_version(matcher)
- return api_version == 2
-
-
-def _is_sizable(value: Any) -> TypeGuard[Sized]:
- """Determines whether objects is sizable"""
- return hasattr(value, "__len__")
-
-
-def _is_iterator(value: Any) -> TypeGuard[Iterator]:
- """Determines whether objects is sizable"""
- return hasattr(value, "__next__")
-
-
-def has_any_completions(result: MatcherResult) -> bool:
- """Check if any result includes any completions."""
- completions = result["completions"]
- if _is_sizable(completions):
- return len(completions) != 0
- if _is_iterator(completions):
- try:
- old_iterator = completions
- first = next(old_iterator)
- result["completions"] = cast(
- Iterator[SimpleCompletion],
- itertools.chain([first], old_iterator),
- )
- return True
- except StopIteration:
- return False
- raise ValueError(
- "Completions returned by matcher need to be an Iterator or a Sizable"
- )
-
-
-def completion_matcher(
- *,
- priority: Optional[float] = None,
- identifier: Optional[str] = None,
- api_version: int = 1,
-) -> Callable[[Matcher], Matcher]:
- """Adds attributes describing the matcher.
-
- Parameters
- ----------
- priority : Optional[float]
- The priority of the matcher, determines the order of execution of matchers.
- Higher priority means that the matcher will be executed first. Defaults to 0.
- identifier : Optional[str]
- identifier of the matcher allowing users to modify the behaviour via traitlets,
- and also used for debugging (will be passed as ``origin`` with the completions).
-
- Defaults to matcher function's ``__qualname__`` (for example,
- ``IPCompleter.file_matcher`` for the built-in matcher defined
- as a ``file_matcher`` method of the ``IPCompleter`` class).
- api_version: Optional[int]
- version of the Matcher API used by this matcher.
- Currently supported values are 1 and 2.
- Defaults to 1.
- """
-
- def wrapper(func: Matcher):
- func.matcher_priority = priority or 0 # type: ignore
- func.matcher_identifier = identifier or func.__qualname__ # type: ignore
- func.matcher_api_version = api_version # type: ignore
- if TYPE_CHECKING:
- if api_version == 1:
- func = cast(MatcherAPIv1, func)
- elif api_version == 2:
- func = cast(MatcherAPIv2, func)
- return func
-
- return wrapper
-
-
-def _get_matcher_priority(matcher: Matcher):
- return getattr(matcher, "matcher_priority", 0)
-
-
-def _get_matcher_id(matcher: Matcher):
- return getattr(matcher, "matcher_identifier", matcher.__qualname__)
-
-
-def _get_matcher_api_version(matcher):
- return getattr(matcher, "matcher_api_version", 1)
-
-
-context_matcher = partial(completion_matcher, api_version=2)
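-
-# An illustrative sketch of registering a custom Matcher API v2 matcher with
-# the decorators above (the matcher name and behaviour are hypothetical, not
-# part of IPython):
-#
-#     @context_matcher(identifier="my_project.unit_matcher", priority=50)
-#     def unit_matcher(context: CompletionContext) -> SimpleMatcherResult:
-#         completions = [
-#             SimpleCompletion(text=unit, type="unit")
-#             for unit in ("meters", "seconds")
-#             if unit.startswith(context.token)
-#         ]
-#         return {"completions": completions, "suppress": False}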
-
-
-_IC = Iterable[Completion]
-
-
-def _deduplicate_completions(text: str, completions: _IC)-> _IC:
- """
- Deduplicate a set of completions.
-
- .. warning::
-
- Unstable
-
- This function is unstable, API may change without warning.
-
- Parameters
- ----------
- text : str
- text that should be completed.
- completions : Iterator[Completion]
- iterator over the completions to deduplicate
-
- Yields
- ------
- `Completions` objects
- Completions coming from multiple sources may be different but end up having
- the same effect when applied to ``text``. If this is the case, they are
- considered equal and only the first one encountered is emitted.
- Not folded into `completions()` yet for debugging purposes, and to detect when
- the IPython completer returns things that Jedi does not, but should, at some
- point.
- """
- completions = list(completions)
- if not completions:
- return
-
- new_start = min(c.start for c in completions)
- new_end = max(c.end for c in completions)
-
- seen = set()
- for c in completions:
- new_text = text[new_start:c.start] + c.text + text[c.end:new_end]
- if new_text not in seen:
- yield c
- seen.add(new_text)
-
-
-def rectify_completions(text: str, completions: _IC, *, _debug: bool = False) -> _IC:
- """
- Rectify a set of completions to all have the same ``start`` and ``end``
-
- .. warning::
-
- Unstable
-
- This function is unstable, API may change without warning.
- It will also raise unless used in the proper context manager.
-
- Parameters
- ----------
- text : str
- text that should be completed.
- completions : Iterator[Completion]
- iterator over the completions to rectify
- _debug : bool
- Log failed completion
-
- Notes
- -----
- :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though
- the Jupyter Protocol requires them to behave like so. This will readjust
- the completion to have the same ``start`` and ``end`` by padding both
- extremities with surrounding text.
-
- During stabilisation this should support a ``_debug`` option to log which
- completions are returned by the IPython completer and not found in Jedi, in
- order to make upstream bug reports.
- """
- warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). "
- "It may change without warnings. "
- "Use in corresponding context manager.",
- category=ProvisionalCompleterWarning, stacklevel=2)
-
- completions = list(completions)
- if not completions:
- return
- starts = (c.start for c in completions)
- ends = (c.end for c in completions)
-
- new_start = min(starts)
- new_end = max(ends)
-
- seen_jedi = set()
- seen_python_matches = set()
- for c in completions:
- new_text = text[new_start:c.start] + c.text + text[c.end:new_end]
- if c._origin == 'jedi':
- seen_jedi.add(new_text)
- elif c._origin == "IPCompleter.python_matcher":
- seen_python_matches.add(new_text)
- yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature)
- diff = seen_python_matches.difference(seen_jedi)
- if diff and _debug:
- print('IPython.python matches have extras:', diff)
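-
-# A rough usage sketch, assuming an ``ip`` interactive shell instance; the
-# provisional-API warning emitted above is silenced by the corresponding
-# context manager:
-#
-#     with provisionalcompleter():
-#         completions = ip.Completer.completions("d.from", 6)
-#         completions = rectify_completions("d.from", completions)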
-
-
-if sys.platform == 'win32':
- DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?'
-else:
- DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?'
-
-GREEDY_DELIMS = ' =\r\n'
-
-
-class CompletionSplitter(object):
- """An object to split an input line in a manner similar to readline.
-
- By having our own implementation, we can expose readline-like completion in
- a uniform manner to all frontends. This object only needs to be given the
- line of text to be split and the cursor position on said line, and it
- returns the 'word' to be completed on at the cursor after splitting the
- entire line.
-
- What characters are used as splitting delimiters can be controlled by
- setting the ``delims`` attribute (this is a property that internally
- automatically builds the necessary regular expression)"""
-
- # Private interface
-
- # A string of delimiter characters. The default value makes sense for
- # IPython's most typical usage patterns.
- _delims = DELIMS
-
- # The expression (a normal string) to be compiled into a regular expression
- # for actual splitting. We store it as an attribute mostly for ease of
- # debugging, since this type of code can be so tricky to debug.
- _delim_expr = None
-
- # The regular expression that does the actual splitting
- _delim_re = None
-
- def __init__(self, delims=None):
- delims = CompletionSplitter._delims if delims is None else delims
- self.delims = delims
-
- @property
- def delims(self):
- """Return the string of delimiter characters."""
- return self._delims
-
- @delims.setter
- def delims(self, delims):
- """Set the delimiters for line splitting."""
- expr = '[' + ''.join('\\'+ c for c in delims) + ']'
- self._delim_re = re.compile(expr)
- self._delims = delims
- self._delim_expr = expr
-
- def split_line(self, line, cursor_pos=None):
- """Split a line of text with a cursor at the given position.
- """
- cut_line = line if cursor_pos is None else line[:cursor_pos]
- return self._delim_re.split(cut_line)[-1]
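-
- # For instance (illustrative), with the default delimiters:
- #
- #     CompletionSplitter().split_line("print(os.pa")
- #
- # returns "os.pa", the word under the cursor after splitting on delimiters.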
-
-
-
-class Completer(Configurable):
-
- greedy = Bool(
- False,
- help="""Activate greedy completion.
-
- .. deprecated:: 8.8
- Use :std:configtrait:`Completer.evaluation` and :std:configtrait:`Completer.auto_close_dict_keys` instead.
-
- When enabled in IPython 8.8 or newer, changes configuration as follows:
-
- - ``Completer.evaluation = 'unsafe'``
- - ``Completer.auto_close_dict_keys = True``
- """,
- ).tag(config=True)
-
- evaluation = Enum(
- ("forbidden", "minimal", "limited", "unsafe", "dangerous"),
- default_value="limited",
- help="""Policy for code evaluation under completion.
-
- Successive options allow enabling more eager evaluation for better
- completion suggestions, including for nested dictionaries, nested lists,
- or even results of function calls.
- Setting ``unsafe`` or higher can lead to evaluation of arbitrary user
- code on :kbd:`Tab` with potentially unwanted or dangerous side effects.
-
- Allowed values are:
-
- - ``forbidden``: no evaluation of code is permitted,
- - ``minimal``: evaluation of literals and access to built-in namespace;
- no item/attribute evaluation, no access to locals/globals,
- no evaluation of any operations or comparisons.
- - ``limited``: access to all namespaces, evaluation of hard-coded methods
- (for example: :any:`dict.keys`, :any:`object.__getattr__`,
- :any:`object.__getitem__`) on allow-listed objects (for example:
- :any:`dict`, :any:`list`, :any:`tuple`, ``pandas.Series``),
- - ``unsafe``: evaluation of all methods and function calls but not of
- syntax with side-effects like `del x`,
- - ``dangerous``: completely arbitrary evaluation.
- """,
- ).tag(config=True)
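-
- # For example, a cautious configuration might pin this in a profile's
- # ``ipython_config.py`` (a sketch using the standard traitlets mechanism):
- #
- #     c.Completer.evaluation = "minimal"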
-
- use_jedi = Bool(default_value=JEDI_INSTALLED,
- help="Experimental: Use Jedi to generate autocompletions. "
- "Default to True if jedi is installed.").tag(config=True)
-
- jedi_compute_type_timeout = Int(default_value=400,
- help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types.
- Set to 0 to stop computing types. A non-zero value lower than 100ms may hurt
- performance by preventing Jedi from building its cache.
- """).tag(config=True)
-
- debug = Bool(default_value=False,
- help='Enable debug for the Completer. Mostly print extra '
- 'information for experimental jedi integration.')\
- .tag(config=True)
-
- backslash_combining_completions = Bool(True,
- help="Enable unicode completions, e.g. \\alpha . "
- "Includes completion of latex commands, unicode names, and expanding "
- "unicode characters back to latex commands.").tag(config=True)
-
- auto_close_dict_keys = Bool(
- False,
- help="""
- Enable auto-closing dictionary keys.
-
- When enabled, string keys will be suffixed with a final quote
- (matching the opening quote), tuple keys will also receive a
- separating comma if needed, and keys which are final will
- receive a closing bracket (``]``).
- """,
- ).tag(config=True)
-
- def __init__(self, namespace=None, global_namespace=None, **kwargs):
- """Create a new completer for the command line.
-
- Completer(namespace=ns, global_namespace=ns2) -> completer instance.
-
- If unspecified, the default namespace where completions are performed
- is __main__ (technically, __main__.__dict__). Namespaces should be
- given as dictionaries.
-
- An optional second namespace can be given. This allows the completer
- to handle cases where both the local and global scopes need to be
- distinguished.
- """
-
- # Don't bind to namespace quite yet, but flag whether the user wants a
- # specific namespace or to use __main__.__dict__. This will allow us
- # to bind to __main__.__dict__ at completion time, not now.
- if namespace is None:
- self.use_main_ns = True
- else:
- self.use_main_ns = False
- self.namespace = namespace
-
- # The global namespace, if given, can be bound directly
- if global_namespace is None:
- self.global_namespace = {}
- else:
- self.global_namespace = global_namespace
-
- self.custom_matchers = []
-
- super(Completer, self).__init__(**kwargs)
-
- def complete(self, text, state):
- """Return the next possible completion for 'text'.
-
- This is called successively with state == 0, 1, 2, ... until it
- returns None. The completion should begin with 'text'.
-
- """
- if self.use_main_ns:
- self.namespace = __main__.__dict__
-
- if state == 0:
- if "." in text:
- self.matches = self.attr_matches(text)
- else:
- self.matches = self.global_matches(text)
- try:
- return self.matches[state]
- except IndexError:
- return None
-
- def global_matches(self, text):
- """Compute matches when text is a simple name.
-
- Return a list of all keywords, built-in functions and names currently
- defined in self.namespace or self.global_namespace that match.
-
- """
- matches = []
- match_append = matches.append
- n = len(text)
- for lst in [
- keyword.kwlist,
- builtin_mod.__dict__.keys(),
- list(self.namespace.keys()),
- list(self.global_namespace.keys()),
- ]:
- for word in lst:
- if word[:n] == text and word != "__builtins__":
- match_append(word)
-
- snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z")
- for lst in [list(self.namespace.keys()), list(self.global_namespace.keys())]:
- shortened = {
- "_".join([sub[0] for sub in word.split("_")]): word
- for word in lst
- if snake_case_re.match(word)
- }
- for word in shortened.keys():
- if word[:n] == text and word != "__builtins__":
- match_append(shortened[word])
- return matches
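-
- # The second loop above also offers abbreviation matches for snake_case
- # names, e.g. (illustrative):
- #
- #     Completer(namespace={"first_second_third": 1}).global_matches("f_s_t")
- #     # -> ["first_second_third"]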
-
- def attr_matches(self, text):
- """Compute matches when text contains a dot.
-
- Assuming the text is of the form NAME.NAME....[NAME], and is
- evaluatable in self.namespace or self.global_namespace, it will be
- evaluated and its attributes (as revealed by dir()) are used as
- possible completions. (For class instances, class members are
- also considered.)
-
- WARNING: this can still invoke arbitrary C code, if an object
- with a __getattr__ hook is evaluated.
-
- """
- return self._attr_matches(text)[0]
-
- # we do simple attribute matching with normal identifiers.
- _ATTR_MATCH_RE = re.compile(r"(.+)\.(\w*)$")
-
- def _attr_matches(
- self, text: str, include_prefix: bool = True
- ) -> Tuple[Sequence[str], str]:
- m2 = self._ATTR_MATCH_RE.match(self.line_buffer)
- if not m2:
- return [], ""
- expr, attr = m2.group(1, 2)
-
- obj = self._evaluate_expr(expr)
-
- if obj is not_found:
- return [], ""
-
- if self.limit_to__all__ and hasattr(obj, '__all__'):
- words = get__all__entries(obj)
- else:
- words = dir2(obj)
-
- try:
- words = generics.complete_object(obj, words)
- except TryNext:
- pass
- except AssertionError:
- raise
- except Exception:
- # Silence errors from completion function
- pass
- # Build match list to return
- n = len(attr)
-
- # Note: ideally we would just return words here and the prefix
- # reconciliator would know that we intend to append to rather than
- # replace the input text; this requires refactoring to return range
- # which ought to be replaced (as does jedi).
- if include_prefix:
- tokens = _parse_tokens(expr)
- rev_tokens = reversed(tokens)
- skip_over = {tokenize.ENDMARKER, tokenize.NEWLINE}
- name_turn = True
-
- parts = []
- for token in rev_tokens:
- if token.type in skip_over:
- continue
- if token.type == tokenize.NAME and name_turn:
- parts.append(token.string)
- name_turn = False
- elif (
- token.type == tokenize.OP and token.string == "." and not name_turn
- ):
- parts.append(token.string)
- name_turn = True
- else:
- # short-circuit if not empty nor name token
- break
-
- prefix_after_space = "".join(reversed(parts))
- else:
- prefix_after_space = ""
-
- return (
- ["%s.%s" % (prefix_after_space, w) for w in words if w[:n] == attr],
- "." + attr,
- )
-
- def _trim_expr(self, code: str) -> str:
- """
- Trim the code until it is a valid expression and not a tuple;
-
- return the trimmed expression for guarded_eval.
- """
- while code:
- code = code[1:]
- try:
- res = ast.parse(code)
- except SyntaxError:
- continue
-
- assert res is not None
- if len(res.body) != 1:
- continue
- expr = res.body[0].value
- if isinstance(expr, ast.Tuple) and not code[-1] == ")":
- # we skip implicit tuple, like when trimming `fun(a,b`
- # as `a,b` would be a tuple, and we actually expect to get only `b`
- continue
- return code
- return ""
-
- def _evaluate_expr(self, expr):
- obj = not_found
- done = False
- while not done and expr:
- try:
- obj = guarded_eval(
- expr,
- EvaluationContext(
- globals=self.global_namespace,
- locals=self.namespace,
- evaluation=self.evaluation,
- ),
- )
- done = True
- except Exception as e:
- if self.debug:
- print("Evaluation exception", e)
- # trim the expression to remove any invalid prefix
- # e.g. user starts `(d[`, so we get `expr = '(d'`,
- # where parenthesis is not closed.
- # TODO: make this faster by reusing parts of the computation?
- expr = self._trim_expr(expr)
- return obj
-
-def get__all__entries(obj):
- """returns the strings in the __all__ attribute"""
- try:
- words = getattr(obj, '__all__')
- except Exception:
- return []
-
- return [w for w in words if isinstance(w, str)]
-
-
-class _DictKeyState(enum.Flag):
- """Represent state of the key match in context of other possible matches.
-
- - given `d1 = {'a': 1}` completion on `d1['` will yield `{'a': END_OF_ITEM}` as there is no tuple.
- - given `d2 = {('a', 'b'): 1}`: `d2['a', '` will yield `{'b': END_OF_TUPLE}` as there are no tuple members to add beyond `'b'`.
- - given `d3 = {('a', 'b'): 1}`: `d3['` will yield `{'a': IN_TUPLE}` as further tuple members can be added after `'a'`.
- - given `d4 = {'a': 1, ('a', 'b'): 2}`: `d4['` will yield `{'a': END_OF_ITEM | END_OF_TUPLE}`
- """
-
- BASELINE = 0
- END_OF_ITEM = enum.auto()
- END_OF_TUPLE = enum.auto()
- IN_TUPLE = enum.auto()
-
-
-def _parse_tokens(c):
- """Parse tokens even if there is an error."""
- tokens = []
- token_generator = tokenize.generate_tokens(iter(c.splitlines()).__next__)
- while True:
- try:
- tokens.append(next(token_generator))
- except tokenize.TokenError:
- return tokens
- except StopIteration:
- return tokens
-
-
-def _match_number_in_dict_key_prefix(prefix: str) -> Union[str, None]:
- """Match any valid Python numeric literal in a prefix of dictionary keys.
-
- References:
- - https://docs.python.org/3/reference/lexical_analysis.html#numeric-literals
- - https://docs.python.org/3/library/tokenize.html
- """
- if prefix[-1].isspace():
- # if user typed a space we do not have anything to complete
- # even if there was a valid number token before
- return None
- tokens = _parse_tokens(prefix)
- rev_tokens = reversed(tokens)
- skip_over = {tokenize.ENDMARKER, tokenize.NEWLINE}
- number = None
- for token in rev_tokens:
- if token.type in skip_over:
- continue
- if number is None:
- if token.type == tokenize.NUMBER:
- number = token.string
- continue
- else:
- # we did not match a number
- return None
- if token.type == tokenize.OP:
- if token.string == ",":
- break
- if token.string in {"+", "-"}:
- number = token.string + number
- else:
- return None
- return number
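-
-# Illustrative behaviour of the helper above:
-#
-#     _match_number_in_dict_key_prefix("-12")   # -> "-12"
-#     _match_number_in_dict_key_prefix("0x5")   # -> "0x5"
-#     _match_number_in_dict_key_prefix("abc")   # -> None (not a number)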
-
-
-_INT_FORMATS = {
- "0b": bin,
- "0o": oct,
- "0x": hex,
-}
-
-
-def match_dict_keys(
- keys: List[Union[str, bytes, Tuple[Union[str, bytes], ...]]],
- prefix: str,
- delims: str,
- extra_prefix: Optional[Tuple[Union[str, bytes], ...]] = None,
-) -> Tuple[str, int, Dict[str, _DictKeyState]]:
- """Used by dict_key_matches, matching the prefix to a list of keys
-
- Parameters
- ----------
- keys
- list of keys in dictionary currently being completed.
- prefix
- Part of the text already typed by the user. E.g. `mydict[b'fo`
- delims
- String of delimiters to consider when finding the current key.
- extra_prefix : optional
- Part of the text already typed in multi-key index cases. E.g. for
- `mydict['foo', "bar", 'b`, this would be `('foo', 'bar')`.
-
- Returns
- -------
- A tuple of three elements: ``quote``, ``token_start``, ``matched``, with
- ``quote`` being the quote that needs to be used to close the current string,
- ``token_start`` the position where the replacement should start occurring,
- ``matched`` a dictionary with replacement/completion strings as keys and
- their key-match state (``_DictKeyState``) as values.
- """
- prefix_tuple = extra_prefix if extra_prefix else ()
-
- prefix_tuple_size = sum(
- [
- # for pandas, do not count slices as taking space
- not isinstance(k, slice)
- for k in prefix_tuple
- ]
- )
- text_serializable_types = (str, bytes, int, float, slice)
-
- def filter_prefix_tuple(key):
- # Reject too short keys
- if len(key) <= prefix_tuple_size:
- return False
- # Reject keys which cannot be serialised to text
- for k in key:
- if not isinstance(k, text_serializable_types):
- return False
- # Reject keys that do not match the prefix
- for k, pt in zip(key, prefix_tuple):
- if k != pt and not isinstance(pt, slice):
- return False
- # All checks passed!
- return True
-
- filtered_key_is_final: Dict[Union[str, bytes, int, float], _DictKeyState] = (
- defaultdict(lambda: _DictKeyState.BASELINE)
- )
-
- for k in keys:
- # If at least one of the matches is not final, mark as undetermined.
- # This can happen with `d = {111: 'b', (111, 222): 'a'}` where
- # `111` appears final on first match but is not final on the second.
-
- if isinstance(k, tuple):
- if filter_prefix_tuple(k):
- key_fragment = k[prefix_tuple_size]
- filtered_key_is_final[key_fragment] |= (
- _DictKeyState.END_OF_TUPLE
- if len(k) == prefix_tuple_size + 1
- else _DictKeyState.IN_TUPLE
- )
- elif prefix_tuple_size > 0:
- # we are completing a tuple but this key is not a tuple,
- # so we should ignore it
- pass
- else:
- if isinstance(k, text_serializable_types):
- filtered_key_is_final[k] |= _DictKeyState.END_OF_ITEM
-
- filtered_keys = filtered_key_is_final.keys()
-
- if not prefix:
- return "", 0, {repr(k): v for k, v in filtered_key_is_final.items()}
-
- quote_match = re.search("(?:\"|')", prefix)
- is_user_prefix_numeric = False
-
- if quote_match:
- quote = quote_match.group()
- valid_prefix = prefix + quote
- try:
- prefix_str = literal_eval(valid_prefix)
- except Exception:
- return "", 0, {}
- else:
- # If it does not look like a string, let's assume
- # we are dealing with a number or variable.
- number_match = _match_number_in_dict_key_prefix(prefix)
-
- # We do not want the key matcher to suggest variable names, so we return:
- if number_match is None:
- # The alternative would be to assume that the user forgot the quote
- # and if the substring matches, suggest adding it at the start.
- return "", 0, {}
-
- prefix_str = number_match
- is_user_prefix_numeric = True
- quote = ""
-
- pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
- token_match = re.search(pattern, prefix, re.UNICODE)
- assert token_match is not None # silence mypy
- token_start = token_match.start()
- token_prefix = token_match.group()
-
- matched: Dict[str, _DictKeyState] = {}
-
- str_key: Union[str, bytes]
-
- for key in filtered_keys:
- if isinstance(key, (int, float)):
- # The key is a number but the user did not type a number.
- if not is_user_prefix_numeric:
- continue
- str_key = str(key)
- if isinstance(key, int):
- int_base = prefix_str[:2].lower()
- # if user typed integer using binary/oct/hex notation:
- if int_base in _INT_FORMATS:
- int_format = _INT_FORMATS[int_base]
- str_key = int_format(key)
- else:
- # The user typed a number but this key is not a number.
- if is_user_prefix_numeric:
- continue
- str_key = key
- try:
- if not str_key.startswith(prefix_str):
- continue
- except (AttributeError, TypeError, UnicodeError):
- # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
- continue
-
- # reformat remainder of key to begin with prefix
- rem = str_key[len(prefix_str) :]
- # force repr wrapped in '
- rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"')
- rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
- if quote == '"':
- # The entered prefix is quoted with ",
- # but the match is quoted with '.
- # A contained " hence needs escaping for comparison:
- rem_repr = rem_repr.replace('"', '\\"')
-
- # then reinsert prefix from start of token
- match = "%s%s" % (token_prefix, rem_repr)
-
- matched[match] = filtered_key_is_final[key]
- return quote, token_start, matched
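-
-# A small illustration of the contract described above:
-#
-#     quote, start, matched = match_dict_keys(["foo", "foobar"], "'fo", DELIMS)
-#     # quote == "'", start == 1, and ``matched`` maps "foo" and "foobar"
-#     # to ``_DictKeyState.END_OF_ITEM``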
-
-
-def cursor_to_position(text:str, line:int, column:int)->int:
- """
- Convert the (line,column) position of the cursor in text to an offset in a
- string.
-
- Parameters
- ----------
- text : str
- The text in which to calculate the cursor offset
- line : int
- Line of the cursor; 0-indexed
- column : int
- Column of the cursor 0-indexed
-
- Returns
- -------
- Position of the cursor in ``text``, 0-indexed.
-
- See Also
- --------
- position_to_cursor : reciprocal of this function
-
- """
- lines = text.split('\n')
- assert line <= len(lines), '{} <= {}'.format(str(line), str(len(lines)))
-
- return sum(len(line) + 1 for line in lines[:line]) + column
-
-def position_to_cursor(text:str, offset:int)->Tuple[int, int]:
- """
- Convert the position of the cursor in text (0 indexed) to a line
- number(0-indexed) and a column number (0-indexed) pair
-
- Position should be a valid position in ``text``.
-
- Parameters
- ----------
- text : str
- The text in which to calculate the cursor offset
- offset : int
- Position of the cursor in ``text``, 0-indexed.
-
- Returns
- -------
- (line, column) : (int, int)
- Line of the cursor; 0-indexed, column of the cursor 0-indexed
-
- See Also
- --------
- cursor_to_position : reciprocal of this function
-
- """
-
- assert 0 <= offset <= len(text) , "0 <= %s <= %s" % (offset , len(text))
-
- before = text[:offset]
- blines = before.split('\n') # ! splitlines trims the trailing \n
- line = before.count('\n')
- col = len(blines[-1])
- return line, col
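-
-# The two helpers above are reciprocal, e.g.:
-#
-#     cursor_to_position("ab\ncd", 1, 1)   # -> 4
-#     position_to_cursor("ab\ncd", 4)      # -> (1, 1)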
-
-
-def _safe_isinstance(obj, module, class_name, *attrs):
- """Checks if obj is an instance of module.class_name if loaded
- """
- if module in sys.modules:
- m = sys.modules[module]
- for attr in [class_name, *attrs]:
- m = getattr(m, attr)
- return isinstance(obj, m)
-
-
-@context_matcher()
-def back_unicode_name_matcher(context: CompletionContext):
- """Match Unicode characters back to Unicode name
-
- Same as :any:`back_unicode_name_matches`, but adopted to new Matcher API.
- """
- fragment, matches = back_unicode_name_matches(context.text_until_cursor)
- return _convert_matcher_v1_result_to_v2(
- matches, type="unicode", fragment=fragment, suppress_if_matches=True
- )
-
-
-def back_unicode_name_matches(text: str) -> Tuple[str, Sequence[str]]:
- """Match Unicode characters back to Unicode name
-
- This does ``☃`` -> ``\\snowman``
-
- Note that snowman is not a valid python3 combining character but will be expanded,
- though it will not be recombined back into the snowman character by the completion machinery.
-
- This will not either back-complete standard sequences like \\n, \\b ...
-
- .. deprecated:: 8.6
- You can use :meth:`back_unicode_name_matcher` instead.
-
- Returns
- -------
-
- Return a tuple with two elements:
-
- - The Unicode character that was matched (preceded with a backslash), or
- empty string,
- - a sequence (of length 1) with the name of the matched Unicode character,
- preceded by a backslash, or empty if no match.
- """
- if len(text)<2:
- return '', ()
- maybe_slash = text[-2]
- if maybe_slash != '\\':
- return '', ()
-
- char = text[-1]
- # no expand on quote for completion in strings.
- # nor backcomplete standard ascii keys
- if char in string.ascii_letters or char in ('"',"'"):
- return '', ()
- try :
- unic = unicodedata.name(char)
- return '\\'+char,('\\'+unic,)
- except KeyError:
- pass
- return '', ()
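-
-# For example (illustrative):
-#
-#     back_unicode_name_matches("\\☃")
-#     # -> ('\\☃', ('\\SNOWMAN',))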
-
-
-@context_matcher()
-def back_latex_name_matcher(context: CompletionContext):
- """Match latex characters back to unicode name
-
- Same as :any:`back_latex_name_matches`, but adopted to new Matcher API.
- """
- fragment, matches = back_latex_name_matches(context.text_until_cursor)
- return _convert_matcher_v1_result_to_v2(
- matches, type="latex", fragment=fragment, suppress_if_matches=True
- )
-
-
-def back_latex_name_matches(text: str) -> Tuple[str, Sequence[str]]:
- """Match latex characters back to unicode name
-
- This does ``\\ℵ`` -> ``\\aleph``
-
- .. deprecated:: 8.6
- You can use :meth:`back_latex_name_matcher` instead.
- """
- if len(text)<2:
- return '', ()
- maybe_slash = text[-2]
- if maybe_slash != '\\':
- return '', ()
-
-
- char = text[-1]
- # no expand on quote for completion in strings.
- # nor backcomplete standard ascii keys
- if char in string.ascii_letters or char in ('"',"'"):
- return '', ()
- try :
- latex = reverse_latex_symbol[char]
- # '\\' replaces the \ as well
- return '\\'+char,[latex]
- except KeyError:
- pass
- return '', ()
-
-
-def _formatparamchildren(parameter) -> str:
- """
- Get parameter name and value from Jedi Private API
-
- Jedi does not expose a simple way to get `param=value` from its API.
-
- Parameters
- ----------
- parameter
- Jedi's function `Param`
-
- Returns
- -------
- A string like 'a', 'b=1', '*args', '**kwargs'
-
- """
- description = parameter.description
- if not description.startswith('param '):
- raise ValueError('Jedi function parameter description has changed format. '
- 'Expected "param ...", found %r".' % description)
- return description[6:]
-
-def _make_signature(completion)-> str:
- """
- Make the signature from a jedi completion
-
- Parameters
- ----------
- completion : jedi.Completion
- object does not complete a function type
-
- Returns
- -------
- a string consisting of the function signature, with the parenthesis but
- without the function name. example:
- `(a, *args, b=1, **kwargs)`
-
- """
-
- # it looks like this might work on jedi 0.17
- if hasattr(completion, 'get_signatures'):
- signatures = completion.get_signatures()
- if not signatures:
- return '(?)'
-
- c0 = completion.get_signatures()[0]
- return '('+c0.to_string().split('(', maxsplit=1)[1]
-
- return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for signature in completion.get_signatures()
- for p in signature.defined_names()) if f])
-
-
-_CompleteResult = Dict[str, MatcherResult]
-
-
-DICT_MATCHER_REGEX = re.compile(
- r"""(?x)
-( # match dict-referring - or any get item object - expression
- .+
-)
-\[ # open bracket
-\s* # and optional whitespace
-# Capture any number of serializable objects (e.g. "a", "b", 'c')
-# and slices
-((?:(?:
- (?: # closed string
- [uUbB]? # string prefix (r not handled)
- (?:
- '(?:[^']|(?<!\\)\\')*'
- |
- "(?:[^"]|(?<!\\)\\")*"
- )
- )
- |
- # capture integers and slices
- (?:[-+eE0-9.:]+)
- |
- # integer in bin/hex/oct notation
- (?:0[bBxXoO]_?(?:\w|\d)+)
- )
- \s*,\s*
-)*)
-((?:(?:
- (?: # unclosed string
- [uUbB]? # string prefix (r not handled)
- (?:
- '(?:[^']|(?<!\\)\\')*
- |
- "(?:[^"]|(?<!\\)\\")*
- )
- )
- |
- # unfinished integer
- (?:[-+eE0-9.:]+)
- |
- # integer in bin/hex/oct notation
- (?:0[bBxXoO]_?(?:\w|\d)+)
- )
-)?)
-$
-"""
-)
-
-
-def _convert_matcher_v1_result_to_v2(
- matches: Sequence[str],
- type: str,
- fragment: Optional[str] = None,
- suppress_if_matches: bool = False,
-) -> SimpleMatcherResult:
- """Utility to help with transition"""
- result = {
- "completions": [SimpleCompletion(text=match, type=type) for match in matches],
- "suppress": (True if matches else False) if suppress_if_matches else False,
- }
- if fragment is not None:
- result["matched_fragment"] = fragment
- return cast(SimpleMatcherResult, result)
-
-
-class IPCompleter(Completer):
- """Extension of the completer class with IPython-specific features"""
-
- @observe('greedy')
- def _greedy_changed(self, change):
- """update the splitter and readline delims when greedy is changed"""
- if change["new"]:
- self.evaluation = "unsafe"
- self.auto_close_dict_keys = True
- self.splitter.delims = GREEDY_DELIMS
- else:
- self.evaluation = "limited"
- self.auto_close_dict_keys = False
- self.splitter.delims = DELIMS
-
- dict_keys_only = Bool(
- False,
- help="""
- Whether to show dict key matches only.
-
- (disables all matchers except for `IPCompleter.dict_key_matcher`).
- """,
- )
-
- suppress_competing_matchers = UnionTrait(
- [Bool(allow_none=True), DictTrait(Bool(None, allow_none=True))],
- default_value=None,
- help="""
- Whether to suppress completions from other *Matchers*.
-
- When set to ``None`` (default) the matchers will attempt to auto-detect
- whether suppression of other matchers is desirable. For example, at
- the beginning of a line followed by `%` we expect a magic completion
- to be the only applicable option, and after ``my_dict['`` we usually
- expect a completion with an existing dictionary key.
-
- If you want to disable this heuristic and see completions from all matchers,
- set ``IPCompleter.suppress_competing_matchers = False``.
- To disable the heuristic for specific matchers provide a dictionary mapping:
- ``IPCompleter.suppress_competing_matchers = {'IPCompleter.dict_key_matcher': False}``.
-
- Set ``IPCompleter.suppress_competing_matchers = True`` to limit
- completions to the set of matchers with the highest priority;
- this is equivalent to ``IPCompleter.merge_completions = False`` and
- can be beneficial for performance, but will sometimes omit relevant
- candidates from matchers further down the priority list.
- """,
- ).tag(config=True)
-
- merge_completions = Bool(
- True,
- help="""Whether to merge completion results into a single list
-
- If False, only the completion results from the first non-empty
- completer will be returned.
-
- As of version 8.6.0, setting the value to ``False`` is an alias for:
- ``IPCompleter.suppress_competing_matchers = True``.
- """,
- ).tag(config=True)
-
- disable_matchers = ListTrait(
- Unicode(),
- help="""List of matchers to disable.
-
- The list should contain matcher identifiers (see :any:`completion_matcher`).
- """,
- ).tag(config=True)
-
- omit__names = Enum(
- (0, 1, 2),
- default_value=2,
- help="""Instruct the completer to omit private method names
-
- Specifically, when completing on ``object.``.
-
- When 2 [default]: all names that start with '_' will be excluded.
-
- When 1: all 'magic' names (``__foo__``) will be excluded.
-
- When 0: nothing will be excluded.
- """
- ).tag(config=True)
- limit_to__all__ = Bool(False,
- help="""
- DEPRECATED as of version 5.0.
-
- Instruct the completer to use __all__ for the completion
-
- Specifically, when completing on ``object.``.
-
- When True: only those names in obj.__all__ will be included.
-
- When False [default]: the __all__ attribute is ignored
- """,
- ).tag(config=True)
-
- profile_completions = Bool(
- default_value=False,
- help="If True, emit profiling data for completion subsystem using cProfile."
- ).tag(config=True)
-
- profiler_output_dir = Unicode(
- default_value=".completion_profiles",
- help="Template for path at which to output profile data for completions."
- ).tag(config=True)
-
- @observe('limit_to__all__')
- def _limit_to_all_changed(self, change):
- warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration '
- 'value has been deprecated since IPython 5.0, will be made to have '
- 'no effect and then removed in a future version of IPython.',
- UserWarning)
-
- def __init__(
- self, shell=None, namespace=None, global_namespace=None, config=None, **kwargs
- ):
- """IPCompleter() -> completer
-
- Return a completer object.
-
- Parameters
- ----------
- shell
- a pointer to the ipython shell itself. This is needed
- because this completer knows about magic functions, and those can
- only be accessed via the ipython instance.
- namespace : dict, optional
- an optional dict where completions are performed.
- global_namespace : dict, optional
- secondary optional dict for completions, to
- handle cases (such as IPython embedded inside functions) where
- both Python scopes are visible.
- config : Config
- traitlet's config object
- **kwargs
- passed to super class unmodified.
- """
-
- self.magic_escape = ESC_MAGIC
- self.splitter = CompletionSplitter()
-
- # _greedy_changed() depends on splitter and readline being defined:
- super().__init__(
- namespace=namespace,
- global_namespace=global_namespace,
- config=config,
- **kwargs,
- )
-
- # List where completion matches will be stored
- self.matches = []
- self.shell = shell
- # Regexp to split filenames with spaces in them
- self.space_name_re = re.compile(r'([^\\] )')
- # Hold a local ref. to glob.glob for speed
- self.glob = glob.glob
-
- # Determine if we are running on 'dumb' terminals, like (X)Emacs
- # buffers, to avoid completion problems.
- term = os.environ.get('TERM','xterm')
- self.dumb_terminal = term in ['dumb','emacs']
-
- # Special handling of backslashes needed in win32 platforms
- if sys.platform == "win32":
- self.clean_glob = self._clean_glob_win32
- else:
- self.clean_glob = self._clean_glob
-
- #regexp to parse docstring for function signature
- self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
- self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
- #use this if positional argument name is also needed
- #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)')
-
- self.magic_arg_matchers = [
- self.magic_config_matcher,
- self.magic_color_matcher,
- ]
-
- # This is set externally by InteractiveShell
- self.custom_completers = None
-
- # This is a list of names of unicode characters that can be completed
- # into their corresponding unicode value. The list is large, so we
- # lazily initialize it on first use. Consuming code should access this
- # attribute through the `@unicode_names` property.
- self._unicode_names = None
-
- self._backslash_combining_matchers = [
- self.latex_name_matcher,
- self.unicode_name_matcher,
- back_latex_name_matcher,
- back_unicode_name_matcher,
- self.fwd_unicode_matcher,
- ]
-
- if not self.backslash_combining_completions:
- for matcher in self._backslash_combining_matchers:
- self.disable_matchers.append(_get_matcher_id(matcher))
-
- if not self.merge_completions:
- self.suppress_competing_matchers = True
-
- @property
- def matchers(self) -> List[Matcher]:
- """All active matcher routines for completion"""
- if self.dict_keys_only:
- return [self.dict_key_matcher]
-
- if self.use_jedi:
- return [
- *self.custom_matchers,
- *self._backslash_combining_matchers,
- *self.magic_arg_matchers,
- self.custom_completer_matcher,
- self.magic_matcher,
- self._jedi_matcher,
- self.dict_key_matcher,
- self.file_matcher,
- ]
- else:
- return [
- *self.custom_matchers,
- *self._backslash_combining_matchers,
- *self.magic_arg_matchers,
- self.custom_completer_matcher,
- self.dict_key_matcher,
- self.magic_matcher,
- self.python_matcher,
- self.file_matcher,
- self.python_func_kw_matcher,
- ]
-
- def all_completions(self, text:str) -> List[str]:
- """
- Wrapper around the completion methods for the benefit of emacs.
- """
- prefix = text.rpartition('.')[0]
- with provisionalcompleter():
- return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text
- for c in self.completions(text, len(text))]
-
- return self.complete(text)[1]
-
- def _clean_glob(self, text:str):
- return self.glob("%s*" % text)
-
- def _clean_glob_win32(self, text:str):
- return [f.replace("\\","/")
- for f in self.glob("%s*" % text)]
-
- @context_matcher()
- def file_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Same as :any:`file_matches`, but adopted to new Matcher API."""
- matches = self.file_matches(context.token)
- # TODO: add a heuristic for suppressing (e.g. if it has OS-specific delimiter,
- # starts with `/home/`, `C:\`, etc)
- return _convert_matcher_v1_result_to_v2(matches, type="path")
-
- def file_matches(self, text: str) -> List[str]:
- """Match filenames, expanding ~USER type strings.
-
- Most of the seemingly convoluted logic in this completer is an
- attempt to handle filenames with spaces in them. And yet it's not
- quite perfect, because Python's readline doesn't expose all of the
- GNU readline details needed for this to be done correctly.
-
- For a filename with a space in it, the printed completions will be
- only the parts after what's already been typed (instead of the
- full completions, as is normally done). I don't think with the
- current (as of Python 2.3) Python readline it's possible to do
- better.
-
- .. deprecated:: 8.6
- You can use :meth:`file_matcher` instead.
- """
-
- # chars that require escaping with backslash - i.e. chars
- # that readline treats incorrectly as delimiters, but we
- # don't want to treat as delimiters in filename matching
- # when escaped with backslash
- if text.startswith('!'):
- text = text[1:]
- text_prefix = u'!'
- else:
- text_prefix = u''
-
- text_until_cursor = self.text_until_cursor
- # track strings with open quotes
- open_quotes = has_open_quotes(text_until_cursor)
-
- if '(' in text_until_cursor or '[' in text_until_cursor:
- lsplit = text
- else:
- try:
- # arg_split ~ shlex.split, but with unicode bugs fixed by us
- lsplit = arg_split(text_until_cursor)[-1]
- except ValueError:
- # typically an unmatched ", or backslash without escaped char.
- if open_quotes:
- lsplit = text_until_cursor.split(open_quotes)[-1]
- else:
- return []
- except IndexError:
- # tab pressed on empty line
- lsplit = ""
-
- if not open_quotes and lsplit != protect_filename(lsplit):
- # if protectables are found, do matching on the whole escaped name
- has_protectables = True
- text0,text = text,lsplit
- else:
- has_protectables = False
- text = os.path.expanduser(text)
-
- if text == "":
- return [text_prefix + protect_filename(f) for f in self.glob("*")]
-
- # Compute the matches from the filesystem
- if sys.platform == 'win32':
- m0 = self.clean_glob(text)
- else:
- m0 = self.clean_glob(text.replace('\\', ''))
-
- if has_protectables:
- # If we had protectables, we need to revert our changes to the
- # beginning of filename so that we don't double-write the part
- # of the filename we have so far
- len_lsplit = len(lsplit)
- matches = [text_prefix + text0 +
- protect_filename(f[len_lsplit:]) for f in m0]
- else:
- if open_quotes:
- # if we have a string with an open quote, we don't need to
- # protect the names beyond the quote (and we _shouldn't_, as
- # it would cause bugs when the filesystem call is made).
- matches = m0 if sys.platform == "win32" else\
- [protect_filename(f, open_quotes) for f in m0]
- else:
- matches = [text_prefix +
- protect_filename(f) for f in m0]
-
- # Mark directories in input list by appending '/' to their names.
- return [x+'/' if os.path.isdir(x) else x for x in matches]
-
- @context_matcher()
- def magic_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match magics."""
- text = context.token
- matches = self.magic_matches(text)
- result = _convert_matcher_v1_result_to_v2(matches, type="magic")
- is_magic_prefix = len(text) > 0 and text[0] == "%"
- result["suppress"] = is_magic_prefix and bool(result["completions"])
- return result
-
- def magic_matches(self, text: str) -> List[str]:
- """Match magics.
-
- .. deprecated:: 8.6
- You can use :meth:`magic_matcher` instead.
- """
- # Get all shell magics now rather than statically, so magics loaded at
- # runtime show up too.
- lsm = self.shell.magics_manager.lsmagic()
- line_magics = lsm['line']
- cell_magics = lsm['cell']
- pre = self.magic_escape
- pre2 = pre+pre
-
- explicit_magic = text.startswith(pre)
-
- # Completion logic:
- # - user gives %%: only do cell magics
- # - user gives %: do both line and cell magics
- # - no prefix: do both
- # In other words, line magics are skipped if the user gives %% explicitly
- #
- # We also exclude magics that match any currently visible names:
- # https://github.com/ipython/ipython/issues/4877, unless the user has
- # typed a %:
- # https://github.com/ipython/ipython/issues/10754
- bare_text = text.lstrip(pre)
- global_matches = self.global_matches(bare_text)
- if not explicit_magic:
- def matches(magic):
- """
- Filter magics, in particular remove magics that match
- a name present in global namespace.
- """
- return ( magic.startswith(bare_text) and
- magic not in global_matches )
- else:
- def matches(magic):
- return magic.startswith(bare_text)
-
- comp = [ pre2+m for m in cell_magics if matches(m)]
- if not text.startswith(pre2):
- comp += [ pre+m for m in line_magics if matches(m)]
-
- return comp
-
- @context_matcher()
- def magic_config_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match class names and attributes for %config magic."""
- # NOTE: uses `line_buffer` equivalent for compatibility
- matches = self.magic_config_matches(context.line_with_cursor)
- return _convert_matcher_v1_result_to_v2(matches, type="param")
-
- def magic_config_matches(self, text: str) -> List[str]:
- """Match class names and attributes for %config magic.
-
- .. deprecated:: 8.6
- You can use :meth:`magic_config_matcher` instead.
- """
- texts = text.strip().split()
-
- if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'):
- # get all configuration classes
- classes = sorted(set([ c for c in self.shell.configurables
- if c.__class__.class_traits(config=True)
- ]), key=lambda x: x.__class__.__name__)
- classnames = [ c.__class__.__name__ for c in classes ]
-
- # return all classnames if config or %config is given
- if len(texts) == 1:
- return classnames
-
- # match classname
- classname_texts = texts[1].split('.')
- classname = classname_texts[0]
- classname_matches = [ c for c in classnames
- if c.startswith(classname) ]
-
- # return matched classes or the matched class with attributes
- if texts[1].find('.') < 0:
- return classname_matches
- elif len(classname_matches) == 1 and \
- classname_matches[0] == classname:
- cls = classes[classnames.index(classname)].__class__
- help = cls.class_get_help()
- # strip leading '--' from cl-args:
- help = re.sub(re.compile(r'^--', re.MULTILINE), '', help)
- return [ attr.split('=')[0]
- for attr in help.strip().splitlines()
- if attr.startswith(texts[1]) ]
- return []
-
- @context_matcher()
- def magic_color_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match color schemes for %colors magic."""
- # NOTE: uses `line_buffer` equivalent for compatibility
- matches = self.magic_color_matches(context.line_with_cursor)
- return _convert_matcher_v1_result_to_v2(matches, type="param")
-
- def magic_color_matches(self, text: str) -> List[str]:
- """Match color schemes for %colors magic.
-
- .. deprecated:: 8.6
- You can use :meth:`magic_color_matcher` instead.
- """
- texts = text.split()
- if text.endswith(' '):
- # .split() strips off the trailing whitespace. Add '' back
- # so that: '%colors ' -> ['%colors', '']
- texts.append('')
-
- if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'):
- prefix = texts[1]
- return [ color for color in InspectColors.keys()
- if color.startswith(prefix) ]
- return []
-
- @context_matcher(identifier="IPCompleter.jedi_matcher")
- def _jedi_matcher(self, context: CompletionContext) -> _JediMatcherResult:
- matches = self._jedi_matches(
- cursor_column=context.cursor_position,
- cursor_line=context.cursor_line,
- text=context.full_text,
- )
- return {
- "completions": matches,
- # static analysis should not suppress other matchers
- "suppress": False,
- }
-
- def _jedi_matches(
- self, cursor_column: int, cursor_line: int, text: str
- ) -> Iterator[_JediCompletionLike]:
- """
- Return a list of :any:`jedi.api.Completion`\\s object from a ``text`` and
- cursor position.
-
- Parameters
- ----------
- cursor_column : int
- column position of the cursor in ``text``, 0-indexed.
- cursor_line : int
- line position of the cursor in ``text``, 0-indexed
- text : str
- text to complete
-
- Notes
- -----
- If ``IPCompleter.debug`` is ``True``, this may return a :any:`_FakeJediCompletion`
- object containing a string with the Jedi debug information attached.
-
- .. deprecated:: 8.6
- You can use :meth:`_jedi_matcher` instead.
- """
- namespaces = [self.namespace]
- if self.global_namespace is not None:
- namespaces.append(self.global_namespace)
-
- completion_filter = lambda x:x
- offset = cursor_to_position(text, cursor_line, cursor_column)
- # filter output if we are completing for object members
- if offset:
- pre = text[offset-1]
- if pre == '.':
- if self.omit__names == 2:
- completion_filter = lambda c:not c.name.startswith('_')
- elif self.omit__names == 1:
- completion_filter = lambda c:not (c.name.startswith('__') and c.name.endswith('__'))
- elif self.omit__names == 0:
- completion_filter = lambda x:x
- else:
- raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names))
-
- interpreter = jedi.Interpreter(text[:offset], namespaces)
- try_jedi = True
-
- try:
- # find the first token in the current tree -- if it is a ' or " then we are in a string
- completing_string = False
- try:
- first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value'))
- except StopIteration:
- pass
- else:
- # note the value may be ', ", or it may also be ''' or """, or
- # in some cases, """what/you/typed..., but all of these are
- # strings.
- completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'}
-
- # if we are in a string jedi is likely not the right candidate for
- # now. Skip it.
- try_jedi = not completing_string
- except Exception as e:
- # many things can go wrong; we are using a private API, just don't crash.
- if self.debug:
- print("Error detecting if completing a non-finished string :", e, '|')
-
- if not try_jedi:
- return iter([])
- try:
- return filter(completion_filter, interpreter.complete(column=cursor_column, line=cursor_line + 1))
- except Exception as e:
- if self.debug:
- return iter(
- [
- _FakeJediCompletion(
- 'Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\n"""'
- % (e)
- )
- ]
- )
- else:
- return iter([])
-
- @context_matcher()
- def python_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match attributes or global python names"""
- text = context.line_with_cursor
- if "." in text:
- try:
- matches, fragment = self._attr_matches(text, include_prefix=False)
- if text.endswith(".") and self.omit__names:
- if self.omit__names == 1:
- # true if txt is _not_ a __ name, false otherwise:
- no__name = lambda txt: re.match(r".*\.__.*?__", txt) is None
- else:
- # true if txt is _not_ a _ name, false otherwise:
- no__name = (
- lambda txt: re.match(r"\._.*?", txt[txt.rindex(".") :])
- is None
- )
- matches = filter(no__name, matches)
- return _convert_matcher_v1_result_to_v2(
- matches, type="attribute", fragment=fragment
- )
- except NameError:
- # catches <undefined attributes>.<tab>
- matches = []
- return _convert_matcher_v1_result_to_v2(matches, type="attribute")
- else:
- matches = self.global_matches(context.token)
- # TODO: maybe distinguish between functions, modules and just "variables"
- return _convert_matcher_v1_result_to_v2(matches, type="variable")
-
- @completion_matcher(api_version=1)
- def python_matches(self, text: str) -> Iterable[str]:
- """Match attributes or global python names.
-
- .. deprecated:: 8.27
- You can use :meth:`python_matcher` instead."""
- if "." in text:
- try:
- matches = self.attr_matches(text)
- if text.endswith('.') and self.omit__names:
- if self.omit__names == 1:
- # true if txt is _not_ a __ name, false otherwise:
- no__name = (lambda txt:
- re.match(r'.*\.__.*?__',txt) is None)
- else:
- # true if txt is _not_ a _ name, false otherwise:
- no__name = (lambda txt:
- re.match(r'\._.*?',txt[txt.rindex('.'):]) is None)
- matches = filter(no__name, matches)
- except NameError:
- # catches <undefined attributes>.<tab>
- matches = []
- else:
- matches = self.global_matches(text)
- return matches
-
- def _default_arguments_from_docstring(self, doc):
- """Parse the first line of docstring for call signature.
-
- Docstring should be of the form 'min(iterable[, key=func])\n'.
- It can also parse cython docstring of the form
- 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'.
- """
- if doc is None:
- return []
-
- # care only about the first line
- line = doc.lstrip().splitlines()[0]
-
- #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
- #'min(iterable[, key=func])\n' -> 'iterable[, key=func]'
- sig = self.docstring_sig_re.search(line)
- if sig is None:
- return []
- # iterable[, key=func]' -> ['iterable[' ,' key=func]']
- sig = sig.groups()[0].split(',')
- ret = []
- for s in sig:
- #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
- ret += self.docstring_kwd_re.findall(s)
- return ret
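-
- # For instance, with the docstring form given above (illustrative):
- #
- #     completer._default_arguments_from_docstring('min(iterable[, key=func])\n')
- #     # -> ['key']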
-
- def _default_arguments(self, obj):
- """Return the list of default arguments of obj if it is callable,
- or empty list otherwise."""
- call_obj = obj
- ret = []
- if inspect.isbuiltin(obj):
- pass
- elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
- if inspect.isclass(obj):
- #for cython embedsignature=True the constructor docstring
- #belongs to the object itself not __init__
- ret += self._default_arguments_from_docstring(
- getattr(obj, '__doc__', ''))
- # for classes, check for __init__,__new__
- call_obj = (getattr(obj, '__init__', None) or
- getattr(obj, '__new__', None))
- # for all others, check if they are __call__able
- elif hasattr(obj, '__call__'):
- call_obj = obj.__call__
- ret += self._default_arguments_from_docstring(
- getattr(call_obj, '__doc__', ''))
-
- _keeps = (inspect.Parameter.KEYWORD_ONLY,
- inspect.Parameter.POSITIONAL_OR_KEYWORD)
-
- try:
- sig = inspect.signature(obj)
- ret.extend(k for k, v in sig.parameters.items() if
- v.kind in _keeps)
- except ValueError:
- pass
-
- return list(set(ret))
-
- @context_matcher()
- def python_func_kw_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match named parameters (kwargs) of the last open function."""
- matches = self.python_func_kw_matches(context.token)
- return _convert_matcher_v1_result_to_v2(matches, type="param")
-
- def python_func_kw_matches(self, text):
- """Match named parameters (kwargs) of the last open function.
-
- .. deprecated:: 8.6
- You can use :meth:`python_func_kw_matcher` instead.
- """
-
- if "." in text: # a parameter cannot be dotted
- return []
- try: regexp = self.__funcParamsRegex
- except AttributeError:
- regexp = self.__funcParamsRegex = re.compile(r'''
- '.*?(?<!\\)' |    # single quoted strings or
- ".*?(?<!\\)" |    # double quoted strings or
- \w+          |    # identifier
- \S                # other characters
- ''', re.VERBOSE | re.DOTALL)
- # 1. find the nearest identifier that comes before an unclosed
- # parenthesis before the cursor
- # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
- tokens = regexp.findall(self.text_until_cursor)
- iterTokens = reversed(tokens)
- openPar = 0
-
- for token in iterTokens:
- if token == ')':
- openPar -= 1
- elif token == '(':
- openPar += 1
- if openPar > 0:
- # found the last unclosed parenthesis
- break
- else:
- return []
- # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
- ids = []
- isId = re.compile(r'\w+$').match
-
- while True:
- try:
- ids.append(next(iterTokens))
- if not isId(ids[-1]):
- ids.pop()
- break
- if not next(iterTokens) == '.':
- break
- except StopIteration:
- break
-
- # Find all named arguments already assigned to, so as to avoid suggesting
- # them again
- usedNamedArgs = set()
- par_level = -1
- for token, next_token in zip(tokens, tokens[1:]):
- if token == '(':
- par_level += 1
- elif token == ')':
- par_level -= 1
-
- if par_level != 0:
- continue
-
- if next_token != '=':
- continue
-
- usedNamedArgs.add(token)
-
- argMatches = []
- try:
- callableObj = '.'.join(ids[::-1])
- namedArgs = self._default_arguments(eval(callableObj,
- self.namespace))
-
- # Remove used named arguments from the list, no need to show twice
- for namedArg in set(namedArgs) - usedNamedArgs:
- if namedArg.startswith(text):
- argMatches.append("%s=" %namedArg)
- except:
- pass
-
- return argMatches
-
- @staticmethod
- def _get_keys(obj: Any) -> List[Any]:
- # Objects can define their own completions by defining an
- # _ipython_key_completions_() method.
- method = get_real_method(obj, '_ipython_key_completions_')
- if method is not None:
- return method()
-
- # Special case some common in-memory dict-like types
- if isinstance(obj, dict) or _safe_isinstance(obj, "pandas", "DataFrame"):
- try:
- return list(obj.keys())
- except Exception:
- return []
- elif _safe_isinstance(obj, "pandas", "core", "indexing", "_LocIndexer"):
- try:
- return list(obj.obj.keys())
- except Exception:
- return []
- elif _safe_isinstance(obj, 'numpy', 'ndarray') or\
- _safe_isinstance(obj, 'numpy', 'void'):
- return obj.dtype.names or []
- return []
-
- @context_matcher()
- def dict_key_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
- """Match string keys in a dictionary, after e.g. ``foo[``."""
- matches = self.dict_key_matches(context.token)
- return _convert_matcher_v1_result_to_v2(
- matches, type="dict key", suppress_if_matches=True
- )
-
- def dict_key_matches(self, text: str) -> List[str]:
- """Match string keys in a dictionary, after e.g. ``foo[``.
-
- .. deprecated:: 8.6
- You can use :meth:`dict_key_matcher` instead.
- """
-
- # Short-circuit on closed dictionary (regular expression would
- # not match anyway, but would take quite a while).
- if self.text_until_cursor.strip().endswith("]"):
- return []
-
- match = DICT_MATCHER_REGEX.search(self.text_until_cursor)
-
- if match is None:
- return []
-
- expr, prior_tuple_keys, key_prefix = match.groups()
-
- obj = self._evaluate_expr(expr)
-
- if obj is not_found:
- return []
-
- keys = self._get_keys(obj)
- if not keys:
- return keys
-
- tuple_prefix = guarded_eval(
- prior_tuple_keys,
- EvaluationContext(
- globals=self.global_namespace,
- locals=self.namespace,
- evaluation=self.evaluation, # type: ignore
- in_subscript=True,
- ),
- )
-
- closing_quote, token_offset, matches = match_dict_keys(
- keys, key_prefix, self.splitter.delims, extra_prefix=tuple_prefix
- )
- if not matches:
- return []
-
- # get the cursor position of
- # - the text being completed
- # - the start of the key text
- # - the start of the completion
- text_start = len(self.text_until_cursor) - len(text)
- if key_prefix:
- key_start = match.start(3)
- completion_start = key_start + token_offset
- else:
- key_start = completion_start = match.end()
-
- # grab the leading prefix, to make sure all completions start with `text`
- if text_start > key_start:
- leading = ''
- else:
- leading = text[text_start:completion_start]
-
- # append closing quote and bracket as appropriate
- # this is *not* appropriate if the opening quote or bracket is outside
- # the text given to this method, e.g. `d["""a\nt
- can_close_quote = False
- can_close_bracket = False
-
- continuation = self.line_buffer[len(self.text_until_cursor) :].strip()
-
- if continuation.startswith(closing_quote):
- # do not close if already closed, e.g. `d['a'`
- continuation = continuation[len(closing_quote) :]
- else:
- can_close_quote = True
-
- continuation = continuation.strip()
-
- # e.g. `pandas.DataFrame` has different tuple indexer behaviour,
- # handling it is out of scope, so let's avoid appending suffixes.
- has_known_tuple_handling = isinstance(obj, dict)
-
- can_close_bracket = (
- not continuation.startswith("]") and self.auto_close_dict_keys
- )
- can_close_tuple_item = (
- not continuation.startswith(",")
- and has_known_tuple_handling
- and self.auto_close_dict_keys
- )
- can_close_quote = can_close_quote and self.auto_close_dict_keys
-
- # fast path if the closing quote should be appended but no suffix is allowed
- if not can_close_quote and not can_close_bracket and closing_quote:
- return [leading + k for k in matches]
-
- results = []
-
- end_of_tuple_or_item = _DictKeyState.END_OF_TUPLE | _DictKeyState.END_OF_ITEM
-
- for k, state_flag in matches.items():
- result = leading + k
- if can_close_quote and closing_quote:
- result += closing_quote
-
- if state_flag == end_of_tuple_or_item:
- # We do not know which suffix to add,
- # e.g. both tuple item and string
- # match this item.
- pass
-
- if state_flag in end_of_tuple_or_item and can_close_bracket:
- result += "]"
- if state_flag == _DictKeyState.IN_TUPLE and can_close_tuple_item:
- result += ", "
- results.append(result)
- return results
-
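- # A deliberately simplified, standalone sketch of the user-visible effect
- # of dict_key_matches above (not its implementation: tuple keys, prior
- # keys and the auto-close logic are handled by match_dict_keys and
- # guarded_eval): string keys are filtered by the prefix typed after
- # ``foo[`` and a closing quote plus bracket is appended.
- #
- # def _sketch_dict_key_matches(keys, typed):
- #     quote = typed[:1] if typed[:1] in "'\"" else '"'
- #     prefix = typed.lstrip("'\"")
- #     return [k + quote + "]" for k in keys
- #             if isinstance(k, str) and k.startswith(prefix)]
- #
- # _sketch_dict_key_matches({"alpha": 1, "beta": 2}, '"al')  # -> ['alpha"]']
-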
- @context_matcher()
- def unicode_name_matcher(self, context: CompletionContext):
- """Same as :any:`unicode_name_matches`, but adopted to new Matcher API."""
- fragment, matches = self.unicode_name_matches(context.text_until_cursor)
- return _convert_matcher_v1_result_to_v2(
- matches, type="unicode", fragment=fragment, suppress_if_matches=True
- )
-
- @staticmethod
- def unicode_name_matches(text: str) -> Tuple[str, List[str]]:
- """Match Latex-like syntax for unicode characters base
- on the name of the character.
-
- This does ``\\GREEK SMALL LETTER ETA`` -> ``η``
-
- Works only on valid python 3 identifier, or on combining characters that
- will combine to form a valid identifier.
- """
- slashpos = text.rfind('\\')
- if slashpos > -1:
- s = text[slashpos+1:]
- try:
- unic = unicodedata.lookup(s)
- # allow combining chars
- if ('a'+unic).isidentifier():
- return '\\'+s,[unic]
- except KeyError:
- pass
- return '', []
-
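- # A quick illustration of the mapping documented above, assuming only the
- # stdlib ``unicodedata`` database that the lookup relies on, and assuming
- # the surrounding class is ``IPCompleter`` as the other docstrings suggest:
- #
- # >>> import unicodedata
- # >>> unicodedata.lookup("GREEK SMALL LETTER ETA")
- # 'η'
- # >>> IPCompleter.unicode_name_matches("\\GREEK SMALL LETTER ETA")
- # ('\\GREEK SMALL LETTER ETA', ['η'])
-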
- @context_matcher()
- def latex_name_matcher(self, context: CompletionContext):
- """Match Latex syntax for unicode characters.
-
- This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α``
- """
- fragment, matches = self.latex_matches(context.text_until_cursor)
- return _convert_matcher_v1_result_to_v2(
- matches, type="latex", fragment=fragment, suppress_if_matches=True
- )
-
- def latex_matches(self, text: str) -> Tuple[str, Sequence[str]]:
- """Match Latex syntax for unicode characters.
-
- This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α``
-
- .. deprecated:: 8.6
- You can use :meth:`latex_name_matcher` instead.
- """
- slashpos = text.rfind('\\')
- if slashpos > -1:
- s = text[slashpos:]
- if s in latex_symbols:
- # Try to complete a full latex symbol to unicode
- # \\alpha -> α
- return s, [latex_symbols[s]]
- else:
- # If a user has partially typed a latex symbol, give them
- # a full list of options \al -> [\aleph, \alpha]
- matches = [k for k in latex_symbols if k.startswith(s)]
- if matches:
- return s, matches
- return '', ()
-
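- # Illustration of the two behaviours documented above, given a completer
- # instance (hypothetically named ``completer``); the exact candidate list
- # depends on the ``latex_symbols`` table:
- #
- # >>> completer.latex_matches("\\alpha")   # full symbol -> the character
- # ('\\alpha', ['α'])
- # >>> completer.latex_matches("\\alp")     # partial -> matching symbol names
- # ('\\alp', ['\\alpha', ...])
-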
- @context_matcher()
- def custom_completer_matcher(self, context):
- """Dispatch custom completer.
-
- If a match is found, suppresses all other matchers except for Jedi.
- """
- matches = self.dispatch_custom_completer(context.token) or []
- result = _convert_matcher_v1_result_to_v2(
- matches, type=_UNKNOWN_TYPE, suppress_if_matches=True
- )
- result["ordered"] = True
- result["do_not_suppress"] = {_get_matcher_id(self._jedi_matcher)}
- return result
-
- def dispatch_custom_completer(self, text):
- """
- .. deprecated:: 8.6
- You can use :meth:`custom_completer_matcher` instead.
- """
- if not self.custom_completers:
- return
-
- line = self.line_buffer
- if not line.strip():
- return None
-
- # Create a little structure to pass all the relevant information about
- # the current completion to any custom completer.
- event = SimpleNamespace()
- event.line = line
- event.symbol = text
- cmd = line.split(None,1)[0]
- event.command = cmd
- event.text_until_cursor = self.text_until_cursor
-
- # for foo etc, try also to find completer for %foo
- if not cmd.startswith(self.magic_escape):
- try_magic = self.custom_completers.s_matches(
- self.magic_escape + cmd)
- else:
- try_magic = []
-
- for c in itertools.chain(self.custom_completers.s_matches(cmd),
- try_magic,
- self.custom_completers.flat_matches(self.text_until_cursor)):
- try:
- res = c(event)
- if res:
- # first, try case sensitive match
- withcase = [r for r in res if r.startswith(text)]
- if withcase:
- return withcase
- # if none, then case insensitive ones are ok too
- text_low = text.lower()
- return [r for r in res if r.lower().startswith(text_low)]
- except TryNext:
- pass
- except KeyboardInterrupt:
- """
- If a custom completer takes too long,
- let the keyboard interrupt abort it and return nothing.
- """
- break
-
- return None
-
- def completions(self, text: str, offset: int)->Iterator[Completion]:
- """
- Returns an iterator over the possible completions
-
- .. warning::
-
- Unstable
-
- This function is unstable, its API may change without warning.
- It will also raise unless used in a proper context manager.
-
- Parameters
- ----------
- text : str
- Full text of the current input, multi line string.
- offset : int
- Integer representing the position of the cursor in ``text``. The offset
- is 0-based.
-
- Yields
- ------
- Completion
-
- Notes
- -----
- The cursor in a text can be seen either as being "in between" characters
- or "on" a character, depending on the interface visible to the user. For
- consistency, the cursor being "in between" characters X and Y is
- equivalent to the cursor being "on" character Y; that is to say, the
- character the cursor is on is considered to be after the cursor.
-
- Combining characters may span more than one position in the
- text.
-
- .. note::
-
- If ``IPCompleter.debug`` is :any:`True`, this will yield a ``--jedi/ipython--``
- fake Completion token to distinguish completions returned by Jedi
- from the usual IPython completions.
-
- .. note::
-
- Completions are not completely deduplicated yet. If identical
- completions come from different sources, this function does not
- ensure that each completion object will only be present once.
- """
- warnings.warn("_complete is a provisional API (as of IPython 6.0). "
- "It may change without warnings. "
- "Use in corresponding context manager.",
- category=ProvisionalCompleterWarning, stacklevel=2)
-
- seen = set()
- profiler:Optional[cProfile.Profile]
- try:
- if self.profile_completions:
- import cProfile
- profiler = cProfile.Profile()
- profiler.enable()
- else:
- profiler = None
-
- for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000):
- if c and (c in seen):
- continue
- yield c
- seen.add(c)
- except KeyboardInterrupt:
- """if completions take too long and users send keyboard interrupt,
- do not crash and return ASAP. """
- pass
- finally:
- if profiler is not None:
- profiler.disable()
- ensure_dir_exists(self.profiler_output_dir)
- output_path = os.path.join(self.profiler_output_dir, str(uuid.uuid4()))
- print("Writing profiler output to", output_path)
- profiler.dump_stats(output_path)
-
- def _completions(self, full_text: str, offset: int, *, _timeout) -> Iterator[Completion]:
- """
- Core completion routine. Same signature as :any:`completions`, with the
- extra ``_timeout`` parameter (in seconds).
-
- Computing jedi's completion ``.type`` can be quite expensive (it is a
- lazy property) and can require some warm-up, more warm-up than just
- computing the ``name`` of a completion. The warm-up can be:
-
- - Long warm-up the first time a module is encountered after
- install/update: actually build the parse/inference tree.
-
- - Shorter warm-up the first time the module is encountered in a session:
- load the tree from disk.
-
- We don't want to block completions for tens of seconds, so we give the
- completer a "budget" of ``_timeout`` seconds per invocation to compute
- completion types; the completions whose type has not yet been computed
- will be marked as "unknown" and will have a chance to be computed on the
- next round, as things get cached.
-
- Keep in mind that Jedi is not the only thing processing the completions,
- so keep the timeout short-ish: if we take more than 0.3 seconds we still
- have lots of processing to do.
-
- """
- deadline = time.monotonic() + _timeout
-
- before = full_text[:offset]
- cursor_line, cursor_column = position_to_cursor(full_text, offset)
-
- jedi_matcher_id = _get_matcher_id(self._jedi_matcher)
-
- def is_non_jedi_result(
- result: MatcherResult, identifier: str
- ) -> TypeGuard[SimpleMatcherResult]:
- return identifier != jedi_matcher_id
-
- results = self._complete(
- full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column
- )
-
- non_jedi_results: Dict[str, SimpleMatcherResult] = {
- identifier: result
- for identifier, result in results.items()
- if is_non_jedi_result(result, identifier)
- }
-
- jedi_matches = (
- cast(_JediMatcherResult, results[jedi_matcher_id])["completions"]
- if jedi_matcher_id in results
- else ()
- )
-
- iter_jm = iter(jedi_matches)
- if _timeout:
- for jm in iter_jm:
- try:
- type_ = jm.type
- except Exception:
- if self.debug:
- print("Error in Jedi getting type of ", jm)
- type_ = None
- delta = len(jm.name_with_symbols) - len(jm.complete)
- if type_ == 'function':
- signature = _make_signature(jm)
- else:
- signature = ''
- yield Completion(start=offset - delta,
- end=offset,
- text=jm.name_with_symbols,
- type=type_,
- signature=signature,
- _origin='jedi')
-
- if time.monotonic() > deadline:
- break
-
- for jm in iter_jm:
- delta = len(jm.name_with_symbols) - len(jm.complete)
- yield Completion(
- start=offset - delta,
- end=offset,
- text=jm.name_with_symbols,
- type=_UNKNOWN_TYPE, # don't compute type for speed
- _origin="jedi",
- signature="",
- )
-
- # TODO:
- # Suppress this, right now just for debug.
- if jedi_matches and non_jedi_results and self.debug:
- some_start_offset = before.rfind(
- next(iter(non_jedi_results.values()))["matched_fragment"]
- )
- yield Completion(
- start=some_start_offset,
- end=offset,
- text="--jedi/ipython--",
- _origin="debug",
- type="none",
- signature="",
- )
-
- ordered: List[Completion] = []
- sortable: List[Completion] = []
-
- for origin, result in non_jedi_results.items():
- matched_text = result["matched_fragment"]
- start_offset = before.rfind(matched_text)
- is_ordered = result.get("ordered", False)
- container = ordered if is_ordered else sortable
-
- # I'm unsure if this is always true, so let's assert and see if it
- # crashes
- assert before.endswith(matched_text)
-
- for simple_completion in result["completions"]:
- completion = Completion(
- start=start_offset,
- end=offset,
- text=simple_completion.text,
- _origin=origin,
- signature="",
- type=simple_completion.type or _UNKNOWN_TYPE,
- )
- container.append(completion)
-
- yield from list(self._deduplicate(ordered + self._sort(sortable)))[
- :MATCHES_LIMIT
- ]
-
- def complete(self, text=None, line_buffer=None, cursor_pos=None) -> Tuple[str, Sequence[str]]:
- """Find completions for the given text and line context.
-
- Note that both the text and the line_buffer are optional, but at least
- one of them must be given.
-
- Parameters
- ----------
- text : string, optional
- Text to perform the completion on. If not given, the line buffer
- is split using the instance's CompletionSplitter object.
- line_buffer : string, optional
- If not given, the completer attempts to obtain the current line
- buffer via readline. This keyword allows clients requesting text
- completions in non-readline contexts to inform the completer of the
- entire text.
- cursor_pos : int, optional
- Index of the cursor in the full line buffer. Should be provided by
- remote frontends where the kernel has no access to frontend state.
-
- Returns
- -------
- Tuple of two items:
- text : str
- Text that was actually used in the completion.
- matches : list
- A list of completion matches.
-
- Notes
- -----
- This API is likely to be deprecated and replaced by
- :any:`IPCompleter.completions` in the future.
-
- """
- warnings.warn('`Completer.complete` is pending deprecation since '
- 'IPython 6.0 and will be replaced by `Completer.completions`.',
- PendingDeprecationWarning)
- # potential todo: fold the 3rd throwaway argument of _complete
- # into the first two.
- # TODO: Q: does the above refer to jedi completions (i.e. 0-indexed)?
- # TODO: should we deprecate now, or does it stay?
-
- results = self._complete(
- line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0
- )
-
- jedi_matcher_id = _get_matcher_id(self._jedi_matcher)
-
- return self._arrange_and_extract(
- results,
- # TODO: can we confirm that excluding Jedi here was a deliberate choice in the previous version?
- skip_matchers={jedi_matcher_id},
- # this API does not support different start/end positions (fragments of token).
- abort_if_offset_changes=True,
- )
-
- def _arrange_and_extract(
- self,
- results: Dict[str, MatcherResult],
- skip_matchers: Set[str],
- abort_if_offset_changes: bool,
- ):
- sortable: List[AnyMatcherCompletion] = []
- ordered: List[AnyMatcherCompletion] = []
- most_recent_fragment = None
- for identifier, result in results.items():
- if identifier in skip_matchers:
- continue
- if not result["completions"]:
- continue
- if not most_recent_fragment:
- most_recent_fragment = result["matched_fragment"]
- if (
- abort_if_offset_changes
- and result["matched_fragment"] != most_recent_fragment
- ):
- break
- if result.get("ordered", False):
- ordered.extend(result["completions"])
- else:
- sortable.extend(result["completions"])
-
- if not most_recent_fragment:
- most_recent_fragment = "" # to satisfy typechecker (and just in case)
-
- return most_recent_fragment, [
- m.text for m in self._deduplicate(ordered + self._sort(sortable))
- ]
-
- def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None,
- full_text=None) -> _CompleteResult:
- """
- Like complete, but also returns raw jedi completions as well as the
- origin of the completion text. This could (and should) be made much
- cleaner, but that will be simpler once we drop the old (and stateful)
- :any:`complete` API.
-
- With the current provisional API, ``cursor_pos`` acts (depending on the
- caller) either as the offset in ``text`` or ``line_buffer``, or as the
- ``column`` when passing multiline strings. This could/should be renamed,
- but that would add extra noise.
-
- Parameters
- ----------
- cursor_line
- Index of the line the cursor is on. 0 indexed.
- cursor_pos
- Position of the cursor in the current line/line_buffer/text. 0
- indexed.
- line_buffer : optional, str
- The current line the cursor is in; this exists mostly for the legacy
- reason that readline could only give us the single current line.
- Prefer `full_text`.
- text : str
- The current "token" the cursor is in, also mostly for historical
- reasons, as the completer would trigger only after the current line
- was parsed.
- full_text : str
- Full text of the current cell.
-
- Returns
- -------
- An ordered dictionary where keys are identifiers of completion
- matchers and values are ``MatcherResult``s.
- """
-
- # if the cursor position isn't given, the only sane assumption we can
- # make is that it's at the end of the line (the common case)
- if cursor_pos is None:
- cursor_pos = len(line_buffer) if text is None else len(text)
-
- if self.use_main_ns:
- self.namespace = __main__.__dict__
-
- # if text is either None or an empty string, rely on the line buffer
- if (not line_buffer) and full_text:
- line_buffer = full_text.split('\n')[cursor_line]
- if not text: # issue #11508: check line_buffer before calling split_line
- text = (
- self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else ""
- )
-
- # If no line buffer is given, assume the input text is all there was
- if line_buffer is None:
- line_buffer = text
-
- # deprecated - do not use `line_buffer` in new code.
- self.line_buffer = line_buffer
- self.text_until_cursor = self.line_buffer[:cursor_pos]
-
- if not full_text:
- full_text = line_buffer
-
- context = CompletionContext(
- full_text=full_text,
- cursor_position=cursor_pos,
- cursor_line=cursor_line,
- token=text,
- limit=MATCHES_LIMIT,
- )
-
- # Start with a clean slate of completions
- results: Dict[str, MatcherResult] = {}
-
- jedi_matcher_id = _get_matcher_id(self._jedi_matcher)
-
- suppressed_matchers: Set[str] = set()
-
- matchers = {
- _get_matcher_id(matcher): matcher
- for matcher in sorted(
- self.matchers, key=_get_matcher_priority, reverse=True
- )
- }
-
- for matcher_id, matcher in matchers.items():
- matcher_id = _get_matcher_id(matcher)
-
- if matcher_id in self.disable_matchers:
- continue
-
- if matcher_id in results:
- warnings.warn(f"Duplicate matcher ID: {matcher_id}.")
-
- if matcher_id in suppressed_matchers:
- continue
-
- result: MatcherResult
- try:
- if _is_matcher_v1(matcher):
- result = _convert_matcher_v1_result_to_v2(
- matcher(text), type=_UNKNOWN_TYPE
- )
- elif _is_matcher_v2(matcher):
- result = matcher(context)
- else:
- api_version = _get_matcher_api_version(matcher)
- raise ValueError(f"Unsupported API version {api_version}")
- except BaseException:
- # Show the ugly traceback if the matcher causes an
- # exception, but do NOT crash the kernel!
- sys.excepthook(*sys.exc_info())
- continue
-
- # set default value for the matched fragment if the matcher did not provide one.
- result["matched_fragment"] = result.get("matched_fragment", context.token)
-
- if not suppressed_matchers:
- suppression_recommended: Union[bool, Set[str]] = result.get(
- "suppress", False
- )
-
- suppression_config = (
- self.suppress_competing_matchers.get(matcher_id, None)
- if isinstance(self.suppress_competing_matchers, dict)
- else self.suppress_competing_matchers
- )
- should_suppress = (
- (suppression_config is True)
- or (suppression_recommended and (suppression_config is not False))
- ) and has_any_completions(result)
-
- if should_suppress:
- suppression_exceptions: Set[str] = result.get(
- "do_not_suppress", set()
- )
- if isinstance(suppression_recommended, Iterable):
- to_suppress = set(suppression_recommended)
- else:
- to_suppress = set(matchers)
- suppressed_matchers = to_suppress - suppression_exceptions
-
- new_results = {}
- for previous_matcher_id, previous_result in results.items():
- if previous_matcher_id not in suppressed_matchers:
- new_results[previous_matcher_id] = previous_result
- results = new_results
-
- results[matcher_id] = result
-
- _, matches = self._arrange_and_extract(
- results,
- # TODO: Jedi completions are not included in the legacy stateful API; was this deliberate or an omission?
- # if it was an omission, we can remove the filtering step, otherwise remove this comment.
- skip_matchers={jedi_matcher_id},
- abort_if_offset_changes=False,
- )
-
- # populate legacy stateful API
- self.matches = matches
-
- return results
-
- @staticmethod
- def _deduplicate(
- matches: Sequence[AnyCompletion],
- ) -> Iterable[AnyCompletion]:
- filtered_matches: Dict[str, AnyCompletion] = {}
- for match in matches:
- text = match.text
- if (
- text not in filtered_matches
- or filtered_matches[text].type == _UNKNOWN_TYPE
- ):
- filtered_matches[text] = match
-
- return filtered_matches.values()
-
- @staticmethod
- def _sort(matches: Sequence[AnyCompletion]):
- return sorted(matches, key=lambda x: completions_sorting_key(x.text))
-
- @context_matcher()
- def fwd_unicode_matcher(self, context: CompletionContext):
- """Same as :any:`fwd_unicode_match`, but adopted to new Matcher API."""
- # TODO: use `context.limit` to terminate early once we matched the maximum
- # number that will be used downstream; can be added as an optional to
- # `fwd_unicode_match(text: str, limit: int = None)` or we could re-implement here.
- fragment, matches = self.fwd_unicode_match(context.text_until_cursor)
- return _convert_matcher_v1_result_to_v2(
- matches, type="unicode", fragment=fragment, suppress_if_matches=True
- )
-
- def fwd_unicode_match(self, text: str) -> Tuple[str, Sequence[str]]:
- """
- Forward match a string starting with a backslash with a list of
- potential Unicode completions.
-
- Will compute list of Unicode character names on first call and cache it.
-
- .. deprecated:: 8.6
- You can use :meth:`fwd_unicode_matcher` instead.
-
- Returns
- -------
- A tuple with:
- - matched text (empty if no matches)
- - list of potential completions (an empty tuple if there are none)
- """
- # TODO: self.unicode_names is here a list we traverse each time with ~100k elements.
- # We could do a faster match using a Trie.
-
- # Using pygtrie the following seems to work:
-
- # s = PrefixSet()
-
- # for c in range(0,0x10FFFF + 1):
- # try:
- # s.add(unicodedata.name(chr(c)))
- # except ValueError:
- # pass
- # [''.join(k) for k in s.iter(prefix)]
-
- # But this needs to be timed, and it adds an extra dependency.
-
- slashpos = text.rfind('\\')
- # if text starts with slash
- if slashpos > -1:
- # PERF: It's important that we don't access self._unicode_names
- # until we're inside this if-block. _unicode_names is lazily
- # initialized, and it takes a user-noticeable amount of time to
- # initialize it, so we don't want to initialize it unless we're
- # actually going to use it.
- s = text[slashpos + 1 :]
- sup = s.upper()
- candidates = [x for x in self.unicode_names if x.startswith(sup)]
- if candidates:
- return s, candidates
- candidates = [x for x in self.unicode_names if sup in x]
- if candidates:
- return s, candidates
- splitsup = sup.split(" ")
- candidates = [
- x for x in self.unicode_names if all(u in x for u in splitsup)
- ]
- if candidates:
- return s, candidates
-
- return "", ()
-
- # if text does not start with slash
- else:
- return '', ()
-
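- # The TODO above suggests a trie; a dependency-free sketch with a similar
- # effect (illustrative only, and covering only the ``startswith`` case, not
- # the substring or word-subset fallbacks used above) is a binary search over
- # the sorted name list, where all names sharing a prefix form one slice:
- #
- # import bisect
- # def _prefix_slice(sorted_names, prefix):
- #     lo = bisect.bisect_left(sorted_names, prefix)
- #     hi = bisect.bisect_left(sorted_names, prefix + "\uffff")
- #     return sorted_names[lo:hi]
-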
- @property
- def unicode_names(self) -> List[str]:
- """List of names of unicode code points that can be completed.
-
- The list is lazily initialized on first access.
- """
- if self._unicode_names is None:
- # Delegate to _unicode_name_compute over the known ranges; building the
- # full list inline here would just duplicate that (slow) work.
- self._unicode_names = _unicode_name_compute(_UNICODE_RANGES)
-
- return self._unicode_names
-
-def _unicode_name_compute(ranges:List[Tuple[int,int]]) -> List[str]:
- names = []
- for start,stop in ranges:
- for c in range(start, stop):
- try:
- names.append(unicodedata.name(chr(c)))
- except ValueError:
- pass
- return names
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/completerlib.py b/.venv/lib/python3.12/site-packages/IPython/core/completerlib.py
deleted file mode 100644
index f15490f..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/completerlib.py
+++ /dev/null
@@ -1,382 +0,0 @@
-# encoding: utf-8
-"""Implementations for various useful completers.
-
-These are all loaded by default by IPython.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team.
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib imports
-import glob
-import inspect
-import os
-import re
-import sys
-from importlib import import_module
-from importlib.machinery import all_suffixes
-
-
-# Third-party imports
-from time import time
-from zipimport import zipimporter
-
-# Our own imports
-from .completer import expand_user, compress_user
-from .error import TryNext
-from ..utils._process_common import arg_split
-
-# FIXME: this should be pulled in with the right call via the component system
-from IPython import get_ipython
-
-from typing import List
-
-#-----------------------------------------------------------------------------
-# Globals and constants
-#-----------------------------------------------------------------------------
-_suffixes = all_suffixes()
-
-# Time in seconds after which the rootmodules will be stored permanently in the
-# ipython ip.db database (kept in the user's .ipython dir).
-TIMEOUT_STORAGE = 2
-
-# Time in seconds after which we give up
-TIMEOUT_GIVEUP = 20
-
-# Regular expression for the python import statement
-import_re = re.compile(r'(?P<name>[^\W\d]\w*?)'
- r'(?P<package>[/\\]__init__)?'
- r'(?P<suffix>%s)$' %
- r'|'.join(re.escape(s) for s in _suffixes))
-
-# RE for the ipython %run command (python + ipython scripts)
-magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
-
-#-----------------------------------------------------------------------------
-# Local utilities
-#-----------------------------------------------------------------------------
-
-
-def module_list(path: str) -> List[str]:
- """
- Return the list containing the names of the modules available in the given
- folder.
- """
- # sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
- if path == '':
- path = '.'
-
- # A few local constants to be used in loops below
- pjoin = os.path.join
-
- if os.path.isdir(path):
- # Build a list of all files in the directory and all files
- # in its subdirectories. For performance reasons, do not
- # recurse more than one level into subdirectories.
- files: List[str] = []
- for root, dirs, nondirs in os.walk(path, followlinks=True):
- subdir = root[len(path)+1:]
- if subdir:
- files.extend(pjoin(subdir, f) for f in nondirs)
- dirs[:] = [] # Do not recurse into additional subdirectories.
- else:
- files.extend(nondirs)
-
- else:
- try:
- files = list(zipimporter(path)._files.keys()) # type: ignore
- except Exception:
- files = []
-
- # Build a list of modules which match the import_re regex.
- modules = []
- for f in files:
- m = import_re.match(f)
- if m:
- modules.append(m.group('name'))
- return list(set(modules))
-
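-# Illustrative check of module_list (a sketch, not part of the module): with a
-# regular on-disk stdlib, listing the directory that contains the stdlib's
-# ``os`` module yields familiar module names such as 'abc' or 'argparse'.
-def _demo_module_list():
- import os as _os
- return sorted(module_list(_os.path.dirname(_os.__file__)))[:5]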
-
-def get_root_modules():
- """
- Returns a list containing the names of all the modules available in the
- folders of the pythonpath.
-
- ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
- """
- ip = get_ipython()
- if ip is None:
- # No global shell instance to store cached list of modules.
- # Don't try to scan for modules every time.
- return list(sys.builtin_module_names)
-
- if getattr(ip.db, "_mock", False):
- rootmodules_cache = {}
- else:
- rootmodules_cache = ip.db.get("rootmodules_cache", {})
- rootmodules = list(sys.builtin_module_names)
- start_time = time()
- store = False
- for path in sys.path:
- try:
- modules = rootmodules_cache[path]
- except KeyError:
- modules = module_list(path)
- try:
- modules.remove('__init__')
- except ValueError:
- pass
- if path not in ('', '.'): # cwd modules should not be cached
- rootmodules_cache[path] = modules
- if time() - start_time > TIMEOUT_STORAGE and not store:
- store = True
- print("\nCaching the list of root modules, please wait!")
- print("(This will only be done once - type '%rehashx' to "
- "reset cache!)\n")
- sys.stdout.flush()
- if time() - start_time > TIMEOUT_GIVEUP:
- print("This is taking too long, we give up.\n")
- return []
- rootmodules.extend(modules)
- if store:
- ip.db['rootmodules_cache'] = rootmodules_cache
- rootmodules = list(set(rootmodules))
- return rootmodules
-
-
-def is_importable(module, attr: str, only_modules) -> bool:
- if only_modules:
- try:
- mod = getattr(module, attr)
- except ModuleNotFoundError:
- # See gh-14434
- return False
- return inspect.ismodule(mod)
- else:
- return not(attr[:2] == '__' and attr[-2:] == '__')
-
-def is_possible_submodule(module, attr):
- try:
- obj = getattr(module, attr)
- except AttributeError:
- # Is possibly an unimported submodule
- return True
- except TypeError:
- # https://github.com/ipython/ipython/issues/9678
- return False
- return inspect.ismodule(obj)
-
-
-def try_import(mod: str, only_modules=False) -> List[str]:
- """
- Try to import given module and return list of potential completions.
- """
- mod = mod.rstrip('.')
- try:
- m = import_module(mod)
- except:
- return []
-
- m_is_init = '__init__' in (getattr(m, '__file__', '') or '')
-
- completions = []
- if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
- completions.extend( [attr for attr in dir(m) if
- is_importable(m, attr, only_modules)])
-
- m_all = getattr(m, "__all__", [])
- if only_modules:
- completions.extend(attr for attr in m_all if is_possible_submodule(m, attr))
- else:
- completions.extend(m_all)
-
- if m_is_init:
- file_ = m.__file__
- file_path = os.path.dirname(file_) # type: ignore
- if file_path is not None:
- completions.extend(module_list(file_path))
- completions_set = {c for c in completions if isinstance(c, str)}
- completions_set.discard('__init__')
- return list(completions_set)
-
-
-#-----------------------------------------------------------------------------
-# Completion-related functions.
-#-----------------------------------------------------------------------------
-
-def quick_completer(cmd, completions):
- r""" Easily create a trivial completer for a command.
-
- Takes either a list of completions, or all completions in a string
- (which will be split on whitespace).
-
- Example::
-
- [d:\ipython]|1> import ipy_completers
- [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
- [d:\ipython]|3> foo b<TAB>
- bar baz
- [d:\ipython]|3> foo ba<TAB>
- """
-
- if isinstance(completions, str):
- completions = completions.split()
-
- def do_complete(self, event):
- return completions
-
- get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
-
-def module_completion(line):
- """
- Returns a list containing the completion possibilities for an import line.
-
- The line looks like this:
- 'import xml.d'
- 'from xml.dom import'
- """
-
- words = line.split(' ')
- nwords = len(words)
-
- # from whatever <tab> -> 'import '
- if nwords == 3 and words[0] == 'from':
- return ['import ']
-
- # 'from xy' or 'import xy'
- if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) :
- if nwords == 1:
- return get_root_modules()
- mod = words[1].split('.')
- if len(mod) < 2:
- return get_root_modules()
- completion_list = try_import('.'.join(mod[:-1]), True)
- return ['.'.join(mod[:-1] + [el]) for el in completion_list]
-
- # 'from xyz import abc'
- if nwords >= 3 and words[0] == 'from':
- mod = words[1]
- return try_import(mod)
-
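-# Illustrative calls (a sketch; note that filtering against the typed prefix
-# happens later in the completer machinery, not in module_completion itself):
-#
-# module_completion('import xml.d')          # e.g. ['xml.dom', 'xml.etree', ...]
-# module_completion('from xml.dom ')         # -> ['import ']
-# module_completion('from xml.dom import ')  # e.g. includes 'minidom'
-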
-#-----------------------------------------------------------------------------
-# Completers
-#-----------------------------------------------------------------------------
-# These all have the func(self, event) signature to be used as custom
-# completers
-
-def module_completer(self,event):
- """Give completions after user has typed 'import ...' or 'from ...'"""
-
- # This works in all versions of python. While 2.5 has
- # pkgutil.walk_packages(), that particular routine is fairly dangerous,
- # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
- # of possibly problematic side effects.
- # This searches the folders in sys.path for available modules.
-
- return module_completion(event.line)
-
-# FIXME: there's a lot of logic common to the run, cd and builtin file
-# completers, that is currently reimplemented in each.
-
-def magic_run_completer(self, event):
- """Complete files that end in .py or .ipy or .ipynb for the %run command.
- """
- comps = arg_split(event.line, strict=False)
- # relpath should be the current token that we need to complete.
- if (len(comps) > 1) and (not event.line.endswith(' ')):
- relpath = comps[-1].strip("'\"")
- else:
- relpath = ''
-
- #print("\nev=", event) # dbg
- #print("rp=", relpath) # dbg
- #print('comps=', comps) # dbg
-
- lglob = glob.glob
- isdir = os.path.isdir
- relpath, tilde_expand, tilde_val = expand_user(relpath)
-
- # Find if the user has already typed the first filename, after which we
- # should complete on all files, since after the first one other files may
- # be arguments to the input script.
-
- if any(magic_run_re.match(c) for c in comps):
- matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
- for f in lglob(relpath+'*')]
- else:
- dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
- pys = [f.replace('\\','/')
- for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
- lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
-
- matches = dirs + pys
-
- #print('run comp:', dirs+pys) # dbg
- return [compress_user(p, tilde_expand, tilde_val) for p in matches]
-
-
-def cd_completer(self, event):
- """Completer function for cd, which only returns directories."""
- ip = get_ipython()
- relpath = event.symbol
-
- #print(event) # dbg
- if event.line.endswith('-b') or ' -b ' in event.line:
- # return only bookmark completions
- bkms = self.db.get('bookmarks', None)
- if bkms:
- return bkms.keys()
- else:
- return []
-
- if event.symbol == '-':
- width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
- # jump in directory history by number
- fmt = '-%0' + width_dh +'d [%s]'
- ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
- if len(ents) > 1:
- return ents
- return []
-
- if event.symbol.startswith('--'):
- return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
-
- # Expand ~ in path and normalize directory separators.
- relpath, tilde_expand, tilde_val = expand_user(relpath)
- relpath = relpath.replace('\\','/')
-
- found = []
- for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
- if os.path.isdir(f)]:
- if ' ' in d:
- # we don't want to deal with any of that, complex code
- # for this is elsewhere
- raise TryNext
-
- found.append(d)
-
- if not found:
- if os.path.isdir(relpath):
- return [compress_user(relpath, tilde_expand, tilde_val)]
-
- # if no completions so far, try bookmarks
- bks = self.db.get('bookmarks',{})
- bkmatches = [s for s in bks if s.startswith(event.symbol)]
- if bkmatches:
- return bkmatches
-
- raise TryNext
-
- return [compress_user(p, tilde_expand, tilde_val) for p in found]
-
-def reset_completer(self, event):
- "A completer for %reset magic"
- return '-f -s in out array dhist'.split()
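-
-# All of the completers above use the ``func(self, event)`` signature; a user
-# could register one manually the same way quick_completer does above, e.g.
-# (sketch only, ``mycmd`` is a made-up command name):
-#
-# get_ipython().set_hook('complete_command', magic_run_completer, str_key='mycmd')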
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/crashhandler.py b/.venv/lib/python3.12/site-packages/IPython/core/crashhandler.py
deleted file mode 100644
index 9c75c5d..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/crashhandler.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# encoding: utf-8
-"""sys.excepthook for IPython itself, leaves a detailed report on disk.
-
-Authors:
-
-* Fernando Perez
-* Brian E. Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2001-2007 Fernando Perez.
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-import traceback
-from pprint import pformat
-from pathlib import Path
-
-import builtins as builtin_mod
-
-from IPython.core import ultratb
-from IPython.core.application import Application
-from IPython.core.release import author_email
-from IPython.utils.sysinfo import sys_info
-
-from IPython.core.release import __version__ as version
-
-from typing import Optional, Dict
-import types
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-# Template for the user message.
-_default_message_template = """\
-Oops, {app_name} crashed. We do our best to make it stable, but...
-
-A crash report was automatically generated with the following information:
- - A verbatim copy of the crash traceback.
- - A copy of your input history during this session.
- - Data on your current {app_name} configuration.
-
-It was left in the file named:
-\t'{crash_report_fname}'
-If you can email this file to the developers, the information in it will help
-them in understanding and correcting the problem.
-
-You can mail it to: {contact_name} at {contact_email}
-with the subject '{app_name} Crash Report'.
-
-If you want to do it now, the following command will work (under Unix):
-mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
-
-In your email, please also include information about:
-- The operating system under which the crash happened: Linux, macOS, Windows,
- other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2,
- Windows 10 Pro), and whether it is 32-bit or 64-bit;
-- How {app_name} was installed: using pip or conda, from GitHub, as part of
- a Docker container, or other, providing more detail if possible;
-- How to reproduce the crash: what exact sequence of instructions can one
- input to get the same crash? Ideally, find a minimal yet complete sequence
- of instructions that yields the crash.
-
-To ensure accurate tracking of this issue, please file a report about it at:
-{bug_tracker}
-"""
-
-_lite_message_template = """
-If you suspect this is an IPython {version} bug, please report it at:
- https://github.com/ipython/ipython/issues
-or send an email to the mailing list at {email}
-
-You can print a more detailed traceback right now with "%tb", or use "%debug"
-to interactively debug it.
-
-Extra-detailed tracebacks for bug-reporting purposes can be enabled via:
- {config}Application.verbose_crash=True
-"""
-
-
-class CrashHandler:
- """Customizable crash handlers for IPython applications.
-
- Instances of this class provide a :meth:`__call__` method which can be
- used as a ``sys.excepthook``. The :meth:`__call__` signature is::
-
- def __call__(self, etype, evalue, etb)
- """
-
- message_template = _default_message_template
- section_sep = '\n\n'+'*'*75+'\n\n'
- info: Dict[str, Optional[str]]
-
- def __init__(
- self,
- app: Application,
- contact_name: Optional[str] = None,
- contact_email: Optional[str] = None,
- bug_tracker: Optional[str] = None,
- show_crash_traceback: bool = True,
- call_pdb: bool = False,
- ):
- """Create a new crash handler
-
- Parameters
- ----------
- app : Application
- A running :class:`Application` instance, which will be queried at
- crash time for internal information.
- contact_name : str
- A string with the name of the person to contact.
- contact_email : str
- A string with the email address of the contact.
- bug_tracker : str
- A string with the URL for your project's bug tracker.
- show_crash_traceback : bool
- If false, don't print the crash traceback on stderr, only generate
- the on-disk report
- call_pdb
- Whether to call pdb on crash
-
- Attributes
- ----------
- These instances contain some non-argument attributes which allow for
- further customization of the crash handler's behavior. Please see the
- source for further details.
-
- """
- self.crash_report_fname = "Crash_report_%s.txt" % app.name
- self.app = app
- self.call_pdb = call_pdb
- #self.call_pdb = True # dbg
- self.show_crash_traceback = show_crash_traceback
- self.info = dict(app_name = app.name,
- contact_name = contact_name,
- contact_email = contact_email,
- bug_tracker = bug_tracker,
- crash_report_fname = self.crash_report_fname)
-
- def __call__(
- self,
- etype: type[BaseException],
- evalue: BaseException,
- etb: types.TracebackType,
- ) -> None:
- """Handle an exception, call for compatible with sys.excepthook"""
-
- # do not allow the crash handler to be called twice without reinstalling it
- # this prevents unlikely errors in the crash handling from entering an
- # infinite loop.
- sys.excepthook = sys.__excepthook__
-
- # Report tracebacks shouldn't use color in general (safer for users)
- color_scheme = 'NoColor'
-
- # Use this ONLY for developer debugging (keep commented out for release)
- # color_scheme = 'Linux' # dbg
- ipython_dir = getattr(self.app, "ipython_dir", None)
- if ipython_dir is not None:
- assert isinstance(ipython_dir, str)
- rptdir = Path(ipython_dir)
- else:
- rptdir = Path.cwd()
- if not rptdir.is_dir():
- rptdir = Path.cwd()
- report_name = rptdir / self.crash_report_fname
- # write the report filename into the instance dict so it can get
- # properly expanded out in the user message template
- self.crash_report_fname = str(report_name)
- self.info["crash_report_fname"] = str(report_name)
- TBhandler = ultratb.VerboseTB(
- color_scheme=color_scheme,
- long_header=True,
- call_pdb=self.call_pdb,
- )
- if self.call_pdb:
- TBhandler(etype,evalue,etb)
- return
- else:
- traceback = TBhandler.text(etype,evalue,etb,context=31)
-
- # print traceback to screen
- if self.show_crash_traceback:
- print(traceback, file=sys.stderr)
-
- # and generate a complete report on disk
- try:
- report = open(report_name, "w", encoding="utf-8")
- except:
- print('Could not create crash report on disk.', file=sys.stderr)
- return
-
- with report:
- # Inform user on stderr of what happened
- print('\n'+'*'*70+'\n', file=sys.stderr)
- print(self.message_template.format(**self.info), file=sys.stderr)
-
- # Construct report on disk
- report.write(self.make_report(str(traceback)))
-
- builtin_mod.input("Hit to quit (your terminal may close):")
-
- def make_report(self, traceback: str) -> str:
- """Return a string containing a crash report."""
-
- sec_sep = self.section_sep
-
- report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n']
- rpt_add = report.append
- rpt_add(sys_info())
-
- try:
- config = pformat(self.app.config)
- rpt_add(sec_sep)
- rpt_add("Application name: %s\n\n" % self.app.name)
- rpt_add("Current user configuration structure:\n\n")
- rpt_add(config)
- except:
- pass
- rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
-
- return ''.join(report)
-
-
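-# Sketch of the usage described in the CrashHandler docstring: an application
-# installs an instance as its excepthook (``my_app`` stands in for a running
-# Application instance; the "..." strings are placeholders):
-#
-# sys.excepthook = CrashHandler(my_app, contact_name="...", contact_email="...")
-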
-def crash_handler_lite(
- etype: type[BaseException], evalue: BaseException, tb: types.TracebackType
-) -> None:
- """a light excepthook, adding a small message to the usual traceback"""
- traceback.print_exception(etype, evalue, tb)
-
- from IPython.core.interactiveshell import InteractiveShell
- if InteractiveShell.initialized():
- # we are in a Shell environment, give %magic example
- config = "%config "
- else:
- # we are not in a shell, show generic config
- config = "c."
- print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr)
-
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/debugger.py b/.venv/lib/python3.12/site-packages/IPython/core/debugger.py
deleted file mode 100644
index 76c42e0..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/debugger.py
+++ /dev/null
@@ -1,1136 +0,0 @@
-"""
-Pdb debugger class.
-
-
-This is an extension to PDB which adds a number of new features.
-Note that there is also the `IPython.terminal.debugger` class which provides UI
-improvements.
-
-We also strongly recommend using this via the `ipdb` package, which provides
-extra configuration options.
-
-Among other things, this subclass of PDB:
- - supports many IPython magics like pdef/psource
- - hides frames in tracebacks based on `__tracebackhide__`
- - allows skipping frames based on `__debuggerskip__`
-
-
-Global Configuration
---------------------
-
-The IPython debugger will by default read the global ``~/.pdbrc`` file.
-That is to say, you can list all commands supported by ipdb in your `~/.pdbrc`
-configuration file to globally configure pdb.
-
-Example::
-
- # ~/.pdbrc
- skip_predicates debuggerskip false
- skip_hidden false
- context 25
-
-Features
---------
-
-The IPython debugger can hide and skip frames when printing or moving through
-the stack. This can have a performance impact, so it can be configured.
-
-Frame skipping and hiding are configurable via the `skip_predicates`
-command.
-
-Frames from read-only files can be hidden (via the ``readonly`` predicate),
-and frames containing ``__tracebackhide__ = True`` will be hidden by default.
-
-Frames containing ``__debuggerskip__`` will be stepped over, and frames whose
-parent frame's ``__debuggerskip__`` value is ``True`` will also be skipped.
-
- >>> def helpers_helper():
- ... pass
- ...
- ... def helper_1():
- ... print("don't step in me")
- ... helpers_helper() # will be stepped over unless a breakpoint is set.
- ...
- ...
- ... def helper_2():
- ... print("in me neither")
- ...
-
-One can define a decorator that wraps a function between the two helpers:
-
- >>> def pdb_skipped_decorator(function):
- ...
- ...
- ... def wrapped_fn(*args, **kwargs):
- ... __debuggerskip__ = True
- ... helper_1()
- ... __debuggerskip__ = False
- ... result = function(*args, **kwargs)
- ... __debuggerskip__ = True
- ... helper_2()
- ... # setting __debuggerskip__ to False again is not necessary
- ... return result
- ...
- ... return wrapped_fn
-
-When the decorated function ``bar()`` below is stepped into, ipdb will go
-directly into it by default, skipping the helpers:
-
- >>> @pdb_skipped_decorator
- ... def bar(x, y):
- ... return x * y
-
-
-You can toggle the behavior with
-
- ipdb> skip_predicates debuggerskip false
-
-or configure it in your ``.pdbrc``
-
-
-
-License
--------
-
-Modified from the standard pdb.Pdb class to avoid including readline, so that
-the command line completion of other programs which include this isn't
-damaged.
-
-In the future, this class will be expanded with improvements over the standard
-pdb.
-
-The original code in this file is mainly lifted out of cmd.py in Python 2.2,
-with minor changes. Licensing should therefore be under the standard Python
-terms. For details on the PSF (Python Software Foundation) standard license,
-see:
-
-https://docs.python.org/2/license.html
-
-
-All the changes since then are under the same license as IPython.
-
-"""
-
-#*****************************************************************************
-#
-# This file is licensed under the PSF license.
-#
-# Copyright (C) 2001 Python Software Foundation, www.python.org
-# Copyright (C) 2005-2006 Fernando Perez.
-#
-#
-#*****************************************************************************
-
-from __future__ import annotations
-
-import inspect
-import linecache
-import os
-import re
-import sys
-from contextlib import contextmanager
-from functools import lru_cache
-
-from IPython import get_ipython
-from IPython.core.excolors import exception_colors
-from IPython.utils import PyColorize, coloransi, py3compat
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
- # otherwise circular import
- from IPython.core.interactiveshell import InteractiveShell
-
-# skip module docstests
-__skip_doctest__ = True
-
-prompt = 'ipdb> '
-
-# We have to check this directly from sys.argv, config struct not yet available
-from pdb import Pdb as OldPdb
-
-# Allow the set_trace code to operate outside of an ipython instance, even if
-# it does so with some limitations. The rest of this support is implemented in
-# the Tracer constructor.
-
-DEBUGGERSKIP = "__debuggerskip__"
-
-
-# this has been implemented in Pdb in Python 3.13 (https://github.com/python/cpython/pull/106676);
-# on lower python versions, we backported the feature.
-CHAIN_EXCEPTIONS = sys.version_info < (3, 13)
-
-
-def make_arrow(pad):
- """generate the leading arrow in front of traceback or debugger"""
- if pad >= 2:
- return '-'*(pad-2) + '> '
- elif pad == 1:
- return '>'
- return ''
-
-
-def BdbQuit_excepthook(et, ev, tb, excepthook=None):
- """Exception hook which handles `BdbQuit` exceptions.
-
- All other exceptions are processed using the `excepthook`
- parameter.
- """
- raise ValueError(
- "`BdbQuit_excepthook` is deprecated since version 5.1. It is still around only because it is still imported by ipdb.",
- )
-
-
-RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+')
-
-
-def strip_indentation(multiline_string):
- return RGX_EXTRA_INDENT.sub('', multiline_string)
-
-
-def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
- """Make new_fn have old_fn's doc string. This is particularly useful
- for the ``do_...`` commands that hook into the help system.
- Adapted from a comp.lang.python posting
- by Duncan Booth."""
- def wrapper(*args, **kw):
- return new_fn(*args, **kw)
- if old_fn.__doc__:
- wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text
- return wrapper
-
-
-class Pdb(OldPdb):
- """Modified Pdb class, does not load readline.
-
- for a standalone version that uses prompt_toolkit, see
- `IPython.terminal.debugger.TerminalPdb` and
- `IPython.terminal.debugger.set_trace()`
-
-
- This debugger can hide and skip frames that are tagged according to some predicates.
- See the `skip_predicates` commands.
-
- """
-
- shell: InteractiveShell
-
- if CHAIN_EXCEPTIONS:
- MAX_CHAINED_EXCEPTION_DEPTH = 999
-
- default_predicates = {
- "tbhide": True,
- "readonly": False,
- "ipython_internal": True,
- "debuggerskip": True,
- }
-
- def __init__(self, completekey=None, stdin=None, stdout=None, context=5, **kwargs):
- """Create a new IPython debugger.
-
- Parameters
- ----------
- completekey : default None
- Passed to pdb.Pdb.
- stdin : default None
- Passed to pdb.Pdb.
- stdout : default None
- Passed to pdb.Pdb.
- context : int
- Number of lines of source code context to show when
- displaying stacktrace information.
- **kwargs
- Passed to pdb.Pdb.
-
- Notes
- -----
- The possibilities are python version dependent, see the python
- docs for more info.
- """
-
- # Parent constructor:
- try:
- self.context = int(context)
- if self.context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError) as e:
- raise ValueError("Context must be a positive integer") from e
-
- # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`.
- OldPdb.__init__(self, completekey, stdin, stdout, **kwargs)
-
- # IPython changes...
- self.shell = get_ipython()
-
- if self.shell is None:
- save_main = sys.modules['__main__']
- # No IPython instance running, we must create one
- from IPython.terminal.interactiveshell import \
- TerminalInteractiveShell
- self.shell = TerminalInteractiveShell.instance()
- # needed by any code which calls __import__("__main__") after
- # the debugger was entered. See also #9941.
- sys.modules["__main__"] = save_main
-
-
- color_scheme = self.shell.colors
-
- self.aliases = {}
-
- # Create color table: we copy the default one from the traceback
- # module and add a few attributes needed for debugging
- self.color_scheme_table = exception_colors()
-
- # shorthands
- C = coloransi.TermColors
- cst = self.color_scheme_table
-
-
- # Add a python parser so we can syntax highlight source while
- # debugging.
- self.parser = PyColorize.Parser(style=color_scheme)
- self.set_colors(color_scheme)
-
- # Set the prompt - the default prompt is '(Pdb)'
- self.prompt = prompt
- self.skip_hidden = True
- self.report_skipped = True
-
- # list of predicates we use to skip frames
- self._predicates = self.default_predicates
-
- if CHAIN_EXCEPTIONS:
- self._chained_exceptions = tuple()
- self._chained_exception_index = 0
-
- #
- def set_colors(self, scheme):
- """Shorthand access to the color table scheme selector method."""
- self.color_scheme_table.set_active_scheme(scheme)
- self.parser.style = scheme
-
- def set_trace(self, frame=None):
- if frame is None:
- frame = sys._getframe().f_back
- self.initial_frame = frame
- return super().set_trace(frame)
-
- def _hidden_predicate(self, frame):
- """
- Given a frame, return whether it should be hidden or not by IPython.
- """
-
- if self._predicates["readonly"]:
- fname = frame.f_code.co_filename
- # we need to check for file existence; interactively defined
- # functions would otherwise appear as read-only.
- if os.path.isfile(fname) and not os.access(fname, os.W_OK):
- return True
-
- if self._predicates["tbhide"]:
- if frame in (self.curframe, getattr(self, "initial_frame", None)):
- return False
- frame_locals = self._get_frame_locals(frame)
- if "__tracebackhide__" not in frame_locals:
- return False
- return frame_locals["__tracebackhide__"]
- return False
-
- def hidden_frames(self, stack):
- """
- Given a stack, return for each frame whether it should be skipped.
-
- This is used in up/down and where to skip frames.
- """
- # The f_locals dictionary is updated from the actual frame
- # locals whenever the .f_locals accessor is called, so we
- # avoid calling it here to preserve self.curframe_locals.
- # Furthermore, there is no good reason to hide the current frame.
- ip_hide = [self._hidden_predicate(s[0]) for s in stack]
- ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"]
- if ip_start and self._predicates["ipython_internal"]:
- ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)]
- return ip_hide
-
- if CHAIN_EXCEPTIONS:
-
- def _get_tb_and_exceptions(self, tb_or_exc):
- """
- Given a traceback or an exception, return a tuple of chained exceptions
- and current traceback to inspect.
- This will deal with selecting the right ``__cause__`` or ``__context__``
- as well as handling cycles, and return a flattened list of exceptions we
- can jump to with do_exceptions.
- """
- _exceptions = []
- if isinstance(tb_or_exc, BaseException):
- traceback, current = tb_or_exc.__traceback__, tb_or_exc
-
- while current is not None:
- if current in _exceptions:
- break
- _exceptions.append(current)
- if current.__cause__ is not None:
- current = current.__cause__
- elif (
- current.__context__ is not None
- and not current.__suppress_context__
- ):
- current = current.__context__
-
- if len(_exceptions) >= self.MAX_CHAINED_EXCEPTION_DEPTH:
- self.message(
- f"More than {self.MAX_CHAINED_EXCEPTION_DEPTH}"
- " chained exceptions found, not all exceptions"
- "will be browsable with `exceptions`."
- )
- break
- else:
- traceback = tb_or_exc
- return tuple(reversed(_exceptions)), traceback
-
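- # A standalone sketch (not a Pdb method) of the cause/context walk that
- # _get_tb_and_exceptions performs above, for a plain exception object:
- #
- # def _sketch_flatten_chain(exc):
- #     seen = []
- #     current = exc
- #     while current is not None and current not in seen:
- #         seen.append(current)
- #         if current.__cause__ is not None:
- #             current = current.__cause__
- #         elif current.__context__ is not None and not current.__suppress_context__:
- #             current = current.__context__
- #         else:
- #             break
- #     # oldest first, matching the numbering used by do_exceptions below
- #     return tuple(reversed(seen))
-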
- @contextmanager
- def _hold_exceptions(self, exceptions):
- """
- Context manager to ensure proper cleanup of exception references.
- When given a chained exception instead of a traceback,
- pdb may hold references to many objects which may leak memory.
- We use this context manager to make sure everything is properly cleaned up.
- """
- try:
- self._chained_exceptions = exceptions
- self._chained_exception_index = len(exceptions) - 1
- yield
- finally:
- # we can't put those in forget as otherwise they would
- # be cleared on exception change
- self._chained_exceptions = tuple()
- self._chained_exception_index = 0
-
- def do_exceptions(self, arg):
- """exceptions [number]
- List or change the current exception in an exception chain.
- Without arguments, list all the exceptions in the exception
- chain. Exceptions will be numbered, with the current exception indicated
- by an arrow.
- If given an integer as argument, switch to the exception at that index.
- """
- if not self._chained_exceptions:
- self.message(
- "Did not find chained exceptions. To move between"
- " exceptions, pdb/post_mortem must be given an exception"
- " object rather than a traceback."
- )
- return
- if not arg:
- for ix, exc in enumerate(self._chained_exceptions):
- prompt = ">" if ix == self._chained_exception_index else " "
- rep = repr(exc)
- if len(rep) > 80:
- rep = rep[:77] + "..."
- indicator = (
- " -"
- if self._chained_exceptions[ix].__traceback__ is None
- else f"{ix:>3}"
- )
- self.message(f"{prompt} {indicator} {rep}")
- else:
- try:
- number = int(arg)
- except ValueError:
- self.error("Argument must be an integer")
- return
- if 0 <= number < len(self._chained_exceptions):
- if self._chained_exceptions[number].__traceback__ is None:
- self.error(
- "This exception does not have a traceback, cannot jump to it"
- )
- return
-
- self._chained_exception_index = number
- self.setup(None, self._chained_exceptions[number].__traceback__)
- self.print_stack_entry(self.stack[self.curindex])
- else:
- self.error("No exception with that number")
-
- def interaction(self, frame, tb_or_exc):
- try:
- if CHAIN_EXCEPTIONS:
- # this context manager is part of interaction in 3.13
- _chained_exceptions, tb = self._get_tb_and_exceptions(tb_or_exc)
- if isinstance(tb_or_exc, BaseException):
- assert tb is not None, "main exception must have a traceback"
- with self._hold_exceptions(_chained_exceptions):
- OldPdb.interaction(self, frame, tb)
- else:
- OldPdb.interaction(self, frame, tb_or_exc)
-
- except KeyboardInterrupt:
- self.stdout.write("\n" + self.shell.get_exception_only())
-
- def precmd(self, line):
- """Perform useful escapes on the command before it is executed."""
-
- if line.endswith("??"):
- line = "pinfo2 " + line[:-2]
- elif line.endswith("?"):
- line = "pinfo " + line[:-1]
-
- line = super().precmd(line)
-
- return line
-
- def new_do_quit(self, arg):
- return OldPdb.do_quit(self, arg)
-
- do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
-
- def print_stack_trace(self, context=None):
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- if context is None:
- context = self.context
- try:
- context = int(context)
- if context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError) as e:
- raise ValueError("Context must be a positive integer") from e
- try:
- skipped = 0
- for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack):
- if hidden and self.skip_hidden:
- skipped += 1
- continue
- if skipped:
- print(
- f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n"
- )
- skipped = 0
- self.print_stack_entry(frame_lineno, context=context)
- if skipped:
- print(
- f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n"
- )
- except KeyboardInterrupt:
- pass
-
- def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ',
- context=None):
- if context is None:
- context = self.context
- try:
- context = int(context)
- if context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError) as e:
- raise ValueError("Context must be a positive integer") from e
- print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout)
-
- # vds: >>
- frame, lineno = frame_lineno
- filename = frame.f_code.co_filename
- self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
- # vds: <<
-
- def _get_frame_locals(self, frame):
- """ "
- Accessing f_local of current frame reset the namespace, so we want to avoid
- that or the following can happen
-
- ipdb> foo
- "old"
- ipdb> foo = "new"
- ipdb> foo
- "new"
- ipdb> where
- ipdb> foo
- "old"
-
- So if frame is self.curframe we instead return self.curframe_locals.
-
- """
- if frame is getattr(self, "curframe", None):
- return self.curframe_locals
- else:
- return frame.f_locals
-
- def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
- if context is None:
- context = self.context
- try:
- context = int(context)
- if context <= 0:
- print("Context must be a positive integer", file=self.stdout)
- except (TypeError, ValueError):
- print("Context must be a positive integer", file=self.stdout)
-
- import reprlib
-
- ret = []
-
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- tpl_link = "%s%%s%s" % (Colors.filenameEm, ColorsNormal)
- tpl_call = "%s%%s%s%%s%s" % (Colors.vName, Colors.valEm, ColorsNormal)
- tpl_line = "%%s%s%%s %s%%s" % (Colors.lineno, ColorsNormal)
- tpl_line_em = "%%s%s%%s %s%%s%s" % (Colors.linenoEm, Colors.line, ColorsNormal)
-
- frame, lineno = frame_lineno
-
- return_value = ''
- loc_frame = self._get_frame_locals(frame)
- if "__return__" in loc_frame:
- rv = loc_frame["__return__"]
- # return_value += '->'
- return_value += reprlib.repr(rv) + "\n"
- ret.append(return_value)
-
- #s = filename + '(' + `lineno` + ')'
- filename = self.canonic(frame.f_code.co_filename)
- link = tpl_link % py3compat.cast_unicode(filename)
-
- if frame.f_code.co_name:
- func = frame.f_code.co_name
- else:
- func = ""
-
- call = ""
- if func != "?":
- if "__args__" in loc_frame:
- args = reprlib.repr(loc_frame["__args__"])
- else:
- args = '()'
- call = tpl_call % (func, args)
-
- # The level info should be generated in the same format pdb uses, to
- # avoid breaking the pdbtrack functionality of python-mode in *emacs.
- if frame is self.curframe:
- ret.append('> ')
- else:
- ret.append(" ")
- ret.append("%s(%s)%s\n" % (link, lineno, call))
-
- start = lineno - 1 - context//2
- lines = linecache.getlines(filename)
- start = min(start, len(lines) - context)
- start = max(start, 0)
- lines = lines[start : start + context]
-
- for i, line in enumerate(lines):
- show_arrow = start + 1 + i == lineno
- linetpl = (frame is self.curframe or show_arrow) and tpl_line_em or tpl_line
- ret.append(
- self.__format_line(
- linetpl, filename, start + 1 + i, line, arrow=show_arrow
- )
- )
- return "".join(ret)
-
- def __format_line(self, tpl_line, filename, lineno, line, arrow=False):
- bp_mark = ""
- bp_mark_color = ""
-
- new_line, err = self.parser.format2(line, 'str')
- if not err:
- line = new_line
-
- bp = None
- if lineno in self.get_file_breaks(filename):
- bps = self.get_breaks(filename, lineno)
- bp = bps[-1]
-
- if bp:
- Colors = self.color_scheme_table.active_colors
- bp_mark = str(bp.number)
- bp_mark_color = Colors.breakpoint_enabled
- if not bp.enabled:
- bp_mark_color = Colors.breakpoint_disabled
-
- numbers_width = 7
- if arrow:
- # This is the line with the error
- pad = numbers_width - len(str(lineno)) - len(bp_mark)
- num = '%s%s' % (make_arrow(pad), str(lineno))
- else:
- num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
-
- return tpl_line % (bp_mark_color + bp_mark, num, line)
-
- def print_list_lines(self, filename, first, last):
- """The printing (as opposed to the parsing part of a 'list'
- command."""
- try:
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
- tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
- src = []
- if filename == "<string>" and hasattr(self, "_exec_filename"):
- filename = self._exec_filename
-
- for lineno in range(first, last+1):
- line = linecache.getline(filename, lineno)
- if not line:
- break
-
- if lineno == self.curframe.f_lineno:
- line = self.__format_line(
- tpl_line_em, filename, lineno, line, arrow=True
- )
- else:
- line = self.__format_line(
- tpl_line, filename, lineno, line, arrow=False
- )
-
- src.append(line)
- self.lineno = lineno
-
- print(''.join(src), file=self.stdout)
-
- except KeyboardInterrupt:
- pass
-
- def do_skip_predicates(self, args):
- """
- Turn on/off individual predicates as to whether a frame should be hidden/skipped.
-
- The global option to skip (or not) hidden frames is set with skip_hidden
-
- To change the value of a predicate
-
- skip_predicates key [true|false]
-
- Call without arguments to see the current values.
-
- To permanently change the value of an option add the corresponding
- command to your ``~/.pdbrc`` file. If you are programmatically using the
- Pdb instance you can also change the ``default_predicates`` class
- attribute.
- """
- if not args.strip():
- print("current predicates:")
- for p, v in self._predicates.items():
- print(" ", p, ":", v)
- return
- type_value = args.strip().split(" ")
- if len(type_value) != 2:
- print(
- f"Usage: skip_predicates , with one of {set(self._predicates.keys())}"
- )
- return
-
- type_, value = type_value
- if type_ not in self._predicates:
- print(f"{type_!r} not in {set(self._predicates.keys())}")
- return
- if value.lower() not in ("true", "yes", "1", "no", "false", "0"):
- print(
- f"{value!r} is invalid - use one of ('true', 'yes', '1', 'no', 'false', '0')"
- )
- return
-
- self._predicates[type_] = value.lower() in ("true", "yes", "1")
- if not any(self._predicates.values()):
- print(
- "Warning, all predicates set to False, skip_hidden may not have any effects."
- )
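- # Illustrative session (editor's sketch, not part of the upstream file):
- #
- #   ipdb> skip_predicates                      # show current predicate values
- #   ipdb> skip_predicates debuggerskip false   # stop inside decorator internals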
-
- def do_skip_hidden(self, arg):
- """
- Change whether or not we should skip frames with the
- __tracebackhide__ attribute.
- """
- if not arg.strip():
- print(
- f"skip_hidden = {self.skip_hidden}, use 'yes','no', 'true', or 'false' to change."
- )
- elif arg.strip().lower() in ("true", "yes"):
- self.skip_hidden = True
- elif arg.strip().lower() in ("false", "no"):
- self.skip_hidden = False
- if not any(self._predicates.values()):
- print(
- "Warning, all predicates set to False, skip_hidden may not have any effects."
- )
-
- def do_list(self, arg):
- """Print lines of code from the current stack frame
- """
- self.lastcmd = 'list'
- last = None
- if arg and arg != ".":
- try:
- x = eval(arg, {}, {})
- if type(x) == type(()):
- first, last = x
- first = int(first)
- last = int(last)
- if last < first:
- # Assume it's a count
- last = first + last
- else:
- first = max(1, int(x) - 5)
- except:
- print('*** Error in argument:', repr(arg), file=self.stdout)
- return
- elif self.lineno is None or arg == ".":
- first = max(1, self.curframe.f_lineno - 5)
- else:
- first = self.lineno + 1
- if last is None:
- last = first + 10
- self.print_list_lines(self.curframe.f_code.co_filename, first, last)
-
- # vds: >>
- lineno = first
- filename = self.curframe.f_code.co_filename
- self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
- # vds: <<
-
- do_l = do_list
-
- def getsourcelines(self, obj):
- lines, lineno = inspect.findsource(obj)
- if inspect.isframe(obj) and obj.f_globals is self._get_frame_locals(obj):
- # must be a module frame: do not try to cut a block out of it
- return lines, 1
- elif inspect.ismodule(obj):
- return lines, 1
- return inspect.getblock(lines[lineno:]), lineno+1
-
- def do_longlist(self, arg):
- """Print lines of code from the current stack frame.
-
- Shows more lines than 'list' does.
- """
- self.lastcmd = 'longlist'
- try:
- lines, lineno = self.getsourcelines(self.curframe)
- except OSError as err:
- self.error(err)
- return
- last = lineno + len(lines)
- self.print_list_lines(self.curframe.f_code.co_filename, lineno, last)
- do_ll = do_longlist
-
- def do_debug(self, arg):
- """debug code
- Enter a recursive debugger that steps through the code
- argument (which is an arbitrary expression or statement to be
- executed in the current environment).
- """
- trace_function = sys.gettrace()
- sys.settrace(None)
- globals = self.curframe.f_globals
- locals = self.curframe_locals
- p = self.__class__(completekey=self.completekey,
- stdin=self.stdin, stdout=self.stdout)
- p.use_rawinput = self.use_rawinput
- p.prompt = "(%s) " % self.prompt.strip()
- self.message("ENTERING RECURSIVE DEBUGGER")
- sys.call_tracing(p.run, (arg, globals, locals))
- self.message("LEAVING RECURSIVE DEBUGGER")
- sys.settrace(trace_function)
- self.lastcmd = p.lastcmd
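- # Illustrative session (editor's sketch, not part of the upstream file;
- # some_function is a stand-in name):
- #
- #   ipdb> debug some_function(x)   # step through the call in a nested debugger
- #   (ipdb>) next                   # nested prompt is the outer prompt in parens
- #   (ipdb>) quit                   # return to the outer debugger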
-
- def do_pdef(self, arg):
- """Print the call signature for any callable object.
-
- The debugger interface to %pdef"""
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("pdef")(arg, namespaces=namespaces)
-
- def do_pdoc(self, arg):
- """Print the docstring for an object.
-
- The debugger interface to %pdoc."""
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("pdoc")(arg, namespaces=namespaces)
-
- def do_pfile(self, arg):
- """Print (or run through pager) the file where an object is defined.
-
- The debugger interface to %pfile.
- """
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("pfile")(arg, namespaces=namespaces)
-
- def do_pinfo(self, arg):
- """Provide detailed information about an object.
-
- The debugger interface to %pinfo, i.e., obj?."""
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("pinfo")(arg, namespaces=namespaces)
-
- def do_pinfo2(self, arg):
- """Provide extra detailed information about an object.
-
- The debugger interface to %pinfo2, i.e., obj??."""
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("pinfo2")(arg, namespaces=namespaces)
-
- def do_psource(self, arg):
- """Print (or run through pager) the source code for an object."""
- namespaces = [
- ("Locals", self.curframe_locals),
- ("Globals", self.curframe.f_globals),
- ]
- self.shell.find_line_magic("psource")(arg, namespaces=namespaces)
-
- def do_where(self, arg):
- """w(here)
- Print a stack trace, with the most recent frame at the bottom.
- An arrow indicates the "current frame", which determines the
- context of most commands. 'bt' is an alias for this command.
-
- Takes an (optional) number as argument, specifying the number of
- context lines to print."""
- if arg:
- try:
- context = int(arg)
- except ValueError as err:
- self.error(err)
- return
- self.print_stack_trace(context)
- else:
- self.print_stack_trace()
-
- do_w = do_where
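- # Illustrative session (editor's sketch, not part of the upstream file):
- #
- #   ipdb> where 5   # stack trace with 5 lines of context per frame
- #   ipdb> w         # same, using the default context setting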
-
- def break_anywhere(self, frame):
- """
- _stop_in_decorator_internals is overly restrictive, as we may still want
- to trace function calls, so we need to also update break_anywhere so
- that if we don't `stop_here` because of a debugger skip, we may still
- stop at any point inside the function.
-
- """
-
- sup = super().break_anywhere(frame)
- if sup:
- return sup
- if self._predicates["debuggerskip"]:
- if DEBUGGERSKIP in frame.f_code.co_varnames:
- return True
- if frame.f_back and self._get_frame_locals(frame.f_back).get(DEBUGGERSKIP):
- return True
- return False
-
- def _is_in_decorator_internal_and_should_skip(self, frame):
- """
- Utility to tell us whether we are in a decorator internal and should skip.
-
- """
- # if we are disabled don't skip
- if not self._predicates["debuggerskip"]:
- return False
-
- return self._cachable_skip(frame)
-
- @lru_cache(1024)
- def _cached_one_parent_frame_debuggerskip(self, frame):
- """
- Cache the lookup for DEBUGGERSKIP on parent frames.
-
- This should speed up walking through deep frames when one of the highest
- frames does have a debugger skip.
-
- This is likely to introduce false positives, though.
- """
- while getattr(frame, "f_back", None):
- frame = frame.f_back
- if self._get_frame_locals(frame).get(DEBUGGERSKIP):
- return True
- return None
-
- @lru_cache(1024)
- def _cachable_skip(self, frame):
- # if frame is tagged, skip by default.
- if DEBUGGERSKIP in frame.f_code.co_varnames:
- return True
-
- # if one of the parent frame value set to True skip as well.
- if self._cached_one_parent_frame_debuggerskip(frame):
- return True
-
- return False
-
- def stop_here(self, frame):
- if self._is_in_decorator_internal_and_should_skip(frame) is True:
- return False
-
- hidden = False
- if self.skip_hidden:
- hidden = self._hidden_predicate(frame)
- if hidden:
- if self.report_skipped:
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- print(
- f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n"
- )
- return super().stop_here(frame)
-
- def do_up(self, arg):
- """u(p) [count]
- Move the current frame count (default one) levels up in the
- stack trace (to an older frame).
-
- Will skip hidden frames.
- """
- # modified version of upstream that skips
- # frames with __tracebackhide__
- if self.curindex == 0:
- self.error("Oldest frame")
- return
- try:
- count = int(arg or 1)
- except ValueError:
- self.error("Invalid frame count (%s)" % arg)
- return
- skipped = 0
- if count < 0:
- _newframe = 0
- else:
- counter = 0
- hidden_frames = self.hidden_frames(self.stack)
- for i in range(self.curindex - 1, -1, -1):
- if hidden_frames[i] and self.skip_hidden:
- skipped += 1
- continue
- counter += 1
- if counter >= count:
- break
- else:
- # if no break occurred.
- self.error(
- "all frames above hidden, use `skip_hidden False` to get get into those."
- )
- return
-
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- _newframe = i
- self._select_frame(_newframe)
- if skipped:
- print(
- f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n"
- )
-
- def do_down(self, arg):
- """d(own) [count]
- Move the current frame count (default one) levels down in the
- stack trace (to a newer frame).
-
- Will skip hidden frames.
- """
- if self.curindex + 1 == len(self.stack):
- self.error("Newest frame")
- return
- try:
- count = int(arg or 1)
- except ValueError:
- self.error("Invalid frame count (%s)" % arg)
- return
- if count < 0:
- _newframe = len(self.stack) - 1
- else:
- counter = 0
- skipped = 0
- hidden_frames = self.hidden_frames(self.stack)
- for i in range(self.curindex + 1, len(self.stack)):
- if hidden_frames[i] and self.skip_hidden:
- skipped += 1
- continue
- counter += 1
- if counter >= count:
- break
- else:
- self.error(
- "all frames below hidden, use `skip_hidden False` to get get into those."
- )
- return
-
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- if skipped:
- print(
- f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n"
- )
- _newframe = i
-
- self._select_frame(_newframe)
-
- do_d = do_down
- do_u = do_up
-
- def do_context(self, context):
- """context number_of_lines
- Set the number of lines of source code to show when displaying
- stacktrace information.
- """
- try:
- new_context = int(context)
- if new_context <= 0:
- raise ValueError()
- self.context = new_context
- except ValueError:
- self.error(
- f"The 'context' command requires a positive integer argument (current value {self.context})."
- )
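- # Illustrative session (editor's sketch, not part of the upstream file):
- #
- #   ipdb> context 11   # show 11 lines of source around each stack entry
- #   ipdb> context 0    # rejected: the argument must be a positive integer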
-
-
-class InterruptiblePdb(Pdb):
- """Version of debugger where KeyboardInterrupt exits the debugger altogether."""
-
- def cmdloop(self, intro=None):
- """Wrap cmdloop() such that KeyboardInterrupt stops the debugger."""
- try:
- return OldPdb.cmdloop(self, intro=intro)
- except KeyboardInterrupt:
- self.stop_here = lambda frame: False
- self.do_quit("")
- sys.settrace(None)
- self.quitting = False
- raise
-
- def _cmdloop(self):
- while True:
- try:
- # keyboard interrupts allow for an easy way to cancel
- # the current command, so allow them during interactive input
- self.allow_kbdint = True
- self.cmdloop()
- self.allow_kbdint = False
- break
- except KeyboardInterrupt:
- self.message('--KeyboardInterrupt--')
- raise
-
-
-def set_trace(frame=None, header=None):
- """
- Start debugging from `frame`.
-
- If frame is not specified, debugging starts from caller's frame.
- """
- pdb = Pdb()
- if header is not None:
- pdb.message(header)
- pdb.set_trace(frame or sys._getframe().f_back)
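- # Illustrative usage (editor's sketch, not part of the upstream file):
- #
- #     from IPython.core.debugger import set_trace
- #
- #     def compute(x):
- #         set_trace(header="entering compute")  # drop into ipdb in this frame
- #         return x * 2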
diff --git a/.venv/lib/python3.12/site-packages/IPython/core/display.py b/.venv/lib/python3.12/site-packages/IPython/core/display.py
deleted file mode 100644
index a943447..0000000
--- a/.venv/lib/python3.12/site-packages/IPython/core/display.py
+++ /dev/null
@@ -1,1373 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Top-level display functions for displaying object in different formats."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-
-from binascii import b2a_base64, hexlify
-import html
-import json
-import mimetypes
-import os
-import struct
-import warnings
-from copy import deepcopy
-from os.path import splitext
-from pathlib import Path, PurePath
-
-from typing import Optional
-
-from IPython.testing.skipdoctest import skip_doctest
-from . import display_functions
-
-
-__all__ = [
- "display_pretty",
- "display_html",
- "display_markdown",
- "display_svg",
- "display_png",
- "display_jpeg",
- "display_webp",
- "display_latex",
- "display_json",
- "display_javascript",
- "display_pdf",
- "DisplayObject",
- "TextDisplayObject",
- "Pretty",
- "HTML",
- "Markdown",
- "Math",
- "Latex",
- "SVG",
- "ProgressBar",
- "JSON",
- "GeoJSON",
- "Javascript",
- "Image",
- "set_matplotlib_formats",
- "set_matplotlib_close",
- "Video",
-]
-
-_deprecated_names = ["display", "clear_output", "publish_display_data", "update_display", "DisplayHandle"]
-
-__all__ = __all__ + _deprecated_names
-
-
-# ----- warn to import from IPython.display -----
-
-from warnings import warn
-
-
-def __getattr__(name):
- if name in _deprecated_names:
- warn(
- f"Importing {name} from IPython.core.display is deprecated since IPython 7.14, please import from IPython.display",
- DeprecationWarning,
- stacklevel=2,
- )
- return getattr(display_functions, name)
-
- if name in globals().keys():
- return globals()[name]
- else:
- raise AttributeError(f"module {__name__} has no attribute {name}")
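- # Illustrative (editor's note, not part of the upstream file): importing one of
- # the deprecated names still works but emits a DeprecationWarning, e.g.
- #
- #     from IPython.core.display import display   # warns: import from IPython.display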
-
-
-#-----------------------------------------------------------------------------
-# utility functions
-#-----------------------------------------------------------------------------
-
-def _safe_exists(path):
- """Check path, but don't let exceptions raise"""
- try:
- return os.path.exists(path)
- except Exception:
- return False
-
-
-def _display_mimetype(mimetype, objs, raw=False, metadata=None):
- """internal implementation of all display_foo methods
-
- Parameters
- ----------
- mimetype : str
- The mimetype to be published (e.g. 'image/png')
- *objs : object
- The Python objects to display, or if raw=True raw text data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- if metadata:
- metadata = {mimetype: metadata}
- if raw:
- # turn list of pngdata into list of { 'image/png': pngdata }
- objs = [ {mimetype: obj} for obj in objs ]
- display_functions.display(*objs, raw=raw, metadata=metadata, include=[mimetype])
-
-#-----------------------------------------------------------------------------
-# Main functions
-#-----------------------------------------------------------------------------
-
-
-def display_pretty(*objs, **kwargs):
- """Display the pretty (default) representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw text data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/plain', objs, **kwargs)
-
-
-def display_html(*objs, **kwargs):
- """Display the HTML representation of an object.
-
- Note: If raw=False and the object does not have an HTML
- representation, no HTML will be shown.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw HTML data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/html', objs, **kwargs)
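- # Illustrative usage (editor's sketch, not part of the upstream file;
- # my_obj is a stand-in name):
- #
- #     display_html("<b>bold</b>", raw=True)   # publish the string as raw HTML
- #     display_html(my_obj)                    # my_obj is formatted via its HTML repr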
-
-
-def display_markdown(*objs, **kwargs):
- """Displays the Markdown representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw markdown data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
-
- _display_mimetype('text/markdown', objs, **kwargs)
-
-
-def display_svg(*objs, **kwargs):
- """Display the SVG representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw svg data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/svg+xml', objs, **kwargs)
-
-
-def display_png(*objs, **kwargs):
- """Display the PNG representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw png data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/png', objs, **kwargs)
-
-
-def display_jpeg(*objs, **kwargs):
- """Display the JPEG representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw JPEG data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/jpeg', objs, **kwargs)
-
-
-def display_webp(*objs, **kwargs):
- """Display the WEBP representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw webp data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype("image/webp", objs, **kwargs)
-
-
-def display_latex(*objs, **kwargs):
- """Display the LaTeX representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw latex data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/latex', objs, **kwargs)
-
-
-def display_json(*objs, **kwargs):
- """Display the JSON representation of an object.
-
- Note that not many frontends support displaying JSON.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw json data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/json', objs, **kwargs)
-
-
-def display_javascript(*objs, **kwargs):
- """Display the Javascript representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw javascript data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/javascript', objs, **kwargs)
-
-
-def display_pdf(*objs, **kwargs):
- """Display the PDF representation of an object.
-
- Parameters
- ----------
- *objs : object
- The Python objects to display, or if raw=True raw PDF data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/pdf', objs, **kwargs)
-
-
-#-----------------------------------------------------------------------------
-# Smart classes
-#-----------------------------------------------------------------------------
-
-
-class DisplayObject(object):
- """An object that wraps data to be displayed."""
-
- _read_flags = 'r'
- _show_mem_addr = False
- metadata = None
-
- def __init__(self, data=None, url=None, filename=None, metadata=None):
- """Create a display object given raw data.
-
- When this object is returned by an expression or passed to the
- display function, it will result in the data being displayed
- in the frontend. The MIME type of the data should match the
- subclasses used, so the Png subclass should be used for 'image/png'
- data. If the data is a URL, the data will first be downloaded
- and then displayed.
-
- Parameters
- ----------
- data : unicode, str or bytes
- The raw data or a URL or file to load the data from
- url : unicode
- A URL to download the data from.
- filename : unicode
- Path to a local file to load the data from.
- metadata : dict
- Dict of metadata to be associated with the object when displayed.
- """
- if isinstance(data, (Path, PurePath)):
- data = str(data)
-
- if data is not None and isinstance(data, str):
- if data.startswith('http') and url is None:
- url = data
- filename = None
- data = None
- elif _safe_exists(data) and filename is None:
- url = None
- filename = data
- data = None
-
- self.url = url
- self.filename = filename
- # because of @data.setter methods in
- # subclasses, ensure url and filename are set
- # before assigning to self.data
- self.data = data
-
- if metadata is not None:
- self.metadata = metadata
- elif self.metadata is None:
- self.metadata = {}
-
- self.reload()
- self._check_data()
-
- def __repr__(self):
- if not self._show_mem_addr:
- cls = self.__class__
- r = "<%s.%s object>" % (cls.__module__, cls.__name__)
- else:
- r = super(DisplayObject, self).__repr__()
- return r
-
- def _check_data(self):
- """Override in subclasses if there's something to check."""
- pass
-
- def _data_and_metadata(self):
- """shortcut for returning metadata with shape information, if defined"""
- if self.metadata:
- return self.data, deepcopy(self.metadata)
- else:
- return self.data
-
- def reload(self):
- """Reload the raw data from file or URL."""
- if self.filename is not None:
- encoding = None if "b" in self._read_flags else "utf-8"
- with open(self.filename, self._read_flags, encoding=encoding) as f:
- self.data = f.read()
- elif self.url is not None:
- # Deferred import
- from urllib.request import urlopen
- response = urlopen(self.url)
- data = response.read()
- # extract encoding from header, if there is one:
- encoding = None
- if 'content-type' in response.headers:
- for sub in response.headers['content-type'].split(';'):
- sub = sub.strip()
- if sub.startswith('charset'):
- encoding = sub.split('=')[-1].strip()
- break
- if 'content-encoding' in response.headers:
- # TODO: do deflate?
- if 'gzip' in response.headers['content-encoding']:
- import gzip
- from io import BytesIO
-
- # assume utf-8 if encoding is not specified
- with gzip.open(
- BytesIO(data), "rt", encoding=encoding or "utf-8"
- ) as fp:
- encoding = None
- data = fp.read()
-
- # decode data, if an encoding was specified
- # We only touch self.data once since
- # subclasses such as SVG have @data.setter methods
- # that transform self.data into ... well svg.
- if encoding:
- self.data = data.decode(encoding, 'replace')
- else:
- self.data = data
-
-
-class TextDisplayObject(DisplayObject):
- """Create a text display object given raw data.
-
- Parameters
- ----------
- data : str or unicode
- The raw data or a URL or file to load the data from.
- url : unicode
- A URL to download the data from.
- filename : unicode
- Path to a local file to load the data from.
- metadata : dict
- Dict of metadata to be associated with the object when displayed.
- """
- def _check_data(self):
- if self.data is not None and not isinstance(self.data, str):
- raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
-
-class Pretty(TextDisplayObject):
-
- def _repr_pretty_(self, pp, cycle):
- return pp.text(self.data)
-
-
-class HTML(TextDisplayObject):
-
- def __init__(self, data=None, url=None, filename=None, metadata=None):
- def warn():
- if not data:
- return False
-
- #
- # Avoid calling lower() on the entire data, because it could be a
- # long string and we're only interested in its beginning and end.
- #
- prefix = data[:10].lower()
- suffix = data[-10:].lower()
- return prefix.startswith("